diff --git a/CHANGELOG.md b/CHANGELOG.md index 67d9e493f..727d3def2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://www.npmjs.com/package/@google-cloud/bigtable?activeTab=versions +## [6.2.0](https://github.com/googleapis/nodejs-bigtable/compare/v6.1.0...v6.2.0) (2025-07-23) + + +### Features + +* Add client-side metrics collection to readRows calls ([#1571](https://github.com/googleapis/nodejs-bigtable/issues/1571)) ([71f4d78](https://github.com/googleapis/nodejs-bigtable/commit/71f4d78422137c88f1521be45004982367dbda31)) +* Add plumbing to support unary calls for client-side metric collection ([#1631](https://github.com/googleapis/nodejs-bigtable/issues/1631)) ([c267ede](https://github.com/googleapis/nodejs-bigtable/commit/c267ede0140aa29bc75feada93899a4945980375)) +* Add support for Execute Query ([#1613](https://github.com/googleapis/nodejs-bigtable/issues/1613)) ([e3894ed](https://github.com/googleapis/nodejs-bigtable/commit/e3894edf4fc881153432f77ce976141397dc0348)) +* Initial timed stream implementation for application latencies ([#1639](https://github.com/googleapis/nodejs-bigtable/issues/1639)) ([ca490e8](https://github.com/googleapis/nodejs-bigtable/commit/ca490e80f2359156475e52c5f72fe0a9fe8e9740)) + + +### Bug Fixes + +* Ensure that the right views get created for the right metrics in client-side metrics collection ([#1590](https://github.com/googleapis/nodejs-bigtable/issues/1590)) ([6cb7cdd](https://github.com/googleapis/nodejs-bigtable/commit/6cb7cddf42ff1fe29b2ae4a729739bc12c3d4942)) + ## [6.1.0](https://github.com/googleapis/nodejs-bigtable/compare/v6.0.0...v6.1.0) (2025-05-30) diff --git a/package.json b/package.json index b78c563fe..0bcb69e20 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigtable", - "version": "6.1.0", + "version": "6.2.0", "description": "Cloud Bigtable Client Library for Node.js", "keywords": [ "bigtable", @@ -58,10 +58,12 @@ "@opentelemetry/sdk-metrics": "^1.30.0", "@types/long": "^4.0.0", "arrify": "2.0.0", + "abort-controller": "^3.0.0", "concat-stream": "^2.0.0", "dot-prop": "6.0.0", "escape-string-regexp": "4.0.0", "extend": "^3.0.2", + "fast-crc32c": "^2.0.0", "google-gax": "^5.0.1-rc.0", "grpc-gcp": "^1.0.1", "is": "^3.3.0", diff --git a/protos/protos.d.ts b/protos/protos.d.ts index a39ddc6c7..cf33904dd 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -28903,6 +28903,9 @@ export namespace google { /** CommonLanguageSettings destinations */ destinations?: (google.api.ClientLibraryDestination[]|null); + + /** CommonLanguageSettings selectiveGapicGeneration */ + selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); } /** Represents a CommonLanguageSettings. */ @@ -28920,6 +28923,9 @@ export namespace google { /** CommonLanguageSettings destinations. */ public destinations: google.api.ClientLibraryDestination[]; + + /** CommonLanguageSettings selectiveGapicGeneration. */ + public selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @param [properties] Properties to set @@ -29620,6 +29626,9 @@ export namespace google { /** PythonSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** PythonSettings experimentalFeatures */ + experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); } /** Represents a PythonSettings. */ @@ -29634,6 +29643,9 @@ export namespace google { /** PythonSettings common. 
*/ public common?: (google.api.ICommonLanguageSettings|null); + /** PythonSettings experimentalFeatures. */ + public experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); + /** * Creates a new PythonSettings instance using the specified properties. * @param [properties] Properties to set @@ -29712,6 +29724,118 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace PythonSettings { + + /** Properties of an ExperimentalFeatures. */ + interface IExperimentalFeatures { + + /** ExperimentalFeatures restAsyncIoEnabled */ + restAsyncIoEnabled?: (boolean|null); + + /** ExperimentalFeatures protobufPythonicTypesEnabled */ + protobufPythonicTypesEnabled?: (boolean|null); + + /** ExperimentalFeatures unversionedPackageDisabled */ + unversionedPackageDisabled?: (boolean|null); + } + + /** Represents an ExperimentalFeatures. */ + class ExperimentalFeatures implements IExperimentalFeatures { + + /** + * Constructs a new ExperimentalFeatures. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.PythonSettings.IExperimentalFeatures); + + /** ExperimentalFeatures restAsyncIoEnabled. */ + public restAsyncIoEnabled: boolean; + + /** ExperimentalFeatures protobufPythonicTypesEnabled. */ + public protobufPythonicTypesEnabled: boolean; + + /** ExperimentalFeatures unversionedPackageDisabled. */ + public unversionedPackageDisabled: boolean; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @param [properties] Properties to set + * @returns ExperimentalFeatures instance + */ + public static create(properties?: google.api.PythonSettings.IExperimentalFeatures): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Verifies an ExperimentalFeatures message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExperimentalFeatures + */ + public static fromObject(object: { [k: string]: any }): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. + * @param message ExperimentalFeatures + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.PythonSettings.ExperimentalFeatures, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExperimentalFeatures + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Properties of a NodeSettings. */ interface INodeSettings { @@ -30038,6 +30162,9 @@ export namespace google { /** GoSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** GoSettings renamedServices */ + renamedServices?: ({ [k: string]: string }|null); } /** Represents a GoSettings. */ @@ -30052,6 +30179,9 @@ export namespace google { /** GoSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** GoSettings renamedServices. */ + public renamedServices: { [k: string]: string }; + /** * Creates a new GoSettings instance using the specified properties. * @param [properties] Properties to set @@ -30376,6 +30506,109 @@ export namespace google { PACKAGE_MANAGER = 20 } + /** Properties of a SelectiveGapicGeneration. */ + interface ISelectiveGapicGeneration { + + /** SelectiveGapicGeneration methods */ + methods?: (string[]|null); + + /** SelectiveGapicGeneration generateOmittedAsInternal */ + generateOmittedAsInternal?: (boolean|null); + } + + /** Represents a SelectiveGapicGeneration. */ + class SelectiveGapicGeneration implements ISelectiveGapicGeneration { + + /** + * Constructs a new SelectiveGapicGeneration. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ISelectiveGapicGeneration); + + /** SelectiveGapicGeneration methods. */ + public methods: string[]; + + /** SelectiveGapicGeneration generateOmittedAsInternal. */ + public generateOmittedAsInternal: boolean; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. + * @param [properties] Properties to set + * @returns SelectiveGapicGeneration instance + */ + public static create(properties?: google.api.ISelectiveGapicGeneration): google.api.SelectiveGapicGeneration; + + /** + * Encodes the specified SelectiveGapicGeneration message. 
Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.SelectiveGapicGeneration; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.SelectiveGapicGeneration; + + /** + * Verifies a SelectiveGapicGeneration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SelectiveGapicGeneration + */ + public static fromObject(object: { [k: string]: any }): google.api.SelectiveGapicGeneration; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. + * @param message SelectiveGapicGeneration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.SelectiveGapicGeneration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SelectiveGapicGeneration to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** LaunchStage enum. */ enum LaunchStage { LAUNCH_STAGE_UNSPECIFIED = 0, @@ -30957,6 +31190,7 @@ export namespace google { /** Edition enum. 
*/ enum Edition { EDITION_UNKNOWN = 0, + EDITION_LEGACY = 900, EDITION_PROTO2 = 998, EDITION_PROTO3 = 999, EDITION_2023 = 1000, @@ -30987,6 +31221,9 @@ export namespace google { /** FileDescriptorProto weakDependency */ weakDependency?: (number[]|null); + /** FileDescriptorProto optionDependency */ + optionDependency?: (string[]|null); + /** FileDescriptorProto messageType */ messageType?: (google.protobuf.IDescriptorProto[]|null); @@ -31036,6 +31273,9 @@ export namespace google { /** FileDescriptorProto weakDependency. */ public weakDependency: number[]; + /** FileDescriptorProto optionDependency. */ + public optionDependency: string[]; + /** FileDescriptorProto messageType. */ public messageType: google.protobuf.IDescriptorProto[]; @@ -31170,6 +31410,9 @@ export namespace google { /** DescriptorProto reservedName */ reservedName?: (string[]|null); + + /** DescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents a DescriptorProto. */ @@ -31211,6 +31454,9 @@ export namespace google { /** DescriptorProto reservedName. */ public reservedName: string[]; + /** DescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new DescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -32058,6 +32304,9 @@ export namespace google { /** EnumDescriptorProto reservedName */ reservedName?: (string[]|null); + + /** EnumDescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents an EnumDescriptorProto. */ @@ -32084,6 +32333,9 @@ export namespace google { /** EnumDescriptorProto reservedName. */ public reservedName: string[]; + /** EnumDescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new EnumDescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -33018,6 +33270,9 @@ export namespace google { /** FieldOptions features */ features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -33073,6 +33328,9 @@ export namespace google { /** FieldOptions features. */ public features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -33293,6 +33551,121 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + /** Properties of a FeatureSupport. */ + interface IFeatureSupport { + + /** FeatureSupport editionIntroduced */ + editionIntroduced?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport editionDeprecated */ + editionDeprecated?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport deprecationWarning */ + deprecationWarning?: (string|null); + + /** FeatureSupport editionRemoved */ + editionRemoved?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + } + + /** Represents a FeatureSupport. 
*/ + class FeatureSupport implements IFeatureSupport { + + /** + * Constructs a new FeatureSupport. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FieldOptions.IFeatureSupport); + + /** FeatureSupport editionIntroduced. */ + public editionIntroduced: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport editionDeprecated. */ + public editionDeprecated: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport deprecationWarning. */ + public deprecationWarning: string; + + /** FeatureSupport editionRemoved. */ + public editionRemoved: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** + * Creates a new FeatureSupport instance using the specified properties. + * @param [properties] Properties to set + * @returns FeatureSupport instance + */ + public static create(properties?: google.protobuf.FieldOptions.IFeatureSupport): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Verifies a FeatureSupport message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSupport + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. 
+ * @param message FeatureSupport + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions.FeatureSupport, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSupport to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSupport + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } /** Properties of an OneofOptions. */ @@ -33531,6 +33904,9 @@ export namespace google { /** EnumValueOptions debugRedact */ debugRedact?: (boolean|null); + /** EnumValueOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -33553,6 +33929,9 @@ export namespace google { /** EnumValueOptions debugRedact. */ public debugRedact: boolean; + /** EnumValueOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -34148,6 +34527,12 @@ export namespace google { /** FeatureSet jsonFormat */ jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); + + /** FeatureSet enforceNamingStyle */ + enforceNamingStyle?: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle|null); + + /** FeatureSet defaultSymbolVisibility */ + defaultSymbolVisibility?: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null); } /** Represents a FeatureSet. */ @@ -34177,6 +34562,12 @@ export namespace google { /** FeatureSet jsonFormat. */ public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); + /** FeatureSet enforceNamingStyle. */ + public enforceNamingStyle: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle); + + /** FeatureSet defaultSymbolVisibility. */ + public defaultSymbolVisibility: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility); + /** * Creates a new FeatureSet instance using the specified properties. * @param [properties] Properties to set @@ -34299,6 +34690,116 @@ export namespace google { ALLOW = 1, LEGACY_BEST_EFFORT = 2 } + + /** EnforceNamingStyle enum. */ + enum EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2 + } + + /** Properties of a VisibilityFeature. */ + interface IVisibilityFeature { + } + + /** Represents a VisibilityFeature. */ + class VisibilityFeature implements IVisibilityFeature { + + /** + * Constructs a new VisibilityFeature. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FeatureSet.IVisibilityFeature); + + /** + * Creates a new VisibilityFeature instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns VisibilityFeature instance + */ + public static create(properties?: google.protobuf.FeatureSet.IVisibilityFeature): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Verifies a VisibilityFeature message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns VisibilityFeature + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. + * @param message VisibilityFeature + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSet.VisibilityFeature, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this VisibilityFeature to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for VisibilityFeature + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace VisibilityFeature { + + /** DefaultSymbolVisibility enum. 
*/ + enum DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + EXPORT_ALL = 1, + EXPORT_TOP_LEVEL = 2, + LOCAL_ALL = 3, + STRICT = 4 + } + } } /** Properties of a FeatureSetDefaults. */ @@ -34418,8 +34919,11 @@ export namespace google { /** FeatureSetEditionDefault edition */ edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - /** FeatureSetEditionDefault features */ - features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures */ + overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures */ + fixedFeatures?: (google.protobuf.IFeatureSet|null); } /** Represents a FeatureSetEditionDefault. */ @@ -34434,8 +34938,11 @@ export namespace google { /** FeatureSetEditionDefault edition. */ public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - /** FeatureSetEditionDefault features. */ - public features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures. */ + public overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures. */ + public fixedFeatures?: (google.protobuf.IFeatureSet|null); /** * Creates a new FeatureSetEditionDefault instance using the specified properties. @@ -34968,6 +35475,13 @@ export namespace google { } } + /** SymbolVisibility enum. */ + enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2 + } + /** Properties of a Duration. */ interface IDuration { diff --git a/protos/protos.js b/protos/protos.js index b8f776b25..f74b69963 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -69150,6 +69150,7 @@ * @interface ICommonLanguageSettings * @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri * @property {Array.|null} [destinations] CommonLanguageSettings destinations + * @property {google.api.ISelectiveGapicGeneration|null} [selectiveGapicGeneration] CommonLanguageSettings selectiveGapicGeneration */ /** @@ -69184,6 +69185,14 @@ */ CommonLanguageSettings.prototype.destinations = $util.emptyArray; + /** + * CommonLanguageSettings selectiveGapicGeneration. + * @member {google.api.ISelectiveGapicGeneration|null|undefined} selectiveGapicGeneration + * @memberof google.api.CommonLanguageSettings + * @instance + */ + CommonLanguageSettings.prototype.selectiveGapicGeneration = null; + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @function create @@ -69216,6 +69225,8 @@ writer.int32(message.destinations[i]); writer.ldelim(); } + if (message.selectiveGapicGeneration != null && Object.hasOwnProperty.call(message, "selectiveGapicGeneration")) + $root.google.api.SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -69267,6 +69278,10 @@ message.destinations.push(reader.int32()); break; } + case 3: { + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -69318,6 +69333,11 @@ break; } } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) { + var error = $root.google.api.SelectiveGapicGeneration.verify(message.selectiveGapicGeneration); + if (error) + return "selectiveGapicGeneration." 
+ error; + } return null; }; @@ -69360,6 +69380,11 @@ break; } } + if (object.selectiveGapicGeneration != null) { + if (typeof object.selectiveGapicGeneration !== "object") + throw TypeError(".google.api.CommonLanguageSettings.selectiveGapicGeneration: object expected"); + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.fromObject(object.selectiveGapicGeneration); + } return message; }; @@ -69378,8 +69403,10 @@ var object = {}; if (options.arrays || options.defaults) object.destinations = []; - if (options.defaults) + if (options.defaults) { object.referenceDocsUri = ""; + object.selectiveGapicGeneration = null; + } if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) object.referenceDocsUri = message.referenceDocsUri; if (message.destinations && message.destinations.length) { @@ -69387,6 +69414,8 @@ for (var j = 0; j < message.destinations.length; ++j) object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j]; } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) + object.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.toObject(message.selectiveGapicGeneration, options); return object; }; @@ -71209,6 +71238,7 @@ * @memberof google.api * @interface IPythonSettings * @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common + * @property {google.api.PythonSettings.IExperimentalFeatures|null} [experimentalFeatures] PythonSettings experimentalFeatures */ /** @@ -71234,6 +71264,14 @@ */ PythonSettings.prototype.common = null; + /** + * PythonSettings experimentalFeatures. + * @member {google.api.PythonSettings.IExperimentalFeatures|null|undefined} experimentalFeatures + * @memberof google.api.PythonSettings + * @instance + */ + PythonSettings.prototype.experimentalFeatures = null; + /** * Creates a new PythonSettings instance using the specified properties. * @function create @@ -71260,6 +71298,8 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.experimentalFeatures != null && Object.hasOwnProperty.call(message, "experimentalFeatures")) + $root.google.api.PythonSettings.ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -71300,6 +71340,10 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -71340,6 +71384,11 @@ if (error) return "common." + error; } + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) { + var error = $root.google.api.PythonSettings.ExperimentalFeatures.verify(message.experimentalFeatures); + if (error) + return "experimentalFeatures." 
+ error; + } return null; }; @@ -71360,6 +71409,11 @@ throw TypeError(".google.api.PythonSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.experimentalFeatures != null) { + if (typeof object.experimentalFeatures !== "object") + throw TypeError(".google.api.PythonSettings.experimentalFeatures: object expected"); + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.fromObject(object.experimentalFeatures); + } return message; }; @@ -71376,10 +71430,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.common = null; + object.experimentalFeatures = null; + } if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) + object.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.toObject(message.experimentalFeatures, options); return object; }; @@ -71409,6 +71467,258 @@ return typeUrlPrefix + "/google.api.PythonSettings"; }; + PythonSettings.ExperimentalFeatures = (function() { + + /** + * Properties of an ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @interface IExperimentalFeatures + * @property {boolean|null} [restAsyncIoEnabled] ExperimentalFeatures restAsyncIoEnabled + * @property {boolean|null} [protobufPythonicTypesEnabled] ExperimentalFeatures protobufPythonicTypesEnabled + * @property {boolean|null} [unversionedPackageDisabled] ExperimentalFeatures unversionedPackageDisabled + */ + + /** + * Constructs a new ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @classdesc Represents an ExperimentalFeatures. + * @implements IExperimentalFeatures + * @constructor + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + */ + function ExperimentalFeatures(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExperimentalFeatures restAsyncIoEnabled. + * @member {boolean} restAsyncIoEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.restAsyncIoEnabled = false; + + /** + * ExperimentalFeatures protobufPythonicTypesEnabled. + * @member {boolean} protobufPythonicTypesEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.protobufPythonicTypesEnabled = false; + + /** + * ExperimentalFeatures unversionedPackageDisabled. + * @member {boolean} unversionedPackageDisabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.unversionedPackageDisabled = false; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @function create + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures instance + */ + ExperimentalFeatures.create = function create(properties) { + return new ExperimentalFeatures(properties); + }; + + /** + * Encodes the specified ExperimentalFeatures message. 
Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @function encode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.restAsyncIoEnabled != null && Object.hasOwnProperty.call(message, "restAsyncIoEnabled")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.restAsyncIoEnabled); + if (message.protobufPythonicTypesEnabled != null && Object.hasOwnProperty.call(message, "protobufPythonicTypesEnabled")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.protobufPythonicTypesEnabled); + if (message.unversionedPackageDisabled != null && Object.hasOwnProperty.call(message, "unversionedPackageDisabled")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.unversionedPackageDisabled); + return writer; + }; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. + * @function decode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.restAsyncIoEnabled = reader.bool(); + break; + } + case 2: { + message.protobufPythonicTypesEnabled = reader.bool(); + break; + } + case 3: { + message.unversionedPackageDisabled = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExperimentalFeatures message. + * @function verify + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExperimentalFeatures.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + if (typeof message.restAsyncIoEnabled !== "boolean") + return "restAsyncIoEnabled: boolean expected"; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + if (typeof message.protobufPythonicTypesEnabled !== "boolean") + return "protobufPythonicTypesEnabled: boolean expected"; + if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) + if (typeof message.unversionedPackageDisabled !== "boolean") + return "unversionedPackageDisabled: boolean expected"; + return null; + }; + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} object Plain object + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + */ + ExperimentalFeatures.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.PythonSettings.ExperimentalFeatures) + return object; + var message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + if (object.restAsyncIoEnabled != null) + message.restAsyncIoEnabled = Boolean(object.restAsyncIoEnabled); + if (object.protobufPythonicTypesEnabled != null) + message.protobufPythonicTypesEnabled = Boolean(object.protobufPythonicTypesEnabled); + if (object.unversionedPackageDisabled != null) + message.unversionedPackageDisabled = Boolean(object.unversionedPackageDisabled); + return message; + }; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.ExperimentalFeatures} message ExperimentalFeatures + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExperimentalFeatures.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.restAsyncIoEnabled = false; + object.protobufPythonicTypesEnabled = false; + object.unversionedPackageDisabled = false; + } + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + object.restAsyncIoEnabled = message.restAsyncIoEnabled; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + object.protobufPythonicTypesEnabled = message.protobufPythonicTypesEnabled; + if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) + object.unversionedPackageDisabled = message.unversionedPackageDisabled; + return object; + }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @function toJSON + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + * @returns {Object.} JSON object + */ + ExperimentalFeatures.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ExperimentalFeatures + * @function getTypeUrl + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExperimentalFeatures.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PythonSettings.ExperimentalFeatures"; + }; + + return ExperimentalFeatures; + })(); + return PythonSettings; })(); @@ -72285,6 +72595,7 @@ * @memberof google.api * @interface IGoSettings * @property {google.api.ICommonLanguageSettings|null} [common] GoSettings common + * @property {Object.|null} [renamedServices] GoSettings renamedServices */ /** @@ -72296,6 +72607,7 @@ * @param {google.api.IGoSettings=} [properties] Properties to set */ function GoSettings(properties) { + this.renamedServices = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -72310,6 +72622,14 @@ */ GoSettings.prototype.common = null; + /** + * GoSettings renamedServices. + * @member {Object.} renamedServices + * @memberof google.api.GoSettings + * @instance + */ + GoSettings.prototype.renamedServices = $util.emptyObject; + /** * Creates a new GoSettings instance using the specified properties. 
* @function create @@ -72336,6 +72656,9 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.renamedServices != null && Object.hasOwnProperty.call(message, "renamedServices")) + for (var keys = Object.keys(message.renamedServices), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedServices[keys[i]]).ldelim(); return writer; }; @@ -72366,7 +72689,7 @@ GoSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -72376,6 +72699,29 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + if (message.renamedServices === $util.emptyObject) + message.renamedServices = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.renamedServices[key] = value; + break; + } default: reader.skipType(tag & 7); break; @@ -72416,6 +72762,14 @@ if (error) return "common." + error; } + if (message.renamedServices != null && message.hasOwnProperty("renamedServices")) { + if (!$util.isObject(message.renamedServices)) + return "renamedServices: object expected"; + var key = Object.keys(message.renamedServices); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.renamedServices[key[i]])) + return "renamedServices: string{k:string} expected"; + } return null; }; @@ -72436,6 +72790,13 @@ throw TypeError(".google.api.GoSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.renamedServices) { + if (typeof object.renamedServices !== "object") + throw TypeError(".google.api.GoSettings.renamedServices: object expected"); + message.renamedServices = {}; + for (var keys = Object.keys(object.renamedServices), i = 0; i < keys.length; ++i) + message.renamedServices[keys[i]] = String(object.renamedServices[keys[i]]); + } return message; }; @@ -72452,10 +72813,18 @@ if (!options) options = {}; var object = {}; + if (options.objects || options.defaults) + object.renamedServices = {}; if (options.defaults) object.common = null; if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + var keys2; + if (message.renamedServices && (keys2 = Object.keys(message.renamedServices)).length) { + object.renamedServices = {}; + for (var j = 0; j < keys2.length; ++j) + object.renamedServices[keys2[j]] = message.renamedServices[keys2[j]]; + } return object; }; @@ -73094,30 +73463,275 @@ return values; })(); - /** - * LaunchStage enum. 
- * @name google.api.LaunchStage - * @enum {number} - * @property {number} LAUNCH_STAGE_UNSPECIFIED=0 LAUNCH_STAGE_UNSPECIFIED value - * @property {number} UNIMPLEMENTED=6 UNIMPLEMENTED value - * @property {number} PRELAUNCH=7 PRELAUNCH value - * @property {number} EARLY_ACCESS=1 EARLY_ACCESS value - * @property {number} ALPHA=2 ALPHA value - * @property {number} BETA=3 BETA value - * @property {number} GA=4 GA value - * @property {number} DEPRECATED=5 DEPRECATED value - */ - api.LaunchStage = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "LAUNCH_STAGE_UNSPECIFIED"] = 0; - values[valuesById[6] = "UNIMPLEMENTED"] = 6; - values[valuesById[7] = "PRELAUNCH"] = 7; - values[valuesById[1] = "EARLY_ACCESS"] = 1; - values[valuesById[2] = "ALPHA"] = 2; - values[valuesById[3] = "BETA"] = 3; - values[valuesById[4] = "GA"] = 4; - values[valuesById[5] = "DEPRECATED"] = 5; - return values; + api.SelectiveGapicGeneration = (function() { + + /** + * Properties of a SelectiveGapicGeneration. + * @memberof google.api + * @interface ISelectiveGapicGeneration + * @property {Array.|null} [methods] SelectiveGapicGeneration methods + * @property {boolean|null} [generateOmittedAsInternal] SelectiveGapicGeneration generateOmittedAsInternal + */ + + /** + * Constructs a new SelectiveGapicGeneration. + * @memberof google.api + * @classdesc Represents a SelectiveGapicGeneration. + * @implements ISelectiveGapicGeneration + * @constructor + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + */ + function SelectiveGapicGeneration(properties) { + this.methods = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SelectiveGapicGeneration methods. + * @member {Array.} methods + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.methods = $util.emptyArray; + + /** + * SelectiveGapicGeneration generateOmittedAsInternal. + * @member {boolean} generateOmittedAsInternal + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.generateOmittedAsInternal = false; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. + * @function create + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration instance + */ + SelectiveGapicGeneration.create = function create(properties) { + return new SelectiveGapicGeneration(properties); + }; + + /** + * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. 
+ * @function encode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.methods != null && message.methods.length) + for (var i = 0; i < message.methods.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.methods[i]); + if (message.generateOmittedAsInternal != null && Object.hasOwnProperty.call(message, "generateOmittedAsInternal")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.generateOmittedAsInternal); + return writer; + }; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. + * @function decode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.SelectiveGapicGeneration(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.methods && message.methods.length)) + message.methods = []; + message.methods.push(reader.string()); + break; + } + case 2: { + message.generateOmittedAsInternal = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SelectiveGapicGeneration message. 
+ * @function verify + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SelectiveGapicGeneration.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.methods != null && message.hasOwnProperty("methods")) { + if (!Array.isArray(message.methods)) + return "methods: array expected"; + for (var i = 0; i < message.methods.length; ++i) + if (!$util.isString(message.methods[i])) + return "methods: string[] expected"; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + if (typeof message.generateOmittedAsInternal !== "boolean") + return "generateOmittedAsInternal: boolean expected"; + return null; + }; + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} object Plain object + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + */ + SelectiveGapicGeneration.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.SelectiveGapicGeneration) + return object; + var message = new $root.google.api.SelectiveGapicGeneration(); + if (object.methods) { + if (!Array.isArray(object.methods)) + throw TypeError(".google.api.SelectiveGapicGeneration.methods: array expected"); + message.methods = []; + for (var i = 0; i < object.methods.length; ++i) + message.methods[i] = String(object.methods[i]); + } + if (object.generateOmittedAsInternal != null) + message.generateOmittedAsInternal = Boolean(object.generateOmittedAsInternal); + return message; + }; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.SelectiveGapicGeneration} message SelectiveGapicGeneration + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SelectiveGapicGeneration.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.methods = []; + if (options.defaults) + object.generateOmittedAsInternal = false; + if (message.methods && message.methods.length) { + object.methods = []; + for (var j = 0; j < message.methods.length; ++j) + object.methods[j] = message.methods[j]; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + object.generateOmittedAsInternal = message.generateOmittedAsInternal; + return object; + }; + + /** + * Converts this SelectiveGapicGeneration to JSON. 
+ * @function toJSON + * @memberof google.api.SelectiveGapicGeneration + * @instance + * @returns {Object.} JSON object + */ + SelectiveGapicGeneration.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @function getTypeUrl + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SelectiveGapicGeneration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.SelectiveGapicGeneration"; + }; + + return SelectiveGapicGeneration; + })(); + + /** + * LaunchStage enum. + * @name google.api.LaunchStage + * @enum {number} + * @property {number} LAUNCH_STAGE_UNSPECIFIED=0 LAUNCH_STAGE_UNSPECIFIED value + * @property {number} UNIMPLEMENTED=6 UNIMPLEMENTED value + * @property {number} PRELAUNCH=7 PRELAUNCH value + * @property {number} EARLY_ACCESS=1 EARLY_ACCESS value + * @property {number} ALPHA=2 ALPHA value + * @property {number} BETA=3 BETA value + * @property {number} GA=4 GA value + * @property {number} DEPRECATED=5 DEPRECATED value + */ + api.LaunchStage = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "LAUNCH_STAGE_UNSPECIFIED"] = 0; + values[valuesById[6] = "UNIMPLEMENTED"] = 6; + values[valuesById[7] = "PRELAUNCH"] = 7; + values[valuesById[1] = "EARLY_ACCESS"] = 1; + values[valuesById[2] = "ALPHA"] = 2; + values[valuesById[3] = "BETA"] = 3; + values[valuesById[4] = "GA"] = 4; + values[valuesById[5] = "DEPRECATED"] = 5; + return values; })(); /** @@ -74534,6 +75148,7 @@ * @name google.protobuf.Edition * @enum {number} * @property {number} EDITION_UNKNOWN=0 EDITION_UNKNOWN value + * @property {number} EDITION_LEGACY=900 EDITION_LEGACY value * @property {number} EDITION_PROTO2=998 EDITION_PROTO2 value * @property {number} EDITION_PROTO3=999 EDITION_PROTO3 value * @property {number} EDITION_2023=1000 EDITION_2023 value @@ -74548,6 +75163,7 @@ protobuf.Edition = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[0] = "EDITION_UNKNOWN"] = 0; + values[valuesById[900] = "EDITION_LEGACY"] = 900; values[valuesById[998] = "EDITION_PROTO2"] = 998; values[valuesById[999] = "EDITION_PROTO3"] = 999; values[valuesById[1000] = "EDITION_2023"] = 1000; @@ -74572,6 +75188,7 @@ * @property {Array.|null} [dependency] FileDescriptorProto dependency * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [optionDependency] FileDescriptorProto optionDependency * @property {Array.|null} [messageType] FileDescriptorProto messageType * @property {Array.|null} [enumType] FileDescriptorProto enumType * @property {Array.|null} [service] FileDescriptorProto service @@ -74594,6 +75211,7 @@ this.dependency = []; this.publicDependency = []; this.weakDependency = []; + this.optionDependency = []; this.messageType = []; this.enumType = []; this.service = []; @@ -74644,6 +75262,14 @@ */ FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + /** + * FileDescriptorProto optionDependency. 
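
The enum wrappers above (`LaunchStage` here, `Edition` below) use protobuf.js's bidirectional-map idiom: `values[valuesById[id] = "NAME"] = id` records the number-to-name entry on the prototype object and the name-to-number entry on the enum object in a single statement. A quick sketch, assuming this generated `$root` is loaded:

```js
const { LaunchStage } = $root.google.api;

console.log(LaunchStage.GA); // 4    (name -> number, own property)
console.log(LaunchStage[4]); // "GA" (number -> name, via the prototype chain)
```
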
+ * @member {Array.} optionDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.optionDependency = $util.emptyArray; + /** * FileDescriptorProto messageType. * @member {Array.} messageType @@ -74765,6 +75391,9 @@ writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 14, wireType 0 =*/112).int32(message.edition); + if (message.optionDependency != null && message.optionDependency.length) + for (var i = 0; i < message.optionDependency.length; ++i) + writer.uint32(/* id 15, wireType 2 =*/122).string(message.optionDependency[i]); return writer; }; @@ -74837,6 +75466,12 @@ message.weakDependency.push(reader.int32()); break; } + case 15: { + if (!(message.optionDependency && message.optionDependency.length)) + message.optionDependency = []; + message.optionDependency.push(reader.string()); + break; + } case 4: { if (!(message.messageType && message.messageType.length)) message.messageType = []; @@ -74939,6 +75574,13 @@ if (!$util.isInteger(message.weakDependency[i])) return "weakDependency: integer[] expected"; } + if (message.optionDependency != null && message.hasOwnProperty("optionDependency")) { + if (!Array.isArray(message.optionDependency)) + return "optionDependency: array expected"; + for (var i = 0; i < message.optionDependency.length; ++i) + if (!$util.isString(message.optionDependency[i])) + return "optionDependency: string[] expected"; + } if (message.messageType != null && message.hasOwnProperty("messageType")) { if (!Array.isArray(message.messageType)) return "messageType: array expected"; @@ -74993,6 +75635,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -75045,6 +75688,13 @@ for (var i = 0; i < object.weakDependency.length; ++i) message.weakDependency[i] = object.weakDependency[i] | 0; } + if (object.optionDependency) { + if (!Array.isArray(object.optionDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.optionDependency: array expected"); + message.optionDependency = []; + for (var i = 0; i < object.optionDependency.length; ++i) + message.optionDependency[i] = String(object.optionDependency[i]); + } if (object.messageType) { if (!Array.isArray(object.messageType)) throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); @@ -75108,6 +75758,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -75173,6 +75827,7 @@ object.extension = []; object.publicDependency = []; object.weakDependency = []; + object.optionDependency = []; } if (options.defaults) { object.name = ""; @@ -75229,6 +75884,11 @@ object.syntax = message.syntax; if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? 
message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.optionDependency && message.optionDependency.length) { + object.optionDependency = []; + for (var j = 0; j < message.optionDependency.length; ++j) + object.optionDependency[j] = message.optionDependency[j]; + } return object; }; @@ -75277,6 +75937,7 @@ * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options * @property {Array.|null} [reservedRange] DescriptorProto reservedRange * @property {Array.|null} [reservedName] DescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] DescriptorProto visibility */ /** @@ -75382,6 +76043,14 @@ */ DescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * DescriptorProto visibility. + * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.visibility = 0; + /** * Creates a new DescriptorProto instance using the specified properties. * @function create @@ -75434,6 +76103,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.visibility); return writer; }; @@ -75526,6 +76197,10 @@ message.reservedName.push(reader.string()); break; } + case 11: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -75639,6 +76314,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -75738,6 +76422,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -75767,6 +76471,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -75812,6 +76517,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -77856,6 +78563,7 @@ * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] EnumDescriptorProto visibility */ /** @@ -77916,6 +78624,14 @@ */ EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * EnumDescriptorProto visibility. + * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.visibility = 0; + /** * Creates a new EnumDescriptorProto instance using the specified properties. * @function create @@ -77953,6 +78669,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.visibility); return writer; }; @@ -78015,6 +78733,10 @@ message.reservedName.push(reader.string()); break; } + case 6: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -78083,6 +78805,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -78132,6 +78863,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -78156,6 +78907,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -78176,6 +78928,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -80494,6 +81248,7 @@ * @property {Array.|null} [targets] FieldOptions targets * @property {Array.|null} [editionDefaults] FieldOptions editionDefaults * @property {google.protobuf.IFeatureSet|null} [features] FieldOptions features + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] FieldOptions featureSupport * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference @@ -80614,6 +81369,14 @@ */ FieldOptions.prototype.features = null; + /** + * FieldOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.featureSupport = null; + /** * FieldOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -80688,6 +81451,8 @@ $root.google.protobuf.FieldOptions.EditionDefault.encode(message.editionDefaults[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim(); if (message.features != null && Object.hasOwnProperty.call(message, "features")) $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -80789,6 +81554,10 @@ message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } + case 22: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -80924,6 +81693,11 @@ if (error) return "features." + error; } + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -81112,6 +81886,11 @@ throw TypeError(".google.protobuf.FieldOptions.features: object expected"); message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.FieldOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); @@ -81209,6 +81988,7 @@ object.debugRedact = false; object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; object.features = null; + object.featureSupport = null; object[".google.api.resourceReference"] = null; } if (message.ctype != null && message.hasOwnProperty("ctype")) @@ -81241,6 +82021,8 @@ } if (message.features != null && message.hasOwnProperty("features")) object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -81513,6 +82295,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -81554,6 +82337,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -81564,93 +82351,575 @@ break; case "EDITION_2023": case 1000: - message.edition = 1000; + message.edition = 1000; + break; + case "EDITION_2024": + case 1001: + message.edition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.edition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.edition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.edition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.edition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.edition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.edition = 2147483647; + break; + } + if (object.value != null) + message.value = String(object.value); + return message; + }; + + /** + * Creates a plain object from an EditionDefault message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {google.protobuf.FieldOptions.EditionDefault} message EditionDefault + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EditionDefault.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.value = ""; + object.edition = options.enums === String ? 
"EDITION_UNKNOWN" : 0; + } + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + return object; + }; + + /** + * Converts this EditionDefault to JSON. + * @function toJSON + * @memberof google.protobuf.FieldOptions.EditionDefault + * @instance + * @returns {Object.} JSON object + */ + EditionDefault.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EditionDefault + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EditionDefault.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions.EditionDefault"; + }; + + return EditionDefault; + })(); + + FieldOptions.FeatureSupport = (function() { + + /** + * Properties of a FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @interface IFeatureSupport + * @property {google.protobuf.Edition|null} [editionIntroduced] FeatureSupport editionIntroduced + * @property {google.protobuf.Edition|null} [editionDeprecated] FeatureSupport editionDeprecated + * @property {string|null} [deprecationWarning] FeatureSupport deprecationWarning + * @property {google.protobuf.Edition|null} [editionRemoved] FeatureSupport editionRemoved + */ + + /** + * Constructs a new FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @classdesc Represents a FeatureSupport. + * @implements IFeatureSupport + * @constructor + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + */ + function FeatureSupport(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FeatureSupport editionIntroduced. + * @member {google.protobuf.Edition} editionIntroduced + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionIntroduced = 0; + + /** + * FeatureSupport editionDeprecated. + * @member {google.protobuf.Edition} editionDeprecated + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionDeprecated = 0; + + /** + * FeatureSupport deprecationWarning. + * @member {string} deprecationWarning + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.deprecationWarning = ""; + + /** + * FeatureSupport editionRemoved. + * @member {google.protobuf.Edition} editionRemoved + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionRemoved = 0; + + /** + * Creates a new FeatureSupport instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport instance + */ + FeatureSupport.create = function create(properties) { + return new FeatureSupport(properties); + }; + + /** + * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.editionIntroduced != null && Object.hasOwnProperty.call(message, "editionIntroduced")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.editionIntroduced); + if (message.editionDeprecated != null && Object.hasOwnProperty.call(message, "editionDeprecated")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.editionDeprecated); + if (message.deprecationWarning != null && Object.hasOwnProperty.call(message, "deprecationWarning")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.deprecationWarning); + if (message.editionRemoved != null && Object.hasOwnProperty.call(message, "editionRemoved")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.editionRemoved); + return writer; + }; + + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.editionIntroduced = reader.int32(); + break; + } + case 2: { + message.editionDeprecated = reader.int32(); + break; + } + case 3: { + message.deprecationWarning = reader.string(); + break; + } + case 4: { + message.editionRemoved = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSupport message. + * @function verify + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSupport.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + switch (message.editionIntroduced) { + default: + return "editionIntroduced: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + switch (message.editionDeprecated) { + default: + return "editionDeprecated: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + if (!$util.isString(message.deprecationWarning)) + return "deprecationWarning: string expected"; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + switch (message.editionRemoved) { + default: + return "editionRemoved: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + return null; + }; + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. 
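
The `verify` above whitelists exactly the `Edition` numbers enumerated in its switch statements; any other number is rejected with a reason string rather than an exception. A sketch (values invented):

```js
const { FeatureSupport } = $root.google.protobuf.FieldOptions;

console.log(FeatureSupport.verify({ editionIntroduced: 1001 }));  // null (EDITION_2024)
console.log(FeatureSupport.verify({ editionIntroduced: 12345 })); // "editionIntroduced: enum value expected"
```
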
+ * @function fromObject + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + */ + FeatureSupport.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions.FeatureSupport) + return object; + var message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + switch (object.editionIntroduced) { + default: + if (typeof object.editionIntroduced === "number") { + message.editionIntroduced = object.editionIntroduced; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionIntroduced = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionIntroduced = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionIntroduced = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionIntroduced = 999; + break; + case "EDITION_2023": + case 1000: + message.editionIntroduced = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionIntroduced = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionIntroduced = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionIntroduced = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionIntroduced = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionIntroduced = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionIntroduced = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionIntroduced = 2147483647; + break; + } + switch (object.editionDeprecated) { + default: + if (typeof object.editionDeprecated === "number") { + message.editionDeprecated = object.editionDeprecated; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionDeprecated = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionDeprecated = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionDeprecated = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionDeprecated = 999; + break; + case "EDITION_2023": + case 1000: + message.editionDeprecated = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionDeprecated = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionDeprecated = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionDeprecated = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionDeprecated = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionDeprecated = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionDeprecated = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionDeprecated = 2147483647; + break; + } + if (object.deprecationWarning != null) + message.deprecationWarning = String(object.deprecationWarning); + switch (object.editionRemoved) { + default: + if (typeof object.editionRemoved === "number") { + message.editionRemoved = object.editionRemoved; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionRemoved = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionRemoved = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionRemoved = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionRemoved = 999; + break; + case "EDITION_2023": + case 1000: + message.editionRemoved = 1000; break; case "EDITION_2024": case 1001: - 
message.edition = 1001; + message.editionRemoved = 1001; break; case "EDITION_1_TEST_ONLY": case 1: - message.edition = 1; + message.editionRemoved = 1; break; case "EDITION_2_TEST_ONLY": case 2: - message.edition = 2; + message.editionRemoved = 2; break; case "EDITION_99997_TEST_ONLY": case 99997: - message.edition = 99997; + message.editionRemoved = 99997; break; case "EDITION_99998_TEST_ONLY": case 99998: - message.edition = 99998; + message.editionRemoved = 99998; break; case "EDITION_99999_TEST_ONLY": case 99999: - message.edition = 99999; + message.editionRemoved = 99999; break; case "EDITION_MAX": case 2147483647: - message.edition = 2147483647; + message.editionRemoved = 2147483647; break; } - if (object.value != null) - message.value = String(object.value); return message; }; /** - * Creates a plain object from an EditionDefault message. Also converts values to other types if specified. + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.FieldOptions.EditionDefault + * @memberof google.protobuf.FieldOptions.FeatureSupport * @static - * @param {google.protobuf.FieldOptions.EditionDefault} message EditionDefault + * @param {google.protobuf.FieldOptions.FeatureSupport} message FeatureSupport * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - EditionDefault.toObject = function toObject(message, options) { + FeatureSupport.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - object.value = ""; - object.edition = options.enums === String ? "EDITION_UNKNOWN" : 0; - } - if (message.value != null && message.hasOwnProperty("value")) - object.value = message.value; - if (message.edition != null && message.hasOwnProperty("edition")) - object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + object.editionIntroduced = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.editionDeprecated = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.deprecationWarning = ""; + object.editionRemoved = options.enums === String ? "EDITION_UNKNOWN" : 0; + } + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + object.editionIntroduced = options.enums === String ? $root.google.protobuf.Edition[message.editionIntroduced] === undefined ? message.editionIntroduced : $root.google.protobuf.Edition[message.editionIntroduced] : message.editionIntroduced; + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + object.editionDeprecated = options.enums === String ? $root.google.protobuf.Edition[message.editionDeprecated] === undefined ? message.editionDeprecated : $root.google.protobuf.Edition[message.editionDeprecated] : message.editionDeprecated; + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + object.deprecationWarning = message.deprecationWarning; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + object.editionRemoved = options.enums === String ? $root.google.protobuf.Edition[message.editionRemoved] === undefined ? message.editionRemoved : $root.google.protobuf.Edition[message.editionRemoved] : message.editionRemoved; return object; }; /** - * Converts this EditionDefault to JSON. 
+ * Converts this FeatureSupport to JSON. * @function toJSON - * @memberof google.protobuf.FieldOptions.EditionDefault + * @memberof google.protobuf.FieldOptions.FeatureSupport * @instance * @returns {Object.} JSON object */ - EditionDefault.prototype.toJSON = function toJSON() { + FeatureSupport.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; /** - * Gets the default type url for EditionDefault + * Gets the default type url for FeatureSupport * @function getTypeUrl - * @memberof google.protobuf.FieldOptions.EditionDefault + * @memberof google.protobuf.FieldOptions.FeatureSupport * @static * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") * @returns {string} The default type url */ - EditionDefault.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + FeatureSupport.getTypeUrl = function getTypeUrl(typeUrlPrefix) { if (typeUrlPrefix === undefined) { typeUrlPrefix = "type.googleapis.com"; } - return typeUrlPrefix + "/google.protobuf.FieldOptions.EditionDefault"; + return typeUrlPrefix + "/google.protobuf.FieldOptions.FeatureSupport"; }; - return EditionDefault; + return FeatureSupport; })(); return FieldOptions; @@ -82245,6 +83514,7 @@ * @property {boolean|null} [deprecated] EnumValueOptions deprecated * @property {google.protobuf.IFeatureSet|null} [features] EnumValueOptions features * @property {boolean|null} [debugRedact] EnumValueOptions debugRedact + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] EnumValueOptions featureSupport * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption */ @@ -82288,6 +83558,14 @@ */ EnumValueOptions.prototype.debugRedact = false; + /** + * EnumValueOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.featureSupport = null; + /** * EnumValueOptions uninterpretedOption. 
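
The hunk above threads the same `FieldOptions.FeatureSupport` message into `EnumValueOptions` as field 4, so a deprecated enum value can carry the same edition metadata as a field; the matching `fromObject` change appears just below. A sketch (values invented):

```js
const { EnumValueOptions, FieldOptions } = $root.google.protobuf;

const opts = EnumValueOptions.fromObject({
  deprecated: true,
  featureSupport: { editionDeprecated: "EDITION_2024" },
});
console.log(opts.featureSupport instanceof FieldOptions.FeatureSupport); // true
```
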
* @member {Array.} uninterpretedOption @@ -82326,6 +83604,8 @@ $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.debugRedact); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -82377,6 +83657,10 @@ message.debugRedact = reader.bool(); break; } + case 4: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -82429,6 +83713,11 @@ if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) if (typeof message.debugRedact !== "boolean") return "debugRedact: boolean expected"; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." + error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -82462,6 +83751,11 @@ } if (object.debugRedact != null) message.debugRedact = Boolean(object.debugRedact); + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); @@ -82494,6 +83788,7 @@ object.deprecated = false; object.features = null; object.debugRedact = false; + object.featureSupport = null; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; @@ -82501,6 +83796,8 @@ object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) object.debugRedact = message.debugRedact; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -83996,6 +85293,8 @@ * @property {google.protobuf.FeatureSet.Utf8Validation|null} [utf8Validation] FeatureSet utf8Validation * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat + * @property 
{google.protobuf.FeatureSet.EnforceNamingStyle|null} [enforceNamingStyle] FeatureSet enforceNamingStyle + * @property {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null} [defaultSymbolVisibility] FeatureSet defaultSymbolVisibility */ /** @@ -84061,6 +85360,22 @@ */ FeatureSet.prototype.jsonFormat = 0; + /** + * FeatureSet enforceNamingStyle. + * @member {google.protobuf.FeatureSet.EnforceNamingStyle} enforceNamingStyle + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.enforceNamingStyle = 0; + + /** + * FeatureSet defaultSymbolVisibility. + * @member {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility} defaultSymbolVisibility + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.defaultSymbolVisibility = 0; + /** * Creates a new FeatureSet instance using the specified properties. * @function create @@ -84097,6 +85412,10 @@ writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); + if (message.enforceNamingStyle != null && Object.hasOwnProperty.call(message, "enforceNamingStyle")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.enforceNamingStyle); + if (message.defaultSymbolVisibility != null && Object.hasOwnProperty.call(message, "defaultSymbolVisibility")) + writer.uint32(/* id 8, wireType 0 =*/64).int32(message.defaultSymbolVisibility); return writer; }; @@ -84157,6 +85476,14 @@ message.jsonFormat = reader.int32(); break; } + case 7: { + message.enforceNamingStyle = reader.int32(); + break; + } + case 8: { + message.defaultSymbolVisibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -84247,6 +85574,26 @@ case 2: break; } + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + switch (message.enforceNamingStyle) { + default: + return "enforceNamingStyle: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + switch (message.defaultSymbolVisibility) { + default: + return "defaultSymbolVisibility: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + break; + } return null; }; @@ -84386,6 +85733,54 @@ message.jsonFormat = 2; break; } + switch (object.enforceNamingStyle) { + default: + if (typeof object.enforceNamingStyle === "number") { + message.enforceNamingStyle = object.enforceNamingStyle; + break; + } + break; + case "ENFORCE_NAMING_STYLE_UNKNOWN": + case 0: + message.enforceNamingStyle = 0; + break; + case "STYLE2024": + case 1: + message.enforceNamingStyle = 1; + break; + case "STYLE_LEGACY": + case 2: + message.enforceNamingStyle = 2; + break; + } + switch (object.defaultSymbolVisibility) { + default: + if (typeof object.defaultSymbolVisibility === "number") { + message.defaultSymbolVisibility = object.defaultSymbolVisibility; + break; + } + break; + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + case 0: + message.defaultSymbolVisibility = 0; + break; + case "EXPORT_ALL": + case 1: + message.defaultSymbolVisibility = 1; + break; + case "EXPORT_TOP_LEVEL": + case 2: + message.defaultSymbolVisibility = 2; + break; + case "LOCAL_ALL": + case 3: + message.defaultSymbolVisibility = 3; + break; + case "STRICT": + case 4: + message.defaultSymbolVisibility = 4; + break; + } return message; }; @@ -84409,6 +85804,8 @@ 
object.utf8Validation = options.enums === String ? "UTF8_VALIDATION_UNKNOWN" : 0; object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; + object.enforceNamingStyle = options.enums === String ? "ENFORCE_NAMING_STYLE_UNKNOWN" : 0; + object.defaultSymbolVisibility = options.enums === String ? "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN" : 0; } if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; @@ -84422,6 +85819,10 @@ object.messageEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + object.enforceNamingStyle = options.enums === String ? $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] === undefined ? message.enforceNamingStyle : $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] : message.enforceNamingStyle; + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + object.defaultSymbolVisibility = options.enums === String ? $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] === undefined ? message.defaultSymbolVisibility : $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] : message.defaultSymbolVisibility; return object; }; @@ -84549,6 +85950,219 @@ return values; })(); + /** + * EnforceNamingStyle enum. + * @name google.protobuf.FeatureSet.EnforceNamingStyle + * @enum {number} + * @property {number} ENFORCE_NAMING_STYLE_UNKNOWN=0 ENFORCE_NAMING_STYLE_UNKNOWN value + * @property {number} STYLE2024=1 STYLE2024 value + * @property {number} STYLE_LEGACY=2 STYLE_LEGACY value + */ + FeatureSet.EnforceNamingStyle = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ENFORCE_NAMING_STYLE_UNKNOWN"] = 0; + values[valuesById[1] = "STYLE2024"] = 1; + values[valuesById[2] = "STYLE_LEGACY"] = 2; + return values; + })(); + + FeatureSet.VisibilityFeature = (function() { + + /** + * Properties of a VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @interface IVisibilityFeature + */ + + /** + * Constructs a new VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @classdesc Represents a VisibilityFeature. 
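
As the `fromObject` switches earlier in this hunk show, either the enum name or its number is accepted for the two new `FeatureSet` fields, and `encode` then emits them as varints with ids 7 and 8. A sketch with invented values:

```js
const { FeatureSet } = $root.google.protobuf;

const fs = FeatureSet.fromObject({
  enforceNamingStyle: "STYLE2024", // -> 1
  defaultSymbolVisibility: 2,      // EXPORT_TOP_LEVEL
});
const buf = FeatureSet.encode(fs).finish(); // fields 7 and 8, wire type 0
```
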
+ * @implements IVisibilityFeature + * @constructor + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + */ + function VisibilityFeature(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new VisibilityFeature instance using the specified properties. + * @function create + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature instance + */ + VisibilityFeature.create = function create(properties) { + return new VisibilityFeature(properties); + }; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet.VisibilityFeature(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a VisibilityFeature message. + * @function verify + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + VisibilityFeature.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + */ + VisibilityFeature.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSet.VisibilityFeature) + return object; + return new $root.google.protobuf.FeatureSet.VisibilityFeature(); + }; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.VisibilityFeature} message VisibilityFeature + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + VisibilityFeature.toObject = function toObject() { + return {}; + }; + + /** + * Converts this VisibilityFeature to JSON. + * @function toJSON + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @instance + * @returns {Object.} JSON object + */ + VisibilityFeature.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for VisibilityFeature + * @function getTypeUrl + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + VisibilityFeature.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSet.VisibilityFeature"; + }; + + /** + * DefaultSymbolVisibility enum. 
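
`VisibilityFeature` above declares no fields of its own; it exists to namespace the `DefaultSymbolVisibility` enum, so its `encode` writes nothing and its `toObject` returns `{}`. Illustration, assuming this generated `$root` is loaded:

```js
const { VisibilityFeature } = $root.google.protobuf.FeatureSet;

const buf = VisibilityFeature.encode(VisibilityFeature.create()).finish();
console.log(buf.length); // 0
console.log(VisibilityFeature.toObject(new VisibilityFeature())); // {}
```
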
+ * @name google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility + * @enum {number} + * @property {number} DEFAULT_SYMBOL_VISIBILITY_UNKNOWN=0 DEFAULT_SYMBOL_VISIBILITY_UNKNOWN value + * @property {number} EXPORT_ALL=1 EXPORT_ALL value + * @property {number} EXPORT_TOP_LEVEL=2 EXPORT_TOP_LEVEL value + * @property {number} LOCAL_ALL=3 LOCAL_ALL value + * @property {number} STRICT=4 STRICT value + */ + VisibilityFeature.DefaultSymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"] = 0; + values[valuesById[1] = "EXPORT_ALL"] = 1; + values[valuesById[2] = "EXPORT_TOP_LEVEL"] = 2; + values[valuesById[3] = "LOCAL_ALL"] = 3; + values[valuesById[4] = "STRICT"] = 4; + return values; + })(); + + return VisibilityFeature; + })(); + return FeatureSet; })(); @@ -84733,6 +86347,7 @@ default: return "minimumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -84750,6 +86365,7 @@ default: return "maximumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -84798,6 +86414,10 @@ case 0: message.minimumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.minimumEdition = 900; + break; case "EDITION_PROTO2": case 998: message.minimumEdition = 998; @@ -84850,6 +86470,10 @@ case 0: message.maximumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.maximumEdition = 900; + break; case "EDITION_PROTO2": case 998: message.maximumEdition = 998; @@ -84958,7 +86582,8 @@ * @memberof google.protobuf.FeatureSetDefaults * @interface IFeatureSetEditionDefault * @property {google.protobuf.Edition|null} [edition] FeatureSetEditionDefault edition - * @property {google.protobuf.IFeatureSet|null} [features] FeatureSetEditionDefault features + * @property {google.protobuf.IFeatureSet|null} [overridableFeatures] FeatureSetEditionDefault overridableFeatures + * @property {google.protobuf.IFeatureSet|null} [fixedFeatures] FeatureSetEditionDefault fixedFeatures */ /** @@ -84985,12 +86610,20 @@ FeatureSetEditionDefault.prototype.edition = 0; /** - * FeatureSetEditionDefault features. - * @member {google.protobuf.IFeatureSet|null|undefined} features + * FeatureSetEditionDefault overridableFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} overridableFeatures + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + */ + FeatureSetEditionDefault.prototype.overridableFeatures = null; + + /** + * FeatureSetEditionDefault fixedFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} fixedFeatures * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault * @instance */ - FeatureSetEditionDefault.prototype.features = null; + FeatureSetEditionDefault.prototype.fixedFeatures = null; /** * Creates a new FeatureSetEditionDefault instance using the specified properties. 
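
This hunk retires the single `features` submessage (old field 2) of `FeatureSetEditionDefault` in favor of `overridableFeatures` (field 4) and `fixedFeatures` (field 5), both `FeatureSet` values, as the encode/decode changes below show. A sketch of the new shape (payload invented):

```js
const { FeatureSetEditionDefault } = $root.google.protobuf.FeatureSetDefaults;

const d = FeatureSetEditionDefault.fromObject({
  edition: "EDITION_2023",
  overridableFeatures: { jsonFormat: 1 }, // ALLOW
  fixedFeatures: {},
});
const buf = FeatureSetEditionDefault.encode(d).finish(); // writes ids 3, 4, 5
```
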
@@ -85016,10 +86649,12 @@ FeatureSetEditionDefault.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.features != null && Object.hasOwnProperty.call(message, "features")) - $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); + if (message.overridableFeatures != null && Object.hasOwnProperty.call(message, "overridableFeatures")) + $root.google.protobuf.FeatureSet.encode(message.overridableFeatures, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.fixedFeatures != null && Object.hasOwnProperty.call(message, "fixedFeatures")) + $root.google.protobuf.FeatureSet.encode(message.fixedFeatures, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; @@ -85060,8 +86695,12 @@ message.edition = reader.int32(); break; } - case 2: { - message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + case 4: { + message.overridableFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 5: { + message.fixedFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } default: @@ -85104,6 +86743,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -85116,10 +86756,15 @@ case 2147483647: break; } - if (message.features != null && message.hasOwnProperty("features")) { - var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.overridableFeatures); + if (error) + return "overridableFeatures." + error; + } + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.fixedFeatures); if (error) - return "features." + error; + return "fixedFeatures." + error; } return null; }; @@ -85147,6 +86792,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -85188,10 +86837,15 @@ message.edition = 2147483647; break; } - if (object.features != null) { - if (typeof object.features !== "object") - throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features: object expected"); - message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + if (object.overridableFeatures != null) { + if (typeof object.overridableFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridableFeatures: object expected"); + message.overridableFeatures = $root.google.protobuf.FeatureSet.fromObject(object.overridableFeatures); + } + if (object.fixedFeatures != null) { + if (typeof object.fixedFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixedFeatures: object expected"); + message.fixedFeatures = $root.google.protobuf.FeatureSet.fromObject(object.fixedFeatures); } return message; }; @@ -85210,13 +86864,16 @@ options = {}; var object = {}; if (options.defaults) { - object.features = null; object.edition = options.enums === String ? 
"EDITION_UNKNOWN" : 0; + object.overridableFeatures = null; + object.fixedFeatures = null; } - if (message.features != null && message.hasOwnProperty("features")) - object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) + object.overridableFeatures = $root.google.protobuf.FeatureSet.toObject(message.overridableFeatures, options); + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) + object.fixedFeatures = $root.google.protobuf.FeatureSet.toObject(message.fixedFeatures, options); return object; }; @@ -86431,6 +88088,22 @@ return GeneratedCodeInfo; })(); + /** + * SymbolVisibility enum. + * @name google.protobuf.SymbolVisibility + * @enum {number} + * @property {number} VISIBILITY_UNSET=0 VISIBILITY_UNSET value + * @property {number} VISIBILITY_LOCAL=1 VISIBILITY_LOCAL value + * @property {number} VISIBILITY_EXPORT=2 VISIBILITY_EXPORT value + */ + protobuf.SymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "VISIBILITY_UNSET"] = 0; + values[valuesById[1] = "VISIBILITY_LOCAL"] = 1; + values[valuesById[2] = "VISIBILITY_EXPORT"] = 2; + return values; + })(); + protobuf.Duration = (function() { /** diff --git a/protos/protos.json b/protos/protos.json index cac4aeace..040ff7975 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -6866,8 +6866,7 @@ "java_multiple_files": true, "java_outer_classname": "RoutingProto", "java_package": "com.google.api", - "objc_class_prefix": "GAPI", - "cc_enable_arenas": true + "objc_class_prefix": "GAPI" }, "nested": { "http": { @@ -6991,6 +6990,10 @@ "rule": "repeated", "type": "ClientLibraryDestination", "id": 2 + }, + "selectiveGapicGeneration": { + "type": "SelectiveGapicGeneration", + "id": 3 } } }, @@ -7131,6 +7134,28 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "experimentalFeatures": { + "type": "ExperimentalFeatures", + "id": 2 + } + }, + "nested": { + "ExperimentalFeatures": { + "fields": { + "restAsyncIoEnabled": { + "type": "bool", + "id": 1 + }, + "protobufPythonicTypesEnabled": { + "type": "bool", + "id": 2 + }, + "unversionedPackageDisabled": { + "type": "bool", + "id": 3 + } + } } } }, @@ -7188,6 +7213,11 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "renamedServices": { + "keyType": "string", + "type": "string", + "id": 2 } } }, @@ -7249,6 +7279,19 @@ "PACKAGE_MANAGER": 20 } }, + "SelectiveGapicGeneration": { + "fields": { + "methods": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "generateOmittedAsInternal": { + "type": "bool", + "id": 2 + } + } + }, "LaunchStage": { "values": { "LAUNCH_STAGE_UNSPECIFIED": 0, @@ -7407,12 +7450,19 @@ "type": "FileDescriptorProto", "id": 1 } - } + }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ] }, "Edition": { "edition": "proto2", "values": { "EDITION_UNKNOWN": 0, + "EDITION_LEGACY": 900, "EDITION_PROTO2": 998, "EDITION_PROTO3": 999, "EDITION_2023": 1000, @@ -7451,6 +7501,11 @@ "type": "int32", "id": 11 }, + "optionDependency": { + "rule": "repeated", + "type": "string", + "id": 15 + }, "messageType": { "rule": "repeated", "type": "DescriptorProto", @@ -7539,6 +7594,10 @@ "rule": 
"repeated", "type": "string", "id": 10 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 11 } }, "nested": { @@ -7764,6 +7823,10 @@ "rule": "repeated", "type": "string", "id": 5 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 6 } }, "nested": { @@ -7978,6 +8041,7 @@ 42, 42 ], + "php_generic_services", [ 38, 38 @@ -8113,7 +8177,8 @@ "type": "bool", "id": 10, "options": { - "default": false + "default": false, + "deprecated": true } }, "debugRedact": { @@ -8141,6 +8206,10 @@ "type": "FeatureSet", "id": 21 }, + "featureSupport": { + "type": "FeatureSupport", + "id": 22 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -8210,6 +8279,26 @@ "id": 2 } } + }, + "FeatureSupport": { + "fields": { + "editionIntroduced": { + "type": "Edition", + "id": 1 + }, + "editionDeprecated": { + "type": "Edition", + "id": 2 + }, + "deprecationWarning": { + "type": "string", + "id": 3 + }, + "editionRemoved": { + "type": "Edition", + "id": 4 + } + } } } }, @@ -8298,6 +8387,10 @@ "default": false } }, + "featureSupport": { + "type": "FieldOptions.FeatureSupport", + "id": 4 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -8440,6 +8533,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_2023", "edition_defaults.value": "EXPLICIT" } @@ -8450,6 +8544,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "OPEN" } @@ -8460,6 +8555,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "PACKED" } @@ -8470,6 +8566,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "VERIFY" } @@ -8480,7 +8577,8 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "EDITION_PROTO2", + "feature_support.edition_introduced": "EDITION_2023", + "edition_defaults.edition": "EDITION_LEGACY", "edition_defaults.value": "LENGTH_PREFIXED" } }, @@ -8490,27 +8588,38 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "ALLOW" } + }, + "enforceNamingStyle": { + "type": "EnforceNamingStyle", + "id": 7, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_METHOD", + "feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "STYLE2024" + } + }, + "defaultSymbolVisibility": { + "type": "VisibilityFeature.DefaultSymbolVisibility", + "id": 8, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "EXPORT_TOP_LEVEL" + } } }, "extensions": [ [ 1000, - 1000 - ], - [ - 1001, - 1001 - ], - [ - 1002, - 1002 - ], - [ - 9990, - 9990 + 9994 ], [ 9995, @@ -8555,7 +8664,13 @@ "UTF8_VALIDATION_UNKNOWN": 0, "VERIFY": 2, "NONE": 3 - } + }, + 
"reserved": [ + [ + 1, + 1 + ] + ] }, "MessageEncoding": { "values": { @@ -8570,6 +8685,33 @@ "ALLOW": 1, "LEGACY_BEST_EFFORT": 2 } + }, + "EnforceNamingStyle": { + "values": { + "ENFORCE_NAMING_STYLE_UNKNOWN": 0, + "STYLE2024": 1, + "STYLE_LEGACY": 2 + } + }, + "VisibilityFeature": { + "fields": {}, + "reserved": [ + [ + 1, + 536870911 + ] + ], + "nested": { + "DefaultSymbolVisibility": { + "values": { + "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": 0, + "EXPORT_ALL": 1, + "EXPORT_TOP_LEVEL": 2, + "LOCAL_ALL": 3, + "STRICT": 4 + } + } + } } } }, @@ -8597,11 +8739,26 @@ "type": "Edition", "id": 3 }, - "features": { + "overridableFeatures": { "type": "FeatureSet", - "id": 2 + "id": 4 + }, + "fixedFeatures": { + "type": "FeatureSet", + "id": 5 } - } + }, + "reserved": [ + [ + 1, + 1 + ], + [ + 2, + 2 + ], + "features" + ] } } }, @@ -8614,6 +8771,12 @@ "id": 1 } }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ], "nested": { "Location": { "fields": { @@ -8699,6 +8862,14 @@ } } }, + "SymbolVisibility": { + "edition": "proto2", + "values": { + "VISIBILITY_UNSET": 0, + "VISIBILITY_LOCAL": 1, + "VISIBILITY_EXPORT": 2 + } + }, "Duration": { "fields": { "seconds": { @@ -8825,13 +8996,13 @@ "nested": { "v1": { "options": { - "cc_enable_arenas": true, "csharp_namespace": "Google.Cloud.Iam.V1", "go_package": "cloud.google.com/go/iam/apiv1/iampb;iampb", "java_multiple_files": true, "java_outer_classname": "PolicyProto", "java_package": "com.google.iam.v1", - "php_namespace": "Google\\Cloud\\Iam\\V1" + "php_namespace": "Google\\Cloud\\Iam\\V1", + "cc_enable_arenas": true }, "nested": { "IAMPolicy": { @@ -9172,6 +9343,7 @@ "java_multiple_files": true, "java_outer_classname": "OperationsProto", "java_package": "com.google.longrunning", + "objc_class_prefix": "GLRUN", "php_namespace": "Google\\LongRunning" }, "nested": { diff --git a/protos/test_proxy.proto b/protos/test_proxy.proto index 551dd4d8b..f919dc476 100644 --- a/protos/test_proxy.proto +++ b/protos/test_proxy.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -22,12 +22,43 @@ import "google/bigtable/v2/data.proto"; import "google/protobuf/duration.proto"; import "google/rpc/status.proto"; -option go_package = "./testproxypb"; +option go_package = "cloud.google.com/go/bigtable/testproxy/testproxypb;testproxypb"; option java_multiple_files = true; option java_package = "com.google.cloud.bigtable.testproxy"; +// A config flag that dictates how the optional features should be enabled +// during the client creation. The optional features customize how the client +// interacts with the server, and are defined in +// https://github.com/googleapis/googleapis/blob/master/google/bigtable/v2/feature_flags.proto +enum OptionalFeatureConfig { + OPTIONAL_FEATURE_CONFIG_DEFAULT = 0; + + OPTIONAL_FEATURE_CONFIG_ENABLE_ALL = 1; +} + // Request to test proxy service to create a client object. message CreateClientRequest { + message SecurityOptions { + // Access token to use for client credentials. If empty, the client will not + // use any call credentials. Certain implementations may require `use_ssl` + // to be set when using this. + string access_token = 1; + + // Whether to use SSL channel credentials when connecting to the data + // endpoint. 
+    bool use_ssl = 2;
+
+    // If using SSL channel credentials, override the SSL endpoint to match the
+    // host that is specified in the backend's certificate. Also sets the
+    // client's authority header value.
+    string ssl_endpoint_override = 3;
+
+    // PEM encoding of the server root certificates. If not set, the default
+    // root certs will be used instead. The default can be overridden via the
+    // GRPC_DEFAULT_SSL_ROOTS_FILE_PATH env var.
+    string ssl_root_certs_pem = 4;
+  }
+
   // A unique ID associated with the client object to be created.
   string client_id = 1;

@@ -52,6 +83,21 @@ message CreateClientRequest {
   // the created client. Otherwise, the default timeout from the client library
   // will be used. Note that the override applies to all the methods.
   google.protobuf.Duration per_operation_timeout = 6;
+
+  // Optional config that dictates how the optional features should be enabled
+  // during the client creation. Please check the enum type's docstring above.
+  OptionalFeatureConfig optional_feature_config = 7;
+
+  // Options to allow connecting to backends with channel and/or call
+  // credentials. This is needed internally by Cloud Bigtable's own testing
+  // frameworks. It is not necessary to support these fields for client
+  // conformance testing.
+  //
+  // WARNING: this allows the proxy to connect to a real production
+  // CBT backend with the right options, however, the proxy itself is insecure
+  // so it is not recommended to use it with real credentials or outside testing
+  // contexts.
+  SecurityOptions security_options = 8;
 }

 // Response from test proxy service for CreateClientRequest.
@@ -203,6 +249,39 @@ message ReadModifyWriteRowRequest {
   google.bigtable.v2.ReadModifyWriteRowRequest request = 2;
 }

+// Request to test proxy service to execute a query.
+message ExecuteQueryRequest {
+  // The ID of the target client object.
+  string client_id = 1;
+
+  // The raw request to the Bigtable server.
+  google.bigtable.v2.ExecuteQueryRequest request = 2;
+}
+
+// Response from test proxy service for ExecuteQueryRequest.
+message ExecuteQueryResult {
+  // The RPC status from the client binding.
+  google.rpc.Status status = 1;
+
+  // Name and type information for the query result.
+  ResultSetMetadata metadata = 4;
+
+  // Encoded version of the ResultSet. Should not contain type information.
+  repeated SqlRow rows = 3;
+}
+
+// Schema information for the query result.
+message ResultSetMetadata {
+  // Column metadata for each column in the query result.
+  repeated google.bigtable.v2.ColumnMetadata columns = 1;
+}
+
+// Representation of a single row in the query result.
+message SqlRow {
+  // Columnar values returned by the query.
+  repeated google.bigtable.v2.Value values = 1;
+}
+
 // Note that all RPCs are unary, even when the equivalent client binding call
 // may be streaming. This is an intentional simplification.
 //
@@ -265,4 +344,7 @@ service CloudBigtableV2TestProxy {
   // Performs a read-modify-write operation with the client.
   rpc ReadModifyWriteRow(ReadModifyWriteRowRequest) returns (RowResult) {}
-}
+
+  // Executes a BTQL query with the client.
+ rpc ExecuteQuery(ExecuteQueryRequest) returns (ExecuteQueryResult) {} +} \ No newline at end of file diff --git a/samples/api-reference-doc-snippets/instance.js b/samples/api-reference-doc-snippets/instance.js index 7894c8be4..5683ea94c 100644 --- a/samples/api-reference-doc-snippets/instance.js +++ b/samples/api-reference-doc-snippets/instance.js @@ -362,6 +362,97 @@ const snippets = { }); // [END bigtable_api_del_instance] }, + + executeQuery: (instanceId, tableId) => { + // [START bigtable_api_execute_query] + const {Bigtable} = require('@google-cloud/bigtable'); + const bigtable = new Bigtable(); + const instance = bigtable.instance(instanceId); + + const query = `SELECT + _key + from \`${tableId}\` WHERE _key=@row_key`; + const parameters = { + row_key: 'alincoln', + }; + + const parameterTypes = { + row_key: Bigtable.SqlTypes.String(), + }; + + const prepareStatementOptions = { + query, + parameterTypes, + }; + + instance + .prepareStatement(prepareStatementOptions) + .then(([preparedStatement]) => + instance.executeQuery({ + preparedStatement, + parameters, + }), + ) + .then(result => { + const rows = result[0]; + }) + .catch(err => { + // Handle errors + }); + + // [END bigtable_api_execute_query] + }, + + createExecuteQueryStream: (instanceId, tableId) => { + // [START bigtable_api_create_query_stream] + const {Bigtable} = require('@google-cloud/bigtable'); + const bigtable = new Bigtable(); + const instance = bigtable.instance(instanceId); + + const query = `SELECT + _key + from \`${tableId}\` WHERE _key=@row_key`; + const parameters = { + row_key: 'alincoln', + }; + const parameterTypes = { + row_key: Bigtable.ExecuteQueryTypes.String(), + }; + + const prepareStatementOptions = { + query, + parameterTypes, + }; + instance + .prepareStatement(prepareStatementOptions) + .then(preparedStatement => { + instance + .createExecuteQueryStream({ + preparedStatement, + parameters, + }) + .on('error', err => { + // Handle the error. + }) + .on('data', row => { + // `row` is a QueryResultRow object. + }) + .on('end', () => { + // All rows retrieved. + }); + }); + + // If you anticipate many results, you can end a stream early to prevent + // unnecessary processing. + //- + // instance + // .createExecuteQueryStream(options) + // .on('data', function (row) { + // this.end(); + // }); + + // [END bigtable_api_create_query_stream] + }, }; module.exports = snippets; diff --git a/samples/package.json b/samples/package.json index b970a8513..26f229a44 100644 --- a/samples/package.json +++ b/samples/package.json @@ -14,7 +14,7 @@ "node": ">=18" }, "dependencies": { - "@google-cloud/bigtable": "^6.1.0", + "@google-cloud/bigtable": "^6.2.0", "uuid": "^9.0.0", "yargs": "^16.0.0" }, diff --git a/src/client-side-metrics/client-side-metrics-attributes.ts b/src/client-side-metrics/client-side-metrics-attributes.ts index f5fbf911d..1946cacdd 100644 --- a/src/client-side-metrics/client-side-metrics-attributes.ts +++ b/src/client-side-metrics/client-side-metrics-attributes.ts @@ -25,6 +25,7 @@ export enum StreamingState { * metrics, allowing for differentiation of performance by method. 
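 * Each value mirrors the fully-qualified Bigtable RPC name (for example
 * 'Bigtable.ReadRows') that is attached to exported metrics as the method
 * attribute.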
  */
 export enum MethodName {
+  READ_ROW = 'Bigtable.ReadRow',
   READ_ROWS = 'Bigtable.ReadRows',
   MUTATE_ROW = 'Bigtable.MutateRow',
   CHECK_AND_MUTATE_ROW = 'Bigtable.CheckAndMutateRow',
diff --git a/src/client-side-metrics/exporter.ts b/src/client-side-metrics/exporter.ts
index cf72dadd8..4f373ef36 100644
--- a/src/client-side-metrics/exporter.ts
+++ b/src/client-side-metrics/exporter.ts
@@ -19,11 +19,10 @@ import {
   Histogram,
   ResourceMetrics,
 } from '@opentelemetry/sdk-metrics';
-import {grpc, ServiceError} from 'google-gax';
+import {ClientOptions, ServiceError} from 'google-gax';
 import {MetricServiceClient} from '@google-cloud/monitoring';
 import {google} from '@google-cloud/monitoring/build/protos/protos';
 import ICreateTimeSeriesRequest = google.monitoring.v3.ICreateTimeSeriesRequest;
-import {RetryOptions} from 'google-gax';

 export interface ExportResult {
   code: number;
@@ -119,7 +118,7 @@ function getIntegerPoints(dataPoint: DataPoint) {
  * getResource gets the resource object which is used for building the timeseries
  * object that will be sent to Google Cloud Monitoring dashboard
  *
- * @param {string} metricName The backend name of the metric that we want to record
+ * @param {string} projectId The id of the project
  * @param {DataPoint} dataPoint The datapoint containing the data we wish to
  * send to the Google Cloud Monitoring dashboard
  */
@@ -184,6 +183,7 @@ function getMetric(
  * metric attributes, data points, and aggregation information, into an object
  * that conforms to the expected request format of the Cloud Monitoring API.
  *
+ * @param projectId
  * @param {ResourceMetrics} exportArgs - The OpenTelemetry metrics data to be converted. This
  *   object contains resource attributes, scope information, and a list of
  *   metrics with their associated data points.
@@ -211,14 +211,10 @@ function getMetric(
  *
  *
  */
-export function metricsToRequest(exportArgs: ResourceMetrics) {
-  type WithSyncAttributes = {_syncAttributes: {[index: string]: string}};
-  const resourcesWithSyncAttributes =
-    exportArgs.resource as unknown as WithSyncAttributes;
-  const projectId =
-    resourcesWithSyncAttributes._syncAttributes[
-      'monitored_resource.project_id'
-    ];
+export function metricsToRequest(
+  projectId: string,
+  exportArgs: ResourceMetrics,
+) {
   const timeSeriesArray = [];
   for (const scopeMetrics of exportArgs.scopeMetrics) {
     for (const scopeMetric of scopeMetrics.metrics) {
@@ -297,49 +293,33 @@ export function metricsToRequest(exportArgs: ResourceMetrics) {
  * @beta
  */
 export class CloudMonitoringExporter extends MetricExporter {
-  private monitoringClient = new MetricServiceClient();
+  private client: MetricServiceClient;

-  export(
+  constructor(options: ClientOptions) {
+    super();
+    if (options && options.apiEndpoint) {
+      // We want the MetricServiceClient to always hit its default endpoint.
+      delete options.apiEndpoint;
+    }
+    this.client = new MetricServiceClient(options);
+  }
+
+  async export(
     metrics: ResourceMetrics,
     resultCallback: (result: ExportResult) => void,
-  ): void {
+  ): Promise<void> {
     (async () => {
       try {
-        const request = metricsToRequest(metrics);
-        // In order to manage the "One or more points were written more
-        // frequently than the maximum sampling period configured for the
-        // metric." error we should have the metric service client retry a few
-        // times to ensure the metrics do get written.
-        //
-        // We use all the usual retry codes plus INVALID_ARGUMENT (code 3)
-        // because INVALID ARGUMENT (code 3) corresponds to the maximum
-        // sampling error.
-        const retry = new RetryOptions(
-          [
-            grpc.status.INVALID_ARGUMENT,
-            grpc.status.DEADLINE_EXCEEDED,
-            grpc.status.RESOURCE_EXHAUSTED,
-            grpc.status.ABORTED,
-            grpc.status.UNAVAILABLE,
-          ],
-          {
-            initialRetryDelayMillis: 5000,
-            retryDelayMultiplier: 2,
-            maxRetryDelayMillis: 50000,
-          },
-        );
-        await this.monitoringClient.createTimeSeries(
+        const projectId = await this.client.getProjectId();
+        const request = metricsToRequest(projectId, metrics);
+        await this.client.createServiceTimeSeries(
           request as ICreateTimeSeriesRequest,
-          {
-            retry,
-          },
         );
         // The resultCallback typically accepts a value equal to {code: x}
         // for some value x along with other info. When the code is equal to 0
         // then the operation completed successfully. When the code is not equal
-        // to 0 then the operation failed. Open telemetry logs errors to the
-        // console when the resultCallback passes in non-zero code values and
-        // logs nothing when the code is 0.
+        // to 0 then the operation failed. The resultCallback will not log
+        // anything to the console whether the error code was 0 or not.
         resultCallback({code: 0});
       } catch (error) {
         resultCallback(error as ServiceError);
diff --git a/src/client-side-metrics/gcp-metrics-handler.ts b/src/client-side-metrics/gcp-metrics-handler.ts
index 37fa4adea..73ebaf37a 100644
--- a/src/client-side-metrics/gcp-metrics-handler.ts
+++ b/src/client-side-metrics/gcp-metrics-handler.ts
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+import {CloudMonitoringExporter} from './exporter';
 import {
   IMetricsHandler,
   OnAttemptCompleteData,
@@ -20,6 +21,7 @@
 import * as Resources from '@opentelemetry/resources';
 import * as ResourceUtil from '@google-cloud/opentelemetry-resource-util';
 import {PushMetricExporter, View} from '@opentelemetry/sdk-metrics';
+import {ClientOptions} from 'google-gax';
 const {
   Aggregation,
   ExplicitBucketHistogramAggregation,
@@ -27,6 +29,31 @@
   Histogram,
   PeriodicExportingMetricReader,
 } = require('@opentelemetry/sdk-metrics');
+import * as os from 'os';
+import * as crypto from 'crypto';
+
+/**
+ * Generates a unique client identifier string.
+ *
+ * This function creates a client identifier that incorporates the hostname,
+ * process ID, and a UUID to ensure uniqueness across different client instances
+ * and processes. The identifier follows the pattern:
+ *
+ * `node-<uuid>-<pid><hostname>`
+ *
+ * where:
+ * - `<uuid>` is a randomly generated UUID (version 4).
+ * - `<pid>` is the process ID of the current Node.js process.
+ * - `<hostname>` is the hostname of the machine.
+ *
+ * @returns {string} A unique client identifier string.
+ */
+function generateClientUuid() {
+  const hostname = os.hostname() || 'localhost';
+  const currentPid = process.pid || '';
+  const uuid4 = crypto.randomUUID();
+  return `node-${uuid4}-${currentPid}${hostname}`;
+}

 /**
  * A collection of OpenTelemetry metric instruments used to record
@@ -47,10 +74,9 @@ interface MetricsInstruments {
  * This method gets the open telemetry instruments that will store GCP metrics
  * for a particular project.
  *
- * @param projectId The project for which the instruments will be stored.
  * @param exporter The exporter the metrics will be sent to.
*/ -function createInstruments(projectId: string, exporter: PushMetricExporter) { +function createInstruments(exporter: PushMetricExporter): MetricsInstruments { const latencyBuckets = [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0, 13.0, 16.0, 20.0, 25.0, 30.0, 40.0, 50.0, 65.0, 80.0, 100.0, 130.0, 160.0, 200.0, 250.0, 300.0, 400.0, @@ -71,7 +97,7 @@ function createInstruments(projectId: string, exporter: PushMetricExporter) { new View({ instrumentName: name, name, - aggregation: name.endsWith('latencies') + aggregation: !name.endsWith('latencies') ? Aggregation.Sum() : new ExplicitBucketHistogramAggregation(latencyBuckets), }), @@ -80,7 +106,6 @@ function createInstruments(projectId: string, exporter: PushMetricExporter) { views: viewList, resource: new Resources.Resource({ 'service.name': 'Cloud Bigtable Table', - 'monitored_resource.project_id': projectId, }).merge(new ResourceUtil.GcpDetectorSync().detect()), readers: [ // Register the exporter @@ -183,11 +208,8 @@ function createInstruments(projectId: string, exporter: PushMetricExporter) { * associating them with relevant attributes for detailed analysis in Cloud Monitoring. */ export class GCPMetricsHandler implements IMetricsHandler { - private exporter: PushMetricExporter; - // The variable below is the singleton map from projects to instrument stacks - // which exists so that we only create one instrument stack per project. This - // will eliminate errors due to the maximum sampling period. - static instrumentsForProject: {[projectId: string]: MetricsInstruments} = {}; + private otelInstruments: MetricsInstruments; + private clientUid: string; /** * The `GCPMetricsHandler` is responsible for managing and recording @@ -196,33 +218,11 @@ export class GCPMetricsHandler implements IMetricsHandler { * (histograms and counters) and exports them to Google Cloud Monitoring * through the provided `PushMetricExporter`. * - * @param exporter - The `PushMetricExporter` instance to use for exporting - * metrics to Google Cloud Monitoring. This exporter is responsible for - * sending the collected metrics data to the monitoring backend. The provided exporter must be fully configured, for example the projectId must have been set. */ - constructor(exporter: PushMetricExporter) { - this.exporter = exporter; - } - - /** - * Initializes the OpenTelemetry metrics instruments if they haven't been already. - * Creates and registers metric instruments (histograms and counters) for various Bigtable client metrics. - * Sets up a MeterProvider and configures a PeriodicExportingMetricReader for exporting metrics to Cloud Monitoring. - * - * which will be provided to the exporter in every export call. - * - */ - private getInstruments(projectId: string): MetricsInstruments { - // The projectId is needed per metrics handler because when the exporter is - // used it provides the project id for the name of the time series exported. - // ie. 
name: `projects/${....['monitored_resource.project_id']}`,
-    if (!GCPMetricsHandler.instrumentsForProject[projectId]) {
-      GCPMetricsHandler.instrumentsForProject[projectId] = createInstruments(
-        projectId,
-        this.exporter,
-      );
-    }
-    return GCPMetricsHandler.instrumentsForProject[projectId];
+  constructor(options: ClientOptions) {
+    this.clientUid = generateClientUuid();
+    const exporter = new CloudMonitoringExporter(options);
+    this.otelInstruments = createInstruments(exporter);
   }

   /**
@@ -231,11 +231,11 @@
    * @param {OnOperationCompleteData} data Data related to the completed operation.
    */
   onOperationComplete(data: OnOperationCompleteData) {
-    const otelInstruments = this.getInstruments(data.projectId);
+    const otelInstruments = this.otelInstruments;
     const commonAttributes = {
       app_profile: data.metricsCollectorData.app_profile,
       method: data.metricsCollectorData.method,
-      client_uid: data.metricsCollectorData.client_uid,
+      client_uid: this.clientUid,
       client_name: data.client_name,
       instanceId: data.metricsCollectorData.instanceId,
       table: data.metricsCollectorData.table,
@@ -271,11 +271,11 @@
    * @param {OnAttemptCompleteData} data Data related to the completed attempt.
    */
   onAttemptComplete(data: OnAttemptCompleteData) {
-    const otelInstruments = this.getInstruments(data.projectId);
+    const otelInstruments = this.otelInstruments;
     const commonAttributes = {
       app_profile: data.metricsCollectorData.app_profile,
       method: data.metricsCollectorData.method,
-      client_uid: data.metricsCollectorData.client_uid,
+      client_uid: this.clientUid,
       status: data.status,
       client_name: data.client_name,
       instanceId: data.metricsCollectorData.instanceId,
diff --git a/src/client-side-metrics/metrics-config-manager.ts b/src/client-side-metrics/metrics-config-manager.ts
new file mode 100644
index 000000000..a28d7f14f
--- /dev/null
+++ b/src/client-side-metrics/metrics-config-manager.ts
@@ -0,0 +1,44 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.

+import {IMetricsHandler} from './metrics-handler';
+import {
+  ITabularApiSurface,
+  OperationMetricsCollector,
+} from './operation-metrics-collector';
+import {MethodName, StreamingState} from './client-side-metrics-attributes';
+
+/**
+ * A class that manages the configuration and handlers used to record
+ * client-side metrics for Bigtable operations.
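+ * It attaches the configured IMetricsHandler instances to every
+ * OperationMetricsCollector created via createOperation.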
+ */
+export class ClientSideMetricsConfigManager {
+  private metricsHandlers: IMetricsHandler[];
+
+  constructor(handlers: IMetricsHandler[]) {
+    this.metricsHandlers = handlers;
+  }
+
+  createOperation(
+    methodName: MethodName,
+    streaming: StreamingState,
+    table: ITabularApiSurface,
+  ): OperationMetricsCollector {
+    return new OperationMetricsCollector(
+      table,
+      methodName,
+      streaming,
+      this.metricsHandlers,
+    );
+  }
+}
diff --git a/src/client-side-metrics/metrics-handler.ts b/src/client-side-metrics/metrics-handler.ts
index 6b4f0053e..6ce5bce12 100644
--- a/src/client-side-metrics/metrics-handler.ts
+++ b/src/client-side-metrics/metrics-handler.ts
@@ -13,7 +13,6 @@
 // limitations under the License.

 import {MethodName, StreamingState} from './client-side-metrics-attributes';
-import {grpc} from 'google-gax';

 /**
  * The interfaces below use undefined instead of null to indicate a metric is
@@ -28,11 +27,9 @@ type IMetricsCollectorData = {
   zone?: string;
   app_profile?: string;
   method: MethodName;
-  client_uid: string;
 };

 interface StandardData {
-  projectId: string;
   metricsCollectorData: IMetricsCollectorData;
   client_name: string;
   streaming: StreamingState;
diff --git a/src/client-side-metrics/operation-metrics-collector.ts b/src/client-side-metrics/operation-metrics-collector.ts
index d0da7e474..4851eddbb 100644
--- a/src/client-side-metrics/operation-metrics-collector.ts
+++ b/src/client-side-metrics/operation-metrics-collector.ts
@@ -13,13 +13,22 @@
 // limitations under the License.

 import * as fs from 'fs';
-import {IMetricsHandler} from './metrics-handler';
 import {MethodName, StreamingState} from './client-side-metrics-attributes';
-import {grpc} from 'google-gax';
+import {grpc, ServiceError} from 'google-gax';
 import * as gax from 'google-gax';
-const root = gax.protobuf.loadSync(
-  './protos/google/bigtable/v2/response_params.proto',
+import {AbortableDuplex, BigtableOptions} from '../index';
+import * as path from 'path';
+import {IMetricsHandler} from './metrics-handler';
+
+// When this environment variable is set then print any errors associated
+// with failures in the metrics collector.
+const METRICS_DEBUG = process.env.METRICS_DEBUG;
+
+const protoPath = path.join(
+  __dirname,
+  '../../protos/google/bigtable/v2/response_params.proto',
 );
+const root = gax.protobuf.loadSync(protoPath);
 const ResponseParams = root.lookupType('ResponseParams');

 const {hrtime} = require('node:process');
@@ -32,8 +41,10 @@ export interface ITabularApiSurface {
   };
   id: string;
   bigtable: {
+    metricsEnabled?: boolean;
+    projectId?: string;
     appProfileId?: string;
-    clientUid: string;
+    options: BigtableOptions;
   };
 }

@@ -59,10 +70,38 @@ enum MetricsCollectorState {
   OPERATION_COMPLETE,
 }

+// This method displays warnings if METRICS_DEBUG is enabled.
+function withMetricsDebug<T>(fn: () => T): T | undefined {
+  try {
+    return fn();
+  } catch (e) {
+    if (METRICS_DEBUG) {
+      console.warn('METRICS_DEBUG warning');
+      console.warn((e as ServiceError).message);
+    }
+  }
+  return;
+}
+
+// Checks that the state transition is valid and if not it throws an error,
+// which withMetricsDebug surfaces as a warning.
+function checkState<T>(
+  currentState: MetricsCollectorState,
+  allowedStates: MetricsCollectorState[],
+): T | undefined {
+  if (allowedStates.includes(currentState)) {
+    return;
+  } else {
+    throw Error('Invalid state transition');
+  }
+}
+
 /**
  * A class for tracing and recording client-side metrics related to Bigtable operations.
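  * It checks that operation and attempt events arrive in a valid order and
  * forwards the measured latencies to the registered IMetricsHandler instances.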
  */
 export class OperationMetricsCollector {
+  // The following key corresponds to the key the instance information is
+  // stored in for the metadata that gets returned from the server.
+  private readonly INSTANCE_INFORMATION_KEY = 'x-goog-ext-425905942-bin';
   private state: MetricsCollectorState;
   private operationStartTime: bigint | null;
   private attemptStartTime: bigint | null;
@@ -71,7 +110,6 @@
   private tabularApiSurface: ITabularApiSurface;
   private methodName: MethodName;
   private attemptCount = 0;
-  private metricsHandlers: IMetricsHandler[];
   private firstResponseLatency: number | null;
   private serverTimeRead: boolean;
   private serverTime: number | null;
@@ -79,18 +117,19 @@
   private streamingOperation: StreamingState;
   private applicationLatencies: number[];
   private lastRowReceivedTime: bigint | null;
+  private handlers: IMetricsHandler[];

   /**
    * @param {ITabularApiSurface} tabularApiSurface Information about the Bigtable table being accessed.
-   * @param {IMetricsHandler[]} metricsHandlers The metrics handlers used for recording metrics.
    * @param {MethodName} methodName The name of the method being traced.
    * @param {StreamingState} streamingOperation Whether or not the call is a streaming operation.
+   * @param {IMetricsHandler[]} handlers The metrics handlers used to record the metrics.
    */
   constructor(
     tabularApiSurface: ITabularApiSurface,
-    metricsHandlers: IMetricsHandler[],
     methodName: MethodName,
     streamingOperation: StreamingState,
+    handlers: IMetricsHandler[],
   ) {
     this.state = MetricsCollectorState.OPERATION_NOT_STARTED;
     this.zone = undefined;
@@ -99,7 +138,6 @@
     this.methodName = methodName;
     this.operationStartTime = null;
     this.attemptStartTime = null;
-    this.metricsHandlers = metricsHandlers;
     this.firstResponseLatency = null;
     this.serverTimeRead = false;
     this.serverTime = null;
@@ -107,6 +145,7 @@
     this.streamingOperation = streamingOperation;
     this.lastRowReceivedTime = null;
     this.applicationLatencies = [];
+    this.handlers = handlers;
   }

   private getMetricsCollectorData() {
@@ -115,51 +154,71 @@
       {
         instanceId: this.tabularApiSurface.instance.id,
         table: this.tabularApiSurface.id,
-        cluster: this.cluster,
-        zone: this.zone,
+        cluster: this.cluster || '',
+        zone: this.zone || 'global',
         method: this.methodName,
-        client_uid: this.tabularApiSurface.bigtable.clientUid,
       },
       appProfileId ? {app_profile: appProfileId} : {},
     );
   }

+  /**
+   * Called to add handlers to the stream so that we can observe
+   * header and trailer data for client side metrics.
+   *
+   * @param stream
+   */
+  handleStatusAndMetadata(stream: AbortableDuplex) {
+    stream
+      .on(
+        'metadata',
+        (metadata: {internalRepr: Map<string, Buffer[]>; options: {}}) => {
+          this.onMetadataReceived(metadata);
+        },
+      )
+      .on(
+        'status',
+        (status: {
+          metadata: {internalRepr: Map<string, Buffer[]>; options: {}};
+        }) => {
+          this.onStatusMetadataReceived(status);
+        },
+      );
+  }
+
   /**
    * Called when the operation starts. Records the start time.
*/ onOperationStart() { - if (this.state === MetricsCollectorState.OPERATION_NOT_STARTED) { + withMetricsDebug(() => { + checkState(this.state, [MetricsCollectorState.OPERATION_NOT_STARTED]); this.operationStartTime = hrtime.bigint(); this.firstResponseLatency = null; this.applicationLatencies = []; this.state = MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS; - } else { - console.warn('Invalid state transition'); - } + }); } /** * Called when an attempt (e.g., an RPC attempt) completes. Records attempt latencies. - * @param {string} projectId The id of the project. * @param {grpc.status} attemptStatus The grpc status for the attempt. */ - onAttemptComplete(projectId: string, attemptStatus: grpc.status) { - if ( - this.state === - MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_NO_ROWS_YET || - this.state === - MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_SOME_ROWS_RECEIVED - ) { + onAttemptComplete(attemptStatus: grpc.status) { + withMetricsDebug(() => { + checkState(this.state, [ + MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_NO_ROWS_YET, + MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_SOME_ROWS_RECEIVED, + ]); this.state = MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS; this.attemptCount++; const endTime = hrtime.bigint(); - if (projectId && this.attemptStartTime) { + if (this.attemptStartTime) { const totalMilliseconds = Number( (endTime - this.attemptStartTime) / BigInt(1000000), ); - this.metricsHandlers.forEach(metricsHandler => { + this.handlers.forEach(metricsHandler => { if (metricsHandler.onAttemptComplete) { metricsHandler.onAttemptComplete({ attemptLatency: totalMilliseconds, @@ -169,24 +228,23 @@ export class OperationMetricsCollector { status: attemptStatus.toString(), client_name: `nodejs-bigtable/${version}`, metricsCollectorData: this.getMetricsCollectorData(), - projectId, }); } }); + } else { + console.warn('Start time should always be provided'); } - } else { - console.warn('Invalid state transition attempted'); - } + }); } /** * Called when a new attempt starts. Records the start time of the attempt. */ onAttemptStart() { - if ( - this.state === - MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS - ) { + withMetricsDebug(() => { + checkState(this.state, [ + MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS, + ]); this.state = MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_NO_ROWS_YET; this.attemptStartTime = hrtime.bigint(); @@ -194,60 +252,59 @@ export class OperationMetricsCollector { this.serverTimeRead = false; this.connectivityErrorCount = 0; this.lastRowReceivedTime = null; - } else { - console.warn('Invalid state transition attempted'); - } + }); } /** * Called when the first response is received. Records first response latencies. */ - onResponse(projectId: string) { - if (!this.firstResponseLatency) { - // Check firstResponseLatency first to improve latency for calls with many rows - if ( - this.state === - MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_NO_ROWS_YET - ) { + onResponse() { + withMetricsDebug(() => { + if (!this.firstResponseLatency) { + checkState(this.state, [ + MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_NO_ROWS_YET, + ]); this.state = MetricsCollectorState.OPERATION_STARTED_ATTEMPT_IN_PROGRESS_SOME_ROWS_RECEIVED; const endTime = hrtime.bigint(); - if (projectId && this.operationStartTime) { - // first response latency is measured in total milliseconds. 
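+          // hrtime.bigint() returns nanoseconds; dividing by 1000000 converts
+          // the elapsed time to whole milliseconds.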
+ if (this.operationStartTime) { this.firstResponseLatency = Number( (endTime - this.operationStartTime) / BigInt(1000000), ); + } else { + console.warn( + 'ProjectId and operationStartTime should always be provided', + ); } } - } + }); } /** * Called when an operation completes (successfully or unsuccessfully). * Records operation latencies, retry counts, and connectivity error counts. - * @param {string} projectId The id of the project. * @param {grpc.status} finalOperationStatus Information about the completed operation. */ - onOperationComplete(projectId: string, finalOperationStatus: grpc.status) { - if ( - this.state === - MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS - ) { + onOperationComplete(finalOperationStatus: grpc.status) { + this.onAttemptComplete(finalOperationStatus); + withMetricsDebug(() => { + checkState(this.state, [ + MetricsCollectorState.OPERATION_STARTED_ATTEMPT_NOT_IN_PROGRESS, + ]); this.state = MetricsCollectorState.OPERATION_COMPLETE; const endTime = hrtime.bigint(); - if (projectId && this.operationStartTime) { + if (this.operationStartTime) { const totalMilliseconds = Number( (endTime - this.operationStartTime) / BigInt(1000000), ); { - this.metricsHandlers.forEach(metricsHandler => { + this.handlers.forEach(metricsHandler => { if (metricsHandler.onOperationComplete) { metricsHandler.onOperationComplete({ status: finalOperationStatus.toString(), streaming: this.streamingOperation, metricsCollectorData: this.getMetricsCollectorData(), client_name: `nodejs-bigtable/${version}`, - projectId, operationLatency: totalMilliseconds, retryCount: this.attemptCount - 1, firstResponseLatency: this.firstResponseLatency ?? undefined, @@ -256,10 +313,10 @@ export class OperationMetricsCollector { } }); } + } else { + console.warn('operation start time should always be available here'); } - } else { - console.warn('Invalid state transition attempted'); - } + }); } /** @@ -334,29 +391,32 @@ export class OperationMetricsCollector { onStatusMetadataReceived(status: { metadata: {internalRepr: Map; options: {}}; }) { - if (!this.zone || !this.cluster) { - const INSTANCE_INFORMATION_KEY = 'x-goog-ext-425905942-bin'; - const mappedValue = status.metadata.internalRepr.get( - INSTANCE_INFORMATION_KEY, - ) as Buffer[]; - const decodedValue = ResponseParams.decode( - mappedValue[0], - mappedValue[0].length, - ); - if ( - decodedValue && - (decodedValue as unknown as {zoneId: string}).zoneId - ) { - this.zone = (decodedValue as unknown as {zoneId: string}).zoneId; - } - if ( - decodedValue && - (decodedValue as unknown as {clusterId: string}).clusterId - ) { - this.cluster = ( - decodedValue as unknown as {clusterId: string} - ).clusterId; + withMetricsDebug(() => { + if (!this.zone || !this.cluster) { + const mappedValue = status.metadata.internalRepr.get( + this.INSTANCE_INFORMATION_KEY, + ) as Buffer[]; + if (mappedValue && mappedValue[0] && ResponseParams) { + const decodedValue = ResponseParams.decode( + mappedValue[0], + mappedValue[0].length, + ); + if ( + decodedValue && + (decodedValue as unknown as {zoneId: string}).zoneId + ) { + this.zone = (decodedValue as unknown as {zoneId: string}).zoneId; + } + if ( + decodedValue && + (decodedValue as unknown as {clusterId: string}).clusterId + ) { + this.cluster = ( + decodedValue as unknown as {clusterId: string} + ).clusterId; + } + } } - } + }); } } diff --git a/src/execute-query/bytebuffertransformer.ts b/src/execute-query/bytebuffertransformer.ts new file mode 100644 index 000000000..c2222fa6e --- /dev/null +++ 
b/src/execute-query/bytebuffertransformer.ts @@ -0,0 +1,150 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {Transform, TransformCallback} from 'stream'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +import {google} from '../../protos/protos'; +import * as SqlValues from './values'; + +/** + * stream.Transform which buffers bytes from `ExecuteQuery` responses until + * resumeToken is received. At that point all buffered messages are passed + * forward. + */ +export class ByteBufferTransformer extends Transform { + private messageQueue: Buffer[] = []; + private messageBuffer: Uint8Array[] = []; + private protoBytesEncoding?: BufferEncoding; + + constructor(protoBytesEncoding?: BufferEncoding) { + super({objectMode: true, highWaterMark: 0}); + this.protoBytesEncoding = protoBytesEncoding; + } + + private resetQueueAndBuffer = ( + estimatedBatchSize: number | null | undefined, + ): void => { + this.messageQueue = []; + this.messageBuffer = new Array(estimatedBatchSize || 0); + }; + + private flushMessageBuffer = ( + batchChecksum: number, + estimatedBatchSize: number | null | undefined, + ): void => { + if (this.messageBuffer.length === 0) { + throw new Error('Recieved empty batch with non-zero checksum.'); + } + const newBatch = Buffer.concat(this.messageBuffer); + if (!SqlValues.checksumValid(newBatch, batchChecksum)) { + throw new Error('Failed to validate next batch of results'); + } + this.messageQueue.push(newBatch); + this.messageBuffer = new Array(estimatedBatchSize || 0); + }; + + private pushMessages = (resumeToken: string | Uint8Array): void => { + const token = SqlValues.ensureUint8Array( + resumeToken, + this.protoBytesEncoding, + ); + if (this.messageBuffer.length !== 0) { + throw new Error('Recieved incomplete batch of rows.'); + } + this.push([this.messageQueue, token]); + this.messageBuffer = []; + this.messageQueue = []; + }; + + /** + * Process a `PartialResultSet` message from the server. + * For more info refer to the PartialResultSet protobuf definition. + * @param partialResultSet The `PartialResultSet` message to process. 
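+   *
+   * A single response may combine several of the following, handled in order:
+   * a `reset` flag (drop all buffered batches), `batchData` bytes (append to
+   * the current batch), a `batchChecksum` (close and CRC-validate the current
+   * batch), and a non-empty `resumeToken` (push all completed batches
+   * downstream together with the token).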
+   */
+  private processProtoRowsBatch = (
+    partialResultSet: google.bigtable.v2.IPartialResultSet,
+  ): void => {
+    let handled = false;
+    if (partialResultSet.reset) {
+      this.resetQueueAndBuffer(partialResultSet.estimatedBatchSize);
+      handled = true;
+    }
+
+    if (partialResultSet.protoRowsBatch?.batchData?.length) {
+      this.messageBuffer.push(
+        SqlValues.ensureUint8Array(
+          partialResultSet.protoRowsBatch.batchData,
+          this.protoBytesEncoding,
+        ),
+      );
+      handled = true;
+    }
+
+    if (partialResultSet.batchChecksum) {
+      this.flushMessageBuffer(
+        partialResultSet.batchChecksum,
+        partialResultSet.estimatedBatchSize,
+      );
+      handled = true;
+    }
+
+    if (
+      partialResultSet.resumeToken &&
+      partialResultSet.resumeToken.length > 0
+    ) {
+      this.pushMessages(partialResultSet.resumeToken);
+      handled = true;
+    }
+
+    if (!handled) {
+      throw new Error('Response did not contain any results!');
+    }
+  };
+
+  _transform(
+    chunk: google.bigtable.v2.ExecuteQueryResponse,
+    _encoding: BufferEncoding,
+    callback: TransformCallback,
+  ) {
+    let maybeError: Error | null = null;
+    const response = chunk as google.bigtable.v2.ExecuteQueryResponse;
+    try {
+      switch (response.response) {
+        case 'results': {
+          this.processProtoRowsBatch(response.results!);
+          break;
+        }
+        default:
+          throw Error(`Response contains unknown type ${response.response}`);
+      }
+    } catch (error) {
+      maybeError = new Error(
+        `Internal Error. Failed to process response: ${error}`,
+      );
+    }
+    callback(maybeError);
+  }
+
+  _flush(callback: TransformCallback): void {
+    if (this.messageBuffer.length > 0) {
+      callback(
+        new Error(
+          'Internal Error. Last message did not contain a resumeToken.',
+        ),
+      );
+      return;
+    }
+    callback(null);
+  }
+}
diff --git a/src/execute-query/executequerystatemachine.ts b/src/execute-query/executequerystatemachine.ts
new file mode 100644
index 000000000..1f2b89a1b
--- /dev/null
+++ b/src/execute-query/executequerystatemachine.ts
@@ -0,0 +1,589 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+import {
+  PreparedStatement,
+  PreparedStatementDataCallback,
+} from './preparedstatement';
+import {Bigtable} from '..';
+import {ServiceError, RetryOptions} from 'google-gax';
+import {google} from '../../protos/protos';
+import * as SqlTypes from './types';
+import {AbortableDuplex} from '..';
+import {ByteBufferTransformer} from './bytebuffertransformer';
+import {
+  DEFAULT_RETRY_COUNT,
+  isExpiredQueryError,
+  RETRYABLE_STATUS_CODES,
+} from '../utils/retry-options';
+import {ExecuteQueryStreamWithMetadata} from './values';
+import {ProtobufReaderTransformer} from './protobufreadertransformer';
+import {MetadataConsumer} from './metadataconsumer';
+import {DEFAULT_BACKOFF_SETTINGS} from '../tabular-api-surface';
+import {isRstStreamError} from '../utils/createReadStreamInternal';
+const pumpify = require('pumpify');
+
+/**
+ * Creates a stream with some additional functionalities used by
+ * the ExecuteQueryStateMachine.
+ */
+interface CallerStream extends ExecuteQueryStreamWithMetadata {
+  /**
+   * Sets the metadata used to parse the executeQuery responses.
+   */
+  updateMetadata: (metadata: SqlTypes.ResultSetMetadata) => void;
+  /**
+   * @returns the latest resumeToken before which all data was processed.
+   */
+  getLatestResumeToken: () => Uint8Array | string | null;
+  /**
+   * @param callback guaranteed to be called *after* the last message
+   * was processed.
+   */
+  onDrain: (callback: () => void) => void;
+  /**
+   * No other data event will be emitted after this method is called.
+   */
+  close: () => void;
+  /**
+   * keeps a reference to the state machine.
+   */
+  _stateMachine: ExecuteQueryStateMachine;
+}
+
+interface StreamRetryOptions {
+  maxRetries: number;
+  totalTimeout: number;
+  retryCodes: Set<number>;
+  initialRetryDelayMillis: number;
+  retryDelayMultiplier: number;
+  maxRetryDelayMillis: number;
+}
+
+export type State =
+  /**
+   * This is the starting state. When the executeQuery starts we try to
+   * fetch the query plan from the PreparedStatement object. It is done via a callback.
+   * If the query plan is expired, it might take time to refresh and the callback
+   * won't be called immediately.
+   */
+  | 'AwaitingQueryPlan'
+  /**
+   * We may have received some data from the server, but no resumeToken has been reached.
+   * This is an important distinction, because before the resumeToken, if the server
+   * returns an "expired query plan" error, we can still try to refresh it.
+   */
+  | 'BeforeFirstResumeToken'
+  /**
+   * After the first resumeToken has been reached, we can't refresh the query plan,
+   * because the schema could have changed in the meantime. This would cause the
+   * new rows to be parsed differently than the previous ones. That's why, in this
+   * state, we treat the "expired query plan" error as non-retryable.
+   */
+  | 'AfterFirstResumeToken'
+  /**
+   * When we need to properly dispose of the old responseStream and byteBuffer, we
+   * enter a "Draining..." state. Depending on what we want to do next, we have
+   * DrainAndRefreshQueryPlan - moves to AwaitingQueryPlan after draining completed
+   * DrainingBeforeResumeToken - moves to BeforeFirstResumeToken after draining completed
+   * DrainingAfterResumeToken - moves to AfterFirstResumeToken after draining completed
+   *
+   * We want to make a new request only when all requests already written to the Reader
+   * by our previous active request stream were processed.
+   *
+   * For simplicity, we will drop the previous Bigtable stream and ByteBuffer transform
+   * and recreate them. We could also keep the ByteBuffer alive, but that would require
+   * us to clean up its internal state and still wait for the entire buffer to be
+   * read—just one step upstream.
+   *
+   * Please note that we cannot use gax's built-in streaming retries, as we have no way
+   * of informing it that we'd like to wait for an event before retrying. An alternative
+   * approach would be to purge all buffers of all streams before making a request, but
+   * there is no standard API for that. Additionally, this would not help us with gax,
+   * as we cannot traverse all streams upstream of our ByteBuffer to purge their buffers,
+   * nor can we rely on implementation details.
+   *
+   * We cannot simply wait until all events are processed by ByteBuffer's _transform() method,
+   * as there might still be events left in ByteBuffer's readable buffer that we do not want
+   * to discard.
+   *
+   * Our solution is to wait until all events in the Reader's writable buffer are processed
+   * and use the last resumeToken seen by the Reader to make a new request.
+   *
+   * We will detach (unpipe) the ByteBuffer from the Reader and wait until all requests
+   * written to the Reader by the ByteBuffer are processed using the _transform() method.
+   * This ensures that all events written before detachment are handled by _transform(),
+   * and the last resumption token seen by the Reader is the correct one to use.
+   *
+   * Thus, we will wait for the buffer to clear before making a new request and use the
+   * last resumeToken seen by the Reader to determine the correct token for the retry request.
+   *
+   * This guarantees that no responses will be lost—everything processed by the
+   * Reader's `_transform()` method has been pushed to the caller and won't be discarded.
+   * Additionally, no duplicates will occur, as no more responses will be seen by `_transform()`
+   * until a new request is made.
+   */
+  | 'DrainAndRefreshQueryPlan'
+  /**
+   * Moves to BeforeFirstResumeToken after draining. For more info see 'DrainAndRefreshQueryPlan' state.
+   */
+  | 'DrainingBeforeResumeToken'
+  /**
+   * Moves to AfterFirstResumeToken after draining. For more info see 'DrainAndRefreshQueryPlan' state.
+   */
+  | 'DrainingAfterResumeToken'
+  /**
+   * This state indicates that the stream has finished without error.
+   */
+  | 'Finished'
+  /**
+   * This state indicates that a non-retryable error occurred and the stream
+   * cannot be recovered.
+   */
+  | 'Failed';
+
+const DEFAULT_TOTAL_TIMEOUT_MS = 60000;
+
+/**
+ * This object handles creating and piping the streams
+ * which are used to process the responses from the server.
+ * Its main purpose is to make sure that the callerStream, which
+ * the user gets as a result of Instance.executeQuery, behaves properly:
+ * - closes in case of a failure
+ * - doesn't close in case of a retryable error.
+ *
+ * We create the following streams:
+ * responseStream -> byteBuffer -> readerStream -> resultStream
+ *
+ * The last two (readerStream and resultStream) are connected
+ * and returned to the caller - hence called the callerStream.
+ *
+ * When a request is made, responseStream and byteBuffer are created,
+ * connected and piped to the readerStream.
+ *
+ * On retry, the old responseStream-byteBuffer pair is discarded and a
+ * new pair is created.
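+ *
+ * An illustrative sketch of one retry cycle (not verbatim code; the names
+ * match the members defined below):
+ *
+ *   discardOldValueStream();            // abort + unpipe the old pair
+ *   callerStream.onDrain(() => {        // wait for the Reader's backlog
+ *     // resume from the last token the Reader has fully processed
+ *     makeNewRequest(lastPreparedStatementBytes);
+ *   });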
+ *
+ * For more info please refer to the `State` type
+ */
+export class ExecuteQueryStateMachine {
+  private bigtable: Bigtable;
+  private callerStream: CallerStream;
+  private originalEnd: (chunk?: any, encoding?: any, cb?: () => void) => void;
+  private retryOptions: StreamRetryOptions;
+  private valuesStream: AbortableDuplex | null;
+  private requestParams: any;
+  private lastPreparedStatementBytes?: Uint8Array | string;
+  private preparedStatement: PreparedStatement;
+  private state: State;
+  private deadlineTs: number;
+  private protoBytesEncoding?: BufferEncoding;
+  private numErrors: number;
+  private retryTimer: NodeJS.Timeout | null;
+  private timeoutTimer: NodeJS.Timeout | null;
+
+  constructor(
+    bigtable: Bigtable,
+    callerStream: CallerStream,
+    preparedStatement: PreparedStatement,
+    requestParams: any,
+    retryOptions?: Partial<RetryOptions> | null,
+    protoBytesEncoding?: BufferEncoding,
+  ) {
+    this.bigtable = bigtable;
+    this.callerStream = callerStream;
+    this.originalEnd = callerStream.end.bind(callerStream);
+    this.callerStream.end = this.handleCallersEnd.bind(this);
+    this.requestParams = requestParams;
+    this.retryOptions = this.parseRetryOptions(retryOptions);
+    this.deadlineTs = Date.now() + this.retryOptions.totalTimeout;
+    this.valuesStream = null;
+    this.preparedStatement = preparedStatement;
+    this.protoBytesEncoding = protoBytesEncoding;
+    this.numErrors = 0;
+    this.retryTimer = null;
+    this.timeoutTimer = setTimeout(
+      this.handleTotalTimeout,
+      this.calculateTotalTimeout(),
+    );
+
+    this.state = 'AwaitingQueryPlan';
+    this.preparedStatement.getData(
+      this.handleQueryPlan,
+      this.calculateTotalTimeout(),
+    );
+  }
+
+  private parseRetryOptions = (
+    input?: Partial<RetryOptions> | null,
+  ): StreamRetryOptions => {
+    const rCodes = input?.retryCodes
+      ? new Set(input?.retryCodes)
+      : RETRYABLE_STATUS_CODES;
+    const backoffSettings = input?.backoffSettings;
+    const clientTotalTimeout =
+      this?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces &&
+      this?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces[
+        'google.bigtable.v2.Bigtable'
+      ]?.methods['ExecuteQuery']?.timeout_millis;
+    return {
+      maxRetries: backoffSettings?.maxRetries || DEFAULT_RETRY_COUNT,
+      totalTimeout:
+        backoffSettings?.totalTimeoutMillis ||
+        clientTotalTimeout ||
+        DEFAULT_TOTAL_TIMEOUT_MS,
+      retryCodes: rCodes,
+      initialRetryDelayMillis:
+        backoffSettings?.initialRetryDelayMillis ||
+        DEFAULT_BACKOFF_SETTINGS.initialRetryDelayMillis,
+      retryDelayMultiplier:
+        backoffSettings?.retryDelayMultiplier ||
+        DEFAULT_BACKOFF_SETTINGS.retryDelayMultiplier,
+      maxRetryDelayMillis:
+        backoffSettings?.maxRetryDelayMillis ||
+        DEFAULT_BACKOFF_SETTINGS.maxRetryDelayMillis,
+    };
+  };
+
+  private calculateTotalTimeout = () => {
+    return Math.max(this.deadlineTs - Date.now(), 0);
+  };
+
+  private fail = (err: Error) => {
+    if (this.state !== 'Failed' && this.state !== 'Finished') {
+      this.state = 'Failed';
+      this.clearTimers();
+      this.callerStream.emit('error', err);
+    }
+  };
+
+  private createValuesStream = (): AbortableDuplex => {
+    const reqOpts: google.bigtable.v2.IExecuteQueryRequest = {
+      ...this.requestParams,
+      preparedQuery: this.lastPreparedStatementBytes,
+      resumeToken: this.callerStream.getLatestResumeToken(),
+    };
+
+    const retryOpts = {
+      currentRetryAttempt: 0,
+      // Handling retries in this client.
+      // Options below prevent gax from retrying.
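+      // Retries are handled by this state machine instead, because gax cannot
+      // be told to wait for the reader to drain before resuming from the last
+      // resumeToken (see the State type above).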
+      noResponseRetries: 0,
+      shouldRetryFn: () => {
+        return false;
+      },
+    };
+
+    const responseStream = this.bigtable.request({
+      client: 'BigtableClient',
+      method: 'executeQuery',
+      reqOpts,
+      gaxOpts: retryOpts,
+    });
+
+    const byteBuffer = new ByteBufferTransformer(this.protoBytesEncoding);
+    const rowValuesStream = pumpify.obj([responseStream, byteBuffer]);
+
+    let aborted = false;
+    const abort = () => {
+      if (!aborted) {
+        aborted = true;
+        responseStream.abort();
+      }
+    };
+    rowValuesStream.abort = abort;
+
+    return rowValuesStream;
+  };
+
+  private makeNewRequest = (
+    preparedStatementBytes?: Uint8Array | string,
+    metadata?: SqlTypes.ResultSetMetadata,
+  ) => {
+    if (this.valuesStream !== null) {
+      // assume old streams were scrapped.
+      this.fail(
+        new Error(
+          'Internal error: making a request before streams from the last one were cleaned up.',
+        ),
+      );
+    }
+
+    if (preparedStatementBytes) {
+      this.lastPreparedStatementBytes = preparedStatementBytes;
+    }
+    if (metadata) {
+      this.callerStream.updateMetadata(metadata);
+    }
+    this.valuesStream = this.createValuesStream();
+
+    this.valuesStream
+      .on('error', this.handleStreamError)
+      .on('data', this.handleStreamData)
+      .on('close', this.handleStreamEnd)
+      .on('end', this.handleStreamEnd);
+
+    this.valuesStream.pipe(this.callerStream, {end: false});
+  };
+
+  private discardOldValueStream = () => {
+    if (this.valuesStream) {
+      this.valuesStream.abort();
+      this.valuesStream.unpipe(this.callerStream);
+      this.valuesStream.removeAllListeners('error');
+      this.valuesStream.removeAllListeners('data');
+      this.valuesStream.removeAllListeners('end');
+      this.valuesStream.removeAllListeners('close');
+      this.valuesStream.destroy();
+      this.valuesStream = null;
+    }
+  };
+
+  private getNextRetryDelay = (): number => {
+    // 0 - 100 ms jitter
+    const jitter = Math.floor(Math.random() * 100);
+    const calculatedNextRetryDelay =
+      this.retryOptions.initialRetryDelayMillis *
+        Math.pow(this.retryOptions.retryDelayMultiplier, this.numErrors) +
+      jitter;
+
+    return Math.min(
+      calculatedNextRetryDelay,
+      this.retryOptions.maxRetryDelayMillis,
+    );
+  };
+
+  private clearTimers = (): void => {
+    if (this.retryTimer) {
+      clearTimeout(this.retryTimer);
+      this.retryTimer = null;
+    }
+    if (this.timeoutTimer) {
+      clearTimeout(this.timeoutTimer);
+      this.timeoutTimer = null;
+    }
+  };
+
+  // Transitions:
+
+  private startNextAttempt = (): void => {
+    if (this.state === 'DrainAndRefreshQueryPlan') {
+      this.state = 'AwaitingQueryPlan';
+      this.preparedStatement.getData(
+        this.handleQueryPlan,
+        this.calculateTotalTimeout(),
+      );
+    } else if (this.state === 'DrainingBeforeResumeToken') {
+      this.state = 'BeforeFirstResumeToken';
+      this.makeNewRequest(this.lastPreparedStatementBytes);
+    } else if (this.state === 'DrainingAfterResumeToken') {
+      this.state = 'AfterFirstResumeToken';
+      this.makeNewRequest(this.lastPreparedStatementBytes);
+    } else {
+      this.fail(
+        new Error(
+          `startNextAttempt can't be invoked in the current state ${this.state}`,
+        ),
+      );
+    }
+  };
+
+  private handleDrainingDone = (): void => {
+    if (
+      this.state === 'DrainAndRefreshQueryPlan' ||
+      this.state === 'DrainingBeforeResumeToken' ||
+      this.state === 'DrainingAfterResumeToken'
+    ) {
+      this.retryTimer = setTimeout(
+        this.startNextAttempt,
+        this.getNextRetryDelay(),
+      );
+    } else {
+      this.fail(
+        new Error(
+          `handleDrainingDone can't be invoked in the current state ${this.state}`,
+        ),
+      );
+    }
+  };
+
+  private handleTotalTimeout = (): void => {
+    this.discardOldValueStream();
+    if (this.retryTimer) {
+      clearTimeout(this.retryTimer);
+      this.retryTimer = null;
+    }
+    this.fail(new Error('Deadline exceeded.'));
+  };
+
+  private handleStreamError = (err: ServiceError): void => {
+    this.discardOldValueStream();
+    if (
+      this.retryOptions.retryCodes.has(err.code) || // retryable error
+      isRstStreamError(err)
+    ) {
+      // We want to make a new request only when all requests already written to the Reader by our
+      // previous active request stream were processed.
+      this.numErrors += 1;
+      if (this.numErrors <= this.retryOptions.maxRetries) {
+        if (this.state === 'AfterFirstResumeToken') {
+          this.state = 'DrainingAfterResumeToken';
+          this.callerStream.onDrain(this.handleDrainingDone);
+        } else if (this.state === 'BeforeFirstResumeToken') {
+          this.state = 'DrainingBeforeResumeToken';
+          this.callerStream.onDrain(this.handleDrainingDone);
+        } else {
+          this.fail(
+            new Error(
+              `Can't handle a stream error in the current state ${this.state}`,
+            ),
+          );
+        }
+      } else {
+        this.fail(
+          new Error(`Maximum retry limit exceeded. Last error: ${err.message}`),
+        );
+      }
+    } else if (isExpiredQueryError(err)) {
+      if (this.state === 'AfterFirstResumeToken') {
+        this.fail(new Error('Query plan expired during a retry attempt.'));
+      } else if (this.state === 'BeforeFirstResumeToken') {
+        this.state = 'DrainAndRefreshQueryPlan';
+        // If the server returned the "expired query error" we mark it as expired.
+        this.preparedStatement.markAsExpired();
+        this.callerStream.onDrain(this.handleDrainingDone);
+      } else {
+        this.fail(
+          new Error(
+            `Can't handle expired query error in the current state ${this.state}`,
+          ),
+        );
+      }
+    } else {
+      this.fail(new Error(`Unexpected error: ${err.message}`));
+    }
+  };
+
+  private handleQueryPlan: PreparedStatementDataCallback = (
+    err?: Error,
+    preparedStatementBytes?: Uint8Array | string,
+    metadata?: SqlTypes.ResultSetMetadata,
+  ) => {
+    if (this.state === 'AwaitingQueryPlan') {
+      if (err) {
+        this.numErrors += 1;
+        if (this.numErrors <= this.retryOptions.maxRetries) {
+          this.preparedStatement.getData(
+            this.handleQueryPlan,
+            this.calculateTotalTimeout(),
+          );
+        } else {
+          this.fail(
+            new Error(
+              `Failed to get query plan. Maximum retry limit exceeded. Last error: ${err.message}`,
+            ),
+          );
+        }
+      } else {
+        this.state = 'BeforeFirstResumeToken';
+        this.makeNewRequest(preparedStatementBytes, metadata);
+      }
+    } else {
+      this.fail(
+        new Error(
+          `handleQueryPlan can't be invoked in the current state ${this.state}`,
+        ),
+      );
+    }
+  };
+
+  /**
+   * This method is called when the valuesStream emits data.
+   * The valuesStream yields data only after the resume token
+   * is received, hence the state change.
+   */
+  private handleStreamData = (data: any) => {
+    if (
+      this.state === 'BeforeFirstResumeToken' ||
+      this.state === 'AfterFirstResumeToken'
+    ) {
+      this.state = 'AfterFirstResumeToken';
+    } else {
+      this.fail(
+        new Error(
+          `Internal Error: received data in an invalid state ${this.state}`,
+        ),
+      );
+    }
+  };
+
+  private handleStreamEnd = (): void => {
+    if (
+      this.state === 'AfterFirstResumeToken' ||
+      this.state === 'BeforeFirstResumeToken'
+    ) {
+      this.clearTimers();
+      this.state = 'Finished';
+      this.originalEnd();
+    } else if (this.state === 'Finished') {
+      // noop
+    } else {
+      this.fail(
+        new Error(
+          `Internal Error: Cannot handle stream end in state: ${this.state}`,
+        ),
+      );
+    }
+  };
+
+  /**
+   * The caller should be able to call callerStream.end() to stop receiving
+   * more rows and cancel the stream prematurely.
+   * However, this has a side effect: the 'end' event will be emitted.
+   * We don't want that, because it also gets emitted if the stream ended
+   * normally. To tell these two situations apart, we'll overwrite the end
+   * function, but save the "original" end() function which will be called
+   * on valuesStream.on('end').
+   */
+  private handleCallersEnd = (
+    chunk?: any,
+    encoding?: any,
+    cb?: () => void,
+  ): CallerStream => {
+    if (this.state !== 'Failed' && this.state !== 'Finished') {
+      this.clearTimers();
+      this.discardOldValueStream();
+      this.state = 'Finished';
+      this.callerStream.close();
+    }
+    return this.callerStream;
+  };
+}
+
+export function createCallerStream(
+  readerStream: ProtobufReaderTransformer,
+  resultStream: ExecuteQueryStreamWithMetadata,
+  metadataConsumer: MetadataConsumer,
+  setCallerCancelled: (v: boolean) => void,
+): CallerStream {
+  const callerStream = pumpify.obj([readerStream, resultStream]);
+  callerStream.getMetadata = resultStream.getMetadata.bind(resultStream);
+  callerStream.updateMetadata = metadataConsumer.consume.bind(metadataConsumer);
+  callerStream.getLatestResumeToken = () => readerStream.resumeToken;
+  callerStream.onDrain = readerStream.onDrain.bind(readerStream);
+  callerStream.close = () => {
+    setCallerCancelled(true);
+    callerStream.destroy();
+  };
+  return callerStream;
+}
diff --git a/src/execute-query/metadataconsumer.ts b/src/execute-query/metadataconsumer.ts
new file mode 100644
index 000000000..8fa3ecc2b
--- /dev/null
+++ b/src/execute-query/metadataconsumer.ts
@@ -0,0 +1,122 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+import * as Types from './types';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+import {google} from '../../protos/protos';
+
+/**
+ * This class keeps and parses the metadata.
+ */
+export class MetadataConsumer {
+  private metadata: Types.ResultSetMetadata | null;
+
+  constructor() {
+    this.metadata = null;
+  }
+
+  getMetadata = (): Types.ResultSetMetadata | null => {
+    return this.metadata;
+  };
+
+  consume = (new_metadata: Types.ResultSetMetadata) => {
+    this.metadata = new_metadata;
+  };
+
+  static parsePBType(type: google.bigtable.v2.Type): Types.Type {
+    switch (type.kind) {
+      case 'bytesType':
+        return Types.Bytes();
+      case 'stringType':
+        return Types.String();
+      case 'int64Type':
+        return Types.Int64();
+      case 'float64Type':
+        return Types.Float64();
+      case 'float32Type':
+        return Types.Float32();
+      case 'boolType':
+        return Types.Bool();
+      case 'timestampType':
+        return Types.Timestamp();
+      case 'dateType':
+        return Types.Date();
+      case 'structType':
+        return Types.Struct(
+          ...type.structType!.fields!.map(field => ({
+            name: field.fieldName as string | null,
+            type: MetadataConsumer.parsePBType(
+              field.type as google.bigtable.v2.Type,
+            ),
+          })),
+        );
+      case 'arrayType':
+        return Types.Array(
+          MetadataConsumer.parsePBType(
+            type.arrayType!.elementType! as google.bigtable.v2.Type,
+          ),
+        );
+      case 'mapType': {
+        const keyType = MetadataConsumer.parsePBType(
+          type.mapType!.keyType! as google.bigtable.v2.Type,
+        );
+        if (
+          keyType.type !== 'int64' &&
+          keyType.type !== 'string' &&
+          keyType.type !== 'bytes'
+        ) {
+          throw new Error(
+            `Unsupported type of map key received: ${keyType.type}`,
+          );
+        }
+        return Types.Map(
+          keyType,
+          MetadataConsumer.parsePBType(
+            type.mapType!.valueType! as google.bigtable.v2.Type,
+          ),
+        );
+      }
+      default:
+        throw new Error(
+          `Type ${type.kind} not supported by current client version`,
+        );
+    }
+  }
+
+  static parseMetadata(
+    metadata: google.bigtable.v2.IResultSetMetadata,
+  ): Types.ResultSetMetadata {
+    if (!metadata.protoSchema) {
+      throw new Error('Only protoSchemas are supported.');
+    }
+    const columns = metadata.protoSchema.columns!;
+    if (columns.length === 0) {
+      throw new Error('Invalid empty ResultSetMetadata received.');
+    }
+
+    return Types.ResultSetMetadata.fromTuples(
+      columns.map(column => {
+        if (column.name === null || column.name === '') {
+          throw new Error(`Invalid column name "${column.name}"`);
+        } else {
+          return [
+            column.name ?? null,
+            MetadataConsumer.parsePBType(
+              column.type! as google.bigtable.v2.Type,
+            ),
+          ];
+        }
+      }),
+    );
+  }
+}
diff --git a/src/execute-query/namedlist.ts b/src/execute-query/namedlist.ts
new file mode 100644
index 000000000..6dd5dfa3c
--- /dev/null
+++ b/src/execute-query/namedlist.ts
@@ -0,0 +1,98 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/**
+ * Represents how field names correspond to field indexes in a NamedList.
+ */
+export type FieldMapping = {
+  validFieldNames: Map<string, number>;
+  duplicateFieldNames: Map<string, number[]>;
+  fieldNames: (string | null)[];
+};
+
+function constructFieldMapping(values: (string | null)[]): FieldMapping {
+  const fieldMapping: Map<string, number[]> = new Map();
+  for (let i = 0; i < values.length; i++) {
+    const name = values[i];
+    if (name) {
+      if (!fieldMapping.has(name)) {
+        fieldMapping.set(name, []);
+      }
+      fieldMapping.get(name)!.push(i);
+    }
+  }
+  const validFieldNames = new Map<string, number>();
+  const duplicateFieldNames = new Map<string, number[]>();
+  for (const [name, indexes] of fieldMapping.entries()) {
+    if (indexes.length > 1) {
+      duplicateFieldNames.set(name, indexes);
+    } else {
+      validFieldNames.set(name, indexes[0]);
+    }
+  }
+  return {
+    validFieldNames,
+    duplicateFieldNames,
+    fieldNames: values,
+  };
+}
+
+/**
+ * Class representing a list which allows retrieving elements both by index
+ * and by name. If multiple elements share the same name, they have to be
+ * retrieved by index; retrieving them by name throws an error.
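+ *
+ * For example, for a list built from tuples [['a', 1], ['b', 2], ['a', 3]]:
+ * get('b') returns 2, get(2) returns 3, and get('a') throws because 'a'
+ * occurs at indexes 0 and 2.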
+ */
+export class NamedList<T> {
+  values: Array<T>;
+  fieldMapping: FieldMapping;
+
+  constructor(values: Array<T>, fieldMapping: FieldMapping) {
+    this.values = values;
+    this.fieldMapping = fieldMapping;
+  }
+
+  protected static _fromTuples<R extends NamedList<T>, T>(
+    type: {new (values: Array<T>, fieldMapping: FieldMapping): R},
+    tuples: [string | null, T][],
+  ): R {
+    return new type(
+      tuples.map(tuple => tuple[1]),
+      constructFieldMapping(tuples.map(tuple => tuple[0])),
+    );
+  }
+
+  get(indexOrName: string | number): T {
+    let index;
+    if (typeof indexOrName === 'string') {
+      if (this.fieldMapping.duplicateFieldNames.has(indexOrName)) {
+        throw new Error(
+          `Cannot access ${indexOrName} by name because it is available on multiple indexes: ${this.fieldMapping.duplicateFieldNames
+            .get(indexOrName)!
+            .join(', ')}`,
+        );
+      }
+      index = this.fieldMapping.validFieldNames.get(indexOrName);
+      if (index === undefined) {
+        throw new Error(`Unknown field name '${indexOrName}'.`);
+      }
+    } else {
+      index = indexOrName;
+    }
+    return this.values[index];
+  }
+
+  getFieldNameAtIndex(index: number): string | null {
+    return this.fieldMapping.fieldNames[index];
+  }
+}
diff --git a/src/execute-query/parameterparsing.ts b/src/execute-query/parameterparsing.ts
new file mode 100644
index 000000000..658c0b50c
--- /dev/null
+++ b/src/execute-query/parameterparsing.ts
@@ -0,0 +1,308 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {PreciseDate} from '@google-cloud/precise-date';
+import {google} from '../../protos/protos';
+import {BigtableDate, ExecuteQueryParameterValue} from './values';
+import * as SqlTypes from './types';
+import * as is from 'is';
+import Long = require('long');
+
+/**
+ * Creates protobuf objects with explicit types from passed parameters.
+ * The protobuf Value objects have a field describing their type explicitly.
+ * For each param we create a Value object based on the provided type.
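+ * For example (illustrative values): parameters {id: 1n} with
+ * parameterTypes {id: SqlTypes.Int64()} produce
+ * {id: {intValue: <Long 1>, type: {int64Type: {}}}}.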
+ * @param parameters map from parameter name to parameter value
+ * @param parameterTypes map from parameter name to parameter type
+ * @returns map from parameter name to a Value object
+ */
+export function parseParameters(
+  parameters: {[param: string]: ExecuteQueryParameterValue},
+  parameterTypes: {[param: string]: SqlTypes.Type},
+): {[param: string]: google.bigtable.v2.IValue} {
+  // Assert both objects contain the same keys:
+  const parameterKeys = Object.keys(parameters);
+  const parameterTypeKeys = Object.keys(parameterTypes);
+  if (parameterKeys.length !== parameterTypeKeys.length) {
+    throw new Error(
+      `Number of parameters (${parameterKeys.length}) does not match number of parameter types (${parameterTypeKeys.length}).`,
+    );
+  }
+  // if the numbers of keys match, but keys differ we will catch it in the next step
+  const entries: [string, google.bigtable.v2.IValue][] = [];
+  for (const [key, value] of Object.entries(parameters)) {
+    let type: SqlTypes.Type;
+    if (Object.prototype.hasOwnProperty.call(parameterTypes, key)) {
+      type = parameterTypes[key];
+    } else {
+      throw new Error(`Unrecognized parameter: ${key}`);
+    }
+
+    entries.push([key, setTypeField(convertJsValueToValue(value, type), type)]);
+  }
+  return Object.fromEntries(entries);
+}
+
+export function parseParameterTypes(parameter_types: {
+  [param: string]: SqlTypes.Type;
+}): {[param: string]: google.bigtable.v2.IType} {
+  return Object.fromEntries(
+    Object.entries(parameter_types).map(([key, value]) => [
+      key,
+      executeQueryTypeToPBType(value),
+    ]),
+  );
+}
+
+function inferType(value: ExecuteQueryParameterValue): SqlTypes.Type {
+  if (is.number(value)) {
+    return SqlTypes.Float64();
+  } else if (typeof value === 'bigint') {
+    return SqlTypes.Int64();
+  } else if (is.string(value)) {
+    return SqlTypes.String();
+  } else if (is.boolean(value)) {
+    return SqlTypes.Bool();
+  } else if (is.array(value)) {
+    // eslint-disable-next-line
+    throw new Error(
+      `Cannot infer type of an array ${value}. Please provide a type hint using parameterTypes.`,
+    );
+  } else if (typeof value === 'object') {
+    if (value instanceof Uint8Array) {
+      return SqlTypes.Bytes();
+    } else if (value instanceof PreciseDate) {
+      return SqlTypes.Timestamp();
+    } else if (value instanceof Date) {
+      throw new Error(
+        'Date is not supported as a parameter type. Please use PreciseDate for SQL TIMESTAMP or BigtableDate for SQL DATE',
+      );
+    } else if (value instanceof BigtableDate) {
+      return SqlTypes.Date();
+    }
+  }
+
+  const typeString = typeof value;
+  let prototypeString = null;
+  if (typeString === 'object') {
+    if (value === null) {
+      prototypeString = 'null';
+    } else {
+      prototypeString = `constructor.name = ${value.constructor.name}`;
+    }
+  }
+  const typeInfo = `typeof == ${typeString}${
+    prototypeString ? `, ${prototypeString}` : ''
+  }`;
+  throw new Error(
+    `Cannot infer type of ${value} (${typeInfo}). Please provide a type hint using parameterTypes.`,
+  );
+}
+
+export function setTypeField(
+  value: google.bigtable.v2.IValue,
+  type: SqlTypes.Type,
+): google.bigtable.v2.IValue {
+  value.type = executeQueryTypeToPBType(type);
+  return value;
+}
+
+export function executeQueryTypeToPBType(
+  type: SqlTypes.Type,
+): google.bigtable.v2.IType {
+  switch (type.type) {
+    case 'string':
+      return {stringType: {}};
+    case 'int64':
+      return {int64Type: {}};
+    case 'float32':
+      return {float32Type: {}};
+    case 'float64':
+      return {float64Type: {}};
+    case 'bytes':
+      return {bytesType: {}};
+    case 'bool':
+      return {boolType: {}};
+    case 'timestamp':
+      return {timestampType: {}};
+    case 'date':
+      return {dateType: {}};
+    case 'array':
+      return {
+        arrayType: {elementType: executeQueryTypeToPBType(type.elementType)},
+      };
+    case 'struct':
+      return {
+        structType: {
+          fields: type.values.map((value, index) => ({
+            fieldName: type.getFieldNameAtIndex(index),
+            type: executeQueryTypeToPBType(value),
+          })),
+        },
+      };
+    case 'map':
+      return {
+        mapType: {
+          keyType: executeQueryTypeToPBType(type.keyType),
+          valueType: executeQueryTypeToPBType(type.valueType),
+        },
+      };
+  }
+}
+
+export function convertJsValueToValue(
+  value: ExecuteQueryParameterValue,
+  type: SqlTypes.Type,
+): google.bigtable.v2.IValue {
+  if (value === null) {
+    return {};
+  }
+
+  switch (type.type) {
+    case 'string':
+      return convertToString(value);
+    case 'int64':
+      return convertToInt64(value);
+    case 'float32':
+      return convertToFloat64(value);
+    case 'float64':
+      return convertToFloat64(value);
+    case 'bytes':
+      return convertToBytes(value);
+    case 'bool':
+      return convertToBool(value);
+    case 'timestamp':
+      return convertToTimestamp(value);
+    case 'date':
+      return convertToDate(value);
+    case 'array':
+      return convertToArray(value, type);
+    case 'struct':
+      throw new Error('Struct is not a supported query param type');
+    case 'map':
+      throw new Error('Map is not a supported query param type');
+  }
+}
+
+function convertToString(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (is.string(value)) {
+    return {stringValue: value as string};
+  }
+  throw new Error(`Value ${value} cannot be converted to string.`);
+}
+
+const MAX_LONG = BigInt(Long.MAX_VALUE.toString());
+const MIN_LONG = BigInt(Long.MIN_VALUE.toString());
+
+function bigintToLong(value: bigint): Long {
+  // Long fromString does not check this
+  if (value > MAX_LONG || value < MIN_LONG) {
+    throw new Error(
+      `Value ${value} cannot be converted to int64 - it is out of range.`,
+    );
+  }
+  return Long.fromString(value.toString());
+}
+
+function convertToInt64(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'bigint') {
+    return {
+      intValue: bigintToLong(value),
+    };
+  } else if (typeof value === 'number') {
+    throw new Error(
+      `Value ${value} cannot be converted to int64 - argument of type INT64 should be passed as BigInt.`,
+    );
+  }
+  throw new Error(`Value ${value} cannot be converted to int64.`);
+}
+
+function convertToFloat64(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'number') {
+    return {floatValue: value};
+  }
+  throw new Error(`Value ${value} cannot be converted to float64.`);
+}
+
+function convertToBytes(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'object' && value instanceof Uint8Array) {
+    return {bytesValue: value};
+  }
+  throw new Error(`Value ${value} cannot be converted to bytes.`);
+}
+
+function convertToBool(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'boolean') {
+    return {boolValue: value};
+  }
+  throw new Error(`Value ${value} cannot be converted to boolean.`);
+}
+
+function convertToTimestamp(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'object' && value instanceof PreciseDate) {
+    return {timestampValue: value.toStruct()};
+  }
+  throw new Error(
+    `Value ${value} cannot be converted to timestamp, please use PreciseDate instead.`,
+  );
+}
+
+function convertToDate(
+  value: ExecuteQueryParameterValue,
+): google.bigtable.v2.IValue {
+  if (typeof value === 'object' && value instanceof BigtableDate) {
+    return {dateValue: value};
+  }
+  throw new Error(`Value ${value} cannot be converted to date.`);
+}
+
+function convertToArray(
+  value: ExecuteQueryParameterValue,
+  type: SqlTypes.ArrayType,
+): google.bigtable.v2.IValue {
+  if (!is.array(value)) {
+    throw new Error(`Value ${value} cannot be converted to an array.`);
+  }
+  const arrayValue = value as Array<ExecuteQueryParameterValue>;
+  return {
+    arrayValue: {
+      values: arrayValue.map((element, index) => {
+        try {
+          return convertJsValueToValue(element, type.elementType);
+          // eslint-disable-next-line
+        } catch (conversionError: any) {
+          if (conversionError instanceof Error) {
+            throw new Error(
+              `Error while converting element ${index} of an array: ${conversionError.message}`,
+            );
+          } else {
+            throw conversionError;
+          }
+        }
+      }),
+    },
+  };
+}
diff --git a/src/execute-query/preparedstatement.ts b/src/execute-query/preparedstatement.ts
new file mode 100644
index 000000000..8a2dbc040
--- /dev/null
+++ b/src/execute-query/preparedstatement.ts
@@ -0,0 +1,287 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+import {Bigtable} from '..';
+import * as SqlTypes from './types';
+import {PreciseDate} from '@google-cloud/precise-date';
+
+import {google} from '../../protos/protos';
+import {MetadataConsumer} from './metadataconsumer';
+import {EventEmitter} from 'events';
+import {ServiceError, CallOptions} from 'google-gax';
+
+export const SHOULD_REFRESH_SOON_PERIOD_MS = 1000;
+
+export type PreparedStatementDataCallback = (
+  err?: Error,
+  preparedQueryBytes?: Uint8Array | string,
+  metadata?: SqlTypes.ResultSetMetadata,
+) => void;
+
+interface IRetryRequest {
+  client: string;
+  method: string;
+  reqOpts: google.bigtable.v2.IPrepareQueryRequest;
+  retryOpts?: CallOptions;
+}
+
+/**
+ * This object keeps track of the query plan a.k.a. metadata and preparedQuery bytes.
+ * It provides a way of retrieving the most recently fetched query plan.
+ * If a query plan is marked as expired, it will be refreshed.
+ * You can get the query plan via the getData method.
+ * If the query plan is not expired, getData will return the value immediately.
+ * If the object is marked as expired, getting the query plan will wait for
+ * a refresh to happen. If the refresh fails, all awaiting getData calls
+ * also return an error.
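+ *
+ * Illustrative usage sketch (names are placeholders):
+ *   preparedStatement.getData((err, preparedQueryBytes, metadata) => {
+ *     // either err is set, or the latest plan bytes and parsed metadata arrive
+ *   }, timeoutMs);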
+ */
+export class PreparedStatement extends EventEmitter {
+  private bigtable: Bigtable;
+  private retryRequest: IRetryRequest;
+  private metadata: SqlTypes.ResultSetMetadata;
+  private preparedQueryBytes: Uint8Array | string;
+  private validUntilTimestamp: number | null;
+  private forcedExpiration: boolean;
+  private isRefreshing: boolean;
+  private timer: NodeJS.Timeout | null;
+  private lastRefreshError: ServiceError | Error | null;
+  private parameterTypes: {[param: string]: SqlTypes.Type};
+
+  constructor(
+    bigtable: Bigtable,
+    response: google.bigtable.v2.PrepareQueryResponse,
+    retryRequest: IRetryRequest,
+    parameterTypes: {[param: string]: SqlTypes.Type},
+  ) {
+    super();
+    this.bigtable = bigtable;
+    this.metadata = MetadataConsumer.parseMetadata(response.metadata!);
+    this.preparedQueryBytes = response.preparedQuery;
+    this.validUntilTimestamp = timestampFromResponse(response);
+    this.timer = null;
+    this.isRefreshing = false;
+    this.lastRefreshError = null;
+    this.forcedExpiration = false;
+    this.retryRequest = retryRequest;
+    this.parameterTypes = parameterTypes;
+  }
+
+  /**
+   * Returns true if the validUntilTimestamp is close,
+   * meaning less than SHOULD_REFRESH_SOON_PERIOD_MS away.
+   */
+  private shouldRefreshSoon = (): boolean => {
+    if (!this.validUntilTimestamp) {
+      return false;
+    }
+    return (
+      Date.now() > this.validUntilTimestamp - SHOULD_REFRESH_SOON_PERIOD_MS
+    );
+  };
+
+  /**
+   * Schedules the refresh. It is deferred to the next tick to ensure
+   * that the current call stack is finished before a request to bigtable is made.
+   */
+  private setupTimer = (): void => {
+    this.timer = setTimeout(this.handleTimerEnd, 0);
+  };
+
+  private discardTimer = (): void => {
+    if (this.timer) {
+      clearTimeout(this.timer);
+      this.timer = null;
+    }
+  };
+
+  /**
+   * Performs a request to bigtable to get a refreshed query plan.
+   */
+  private startRefreshing = (): void => {
+    this.isRefreshing = true;
+    this.bigtable.request(this.retryRequest, this.handlePrepareQueryResponse);
+  };
+
+  /**
+   * Begins the refresh.
+   */
+  private handleTimerEnd = (): void => {
+    if (!this.isRefreshing) {
+      this.discardTimer();
+      this.startRefreshing();
+    }
+  };
+
+  /**
+   * Callback for handling the call to bigtable.
+   */
+  private handlePrepareQueryResponse = (
+    err: ServiceError | null,
+    response?: google.bigtable.v2.PrepareQueryResponse,
+  ): void => {
+    if (this.isRefreshing) {
+      this.isRefreshing = false;
+      this.discardTimer();
+      if (err) {
+        this.lastRefreshError = err;
+        this.emit('refreshDone');
+      } else {
+        try {
+          this.lastRefreshError = null;
+          this.forcedExpiration = false;
+          this.validUntilTimestamp = timestampFromResponse(response!);
+          this.metadata = MetadataConsumer.parseMetadata(response!.metadata!);
+          this.preparedQueryBytes = response!.preparedQuery;
+        } catch (err: any) {
+          this.lastRefreshError = err as Error;
+        }
+        this.emit('refreshDone');
+      }
+    } else {
+      const err = new Error(
+        'Invalid state: PrepareQueryResponse received when not refreshing.',
+      );
+      console.error(err);
+      throw err;
+    }
+  };
+
+  /**
+   * Invoked when the query plan is retrieved from this object.
+   */
+  private scheduleRefreshIfNeeded = (): void => {
+    if (!this.isRefreshing && this.timer === null) {
+      if (this.isExpired() || this.shouldRefreshSoon()) {
+        this.setupTimer();
+      } // else noop
+    } // else noop
+  };
+
+  /**
+   * This function should be called when the server returns
+   * the FAILED_PRECONDITION error saying the query plan
+   * is expired. For more info refer to the ExecuteQueryStateMachine.
+   */
+  markAsExpired = (): void => {
+    this.forcedExpiration = true;
+  };
+
+  /**
+   * Used for retrieving the query plan (preparedQuery bytes and metadata)
+   * @param callback called when the query plan is available
+   * @param timeoutMs time after which the callback is called with an error.
+   */
+  getData = (
+    callback: PreparedStatementDataCallback,
+    timeoutMs: number,
+  ): void => {
+    this.scheduleRefreshIfNeeded();
+    if (this.isExpired()) {
+      const listener = new CallbackWithTimeout(callback, timeoutMs);
+      this.once('refreshDone', () => {
+        // If there are many listeners, the query plan could have expired again
+        // before we got to processing this one, so we have to check it again.
+        if (this.isExpired() || this.lastRefreshError) {
+          listener.tryInvoke(
+            this.lastRefreshError ||
+              new Error('Getting a fresh query plan failed.'),
+            undefined,
+            undefined,
+          );
+        } else {
+          listener.tryInvoke(undefined, this.preparedQueryBytes, this.metadata);
+        }
+      });
+    } else {
+      // for the sake of consistency we should call the callback asynchronously
+      // regardless of whether the plan needs refreshing or not.
+      setTimeout(
+        () => callback(undefined, this.preparedQueryBytes, this.metadata),
+        0,
+      );
+    }
+  };
+
+  /**
+   * @returns parameter types used to create the query plan
+   */
+  getParameterTypes = (): {[param: string]: SqlTypes.Type} =>
+    this.parameterTypes;
+
+  /**
+   * @returns true if the object has been marked as expired.
+   */
+  isExpired = (): boolean => {
+    return this.forcedExpiration;
+  };
+}
+
+/**
+ * This class makes sure the callback is called only once.
+ * If the timeout expired, the callback is called with a "Timeout Expired" error.
+ * Otherwise it is called with the provided args.
+ */
+class CallbackWithTimeout {
+  private callback: PreparedStatementDataCallback | null;
+  private timer: NodeJS.Timeout | null;
+  private isValid: boolean;
+
+  constructor(callback: PreparedStatementDataCallback, timeout: number) {
+    this.callback = callback;
+    this.isValid = true;
+    this.timer = setTimeout(() => {
+      this.tryInvoke(
+        new Error(
+          'Deadline Exceeded waiting for prepared statement to refresh.',
+        ),
+      );
+    }, timeout);
+  }
+
+  /**
+   * If this object has not yet been invalidated, the callback is called.
+   * @param args
+   */
+  tryInvoke(...args: Parameters<PreparedStatementDataCallback>): void {
+    if (!this.isValid || !this.callback) {
+      return;
+    }
+    const callback = this.callback;
+    this.invalidate();
+    callback(...args);
+  }
+
+  /**
+   * After this method is called, the callback can no longer be invoked.
+   */
+  private invalidate(): void {
+    if (this.timer) {
+      clearTimeout(this.timer);
+      this.timer = null;
+    }
+    this.callback = null;
+    this.isValid = false;
+  }
+}
+
+function timestampFromResponse(
+  response: google.bigtable.v2.PrepareQueryResponse,
+): number | null {
+  if (!response.validUntil?.seconds) {
+    return null;
+  }
+  return new PreciseDate({
+    seconds: response.validUntil?.seconds ?? undefined,
+    nanos: response.validUntil?.nanos ?? undefined,
+  }).getTime();
+}
diff --git a/src/execute-query/protobufreadertransformer.ts b/src/execute-query/protobufreadertransformer.ts
new file mode 100644
index 000000000..f98517d83
--- /dev/null
+++ b/src/execute-query/protobufreadertransformer.ts
@@ -0,0 +1,135 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {Transform, TransformOptions, TransformCallback} from 'stream';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+import {google} from '../../protos/protos';
+import {MetadataConsumer} from './metadataconsumer';
+
+class DrainGuard {
+  callback: () => void;
+
+  constructor(callback: () => void) {
+    this.callback = callback;
+  }
+}
+
+export type BatchAndToken = [Buffer[], Uint8Array];
+
+/**
+ * This transformer is responsible for deserializing bytes sent from the
+ * server to an appropriate object which can be used to construct rows.
+ * Right now only google.bigtable.v2.ProtoRows is supported.
+ */
+export class ProtobufReaderTransformer extends Transform {
+  metadataConsumer: MetadataConsumer;
+  resumeToken: Uint8Array | null;
+
+  constructor(metadataConsumer: MetadataConsumer, opts?: TransformOptions) {
+    super({...opts, objectMode: true, highWaterMark: 1024});
+    this.metadataConsumer = metadataConsumer;
+    this.resumeToken = null;
+  }
+
+  _transform(
+    batchAndToken: BatchAndToken | DrainGuard,
+    _encoding: BufferEncoding,
+    callback: TransformCallback,
+  ) {
+    if (batchAndToken instanceof DrainGuard) {
+      batchAndToken.callback();
+    } else {
+      const maybeMetadata = this.metadataConsumer.getMetadata();
+      if (maybeMetadata) {
+        const [batches, resumeToken] = batchAndToken;
+        this.resumeToken = resumeToken;
+        const valuesBuffer: google.bigtable.v2.IValue[] = [];
+
+        const expectedLength = maybeMetadata.columns.length;
+
+        for (let batchIdx = 0; batchIdx < batches.length; batchIdx++) {
+          const batch = batches[batchIdx];
+          const protoRows = google.bigtable.v2.ProtoRows.decode(batch);
+
+          if (protoRows.values.length % expectedLength) {
+            callback(new Error('Internal error - received incomplete row.'));
+            return;
+          }
+
+          for (
+            let valueIdx = 0;
+            valueIdx < protoRows.values.length;
+            valueIdx++
+          ) {
+            valuesBuffer.push(protoRows.values[valueIdx]);
+          }
+        }
+
+        if (valuesBuffer.length > 0) {
+          for (let i = 0; i < valuesBuffer.length; i += expectedLength) {
+            this.push(valuesBuffer.slice(i, i + expectedLength));
+          }
+        }
+      } else {
+        return callback(new Error('Internal error - missing metadata'));
+      }
+    }
+
+    callback();
+  }
+
+  /**
+   * @param callback guaranteed to be called **after** the last message
+   * was processed.
+   */
+  onDrain = (callback: () => void) => {
+    // Writable streams keep a buffer of objects to process (in the case of a
+    // Transform, processing means calling the _transform() method). Readable streams
+    // keep a buffer of objects to be read by downstream processors. Transforms are both Writable and
+    // Readable, thus they have one buffer for parameters to, and one buffer for results of,
+    // the _transform() method.
+    // Objects can end up in a writable's buffer if they are written after a previous write call
+    // returned false but before the 'drain' event is emitted, or when a write happens while
+    // another object is being processed.
+    // Objects can end up in a readable's buffer if there are no downstream processors ready to accept
+    // new objects (either there are none or at least one of them is paused).
+    //
+    // Our data pipeline looks as follows:
+    // bigtable stream -> ByteBuffer transform -> Reader transform -> ...
+    //
+    // But if we include buffers in this diagram this becomes more complicated:
+    //
+    // (bigtable stream -> [readable buffer])
+    //   -> ([writable buffer] -> ByteBuffer transform() -> [readable buffer])
+    //   -> ([writable buffer] -> Reader transform() -> [readable buffer])
+    //   -> ...
+    //
+    // and each of these buffers can hold requests that either were (in a readable buffer)
+    // or were not (in a writable buffer) already passed to the _transform() method.
+    // During the retry we have to recreate the bigtable stream and discard all data stored in
+    // the ByteBuffer, and perform a new request with an appropriate resumeToken.
+    //
+    // Passing an appropriate resumeToken is crucial to prevent duplicate or lost responses.
+    //
+    // So, how to obtain a resumption token? Let's try a few options:
+    // We cannot take the last resumeToken that was seen by the ByteBuffer's _transform() method
+    // - it is possible that there are some unprocessed events in the ByteBuffer's writable buffer
+    // that will be processed at some point.
+    // The same applies to the Reader's _transform(): writable buffers are still there.
+    //
+    // Thus we see that we have to consider events waiting in the buffers and wait until they are
+    // processed.
+    this.write(new DrainGuard(callback));
+  };
+}
diff --git a/src/execute-query/queryresultrowtransformer.ts b/src/execute-query/queryresultrowtransformer.ts
new file mode 100644
index 000000000..f2b3cdb25
--- /dev/null
+++ b/src/execute-query/queryresultrowtransformer.ts
@@ -0,0 +1,255 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+import {google} from '../../protos/protos';
+import Long = require('long');
+
+import {
+  EncodedKeyMap,
+  BigtableDate,
+  ExecuteQueryStreamWithMetadata,
+  SqlValue,
+  QueryResultRow,
+  Struct,
+  ensureUint8Array,
+} from './values';
+import {ResultSetMetadata, Type} from './types';
+import {PreciseDate} from '@google-cloud/precise-date';
+import assert = require('assert');
+import {Transform, TransformCallback, TransformOptions} from 'stream';
+import {MetadataConsumer} from './metadataconsumer';
+import {FieldMapping} from './namedlist';
+
+/**
+ * Class representing a readable stream with ExecuteQuery results
+ * which also lets the caller get metadata.
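+ *
+ * Illustrative consumption sketch (the stream comes from
+ * Instance#createExecuteQueryStream; names are placeholders):
+ *   stream.on('data', (row: QueryResultRow) => row.get('columnName'));
+ *   stream.on('end', () => console.log(stream.getMetadata()));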
+ */ +export class ExecuteQueryStreamTransformWithMetadata + extends Transform + implements ExecuteQueryStreamWithMetadata +{ + metadataConsumer: MetadataConsumer; + fieldMapping: FieldMapping | null; + hasCallerCancelled: () => boolean; + protoBytesEncoding?: BufferEncoding; + + constructor( + metadataConsumer: MetadataConsumer, + hasCallerCancelled: () => boolean, + protoBytesEncoding?: BufferEncoding, + opts?: TransformOptions, + ) { + super({...opts, objectMode: true, highWaterMark: 0}); + this.fieldMapping = null; + this.metadataConsumer = metadataConsumer; + this.hasCallerCancelled = hasCallerCancelled; + this.protoBytesEncoding = protoBytesEncoding; + } + + valueToJsType(value: google.bigtable.v2.Value, metadata: Type): SqlValue { + if (!value.kind) { + return null; + } + switch (metadata.type) { + case 'bytes': + if (value.kind === 'bytesValue') { + return ensureUint8Array(value.bytesValue!, this.protoBytesEncoding); + } + break; + case 'string': + if (value.kind === 'stringValue') { + return value.stringValue!; + } + break; + case 'int64': + if (value.kind === 'intValue') { + return intValueToBigInt(value.intValue!); + } + break; + case 'bool': + if (value.kind === 'boolValue') { + return value.boolValue!; + } + break; + case 'float32': + case 'float64': + if (value.kind === 'floatValue') { + return value.floatValue!; + } + break; + case 'timestamp': + if (value.kind === 'timestampValue') { + return new PreciseDate({ + seconds: value.timestampValue!.seconds ?? undefined, + nanos: value.timestampValue!.nanos ?? undefined, + }); + } + break; + case 'date': + if (value.kind === 'dateValue') { + return new BigtableDate( + value.dateValue!.year || 0, + value.dateValue!.month || 0, + value.dateValue!.day || 0, + ); + } + break; + case 'array': + return this.valueToJsArray(value, metadata); + case 'struct': + return this.valueToJsStruct(value, metadata); + case 'map': + return this.valueToJsMap(value, metadata); + default: + throw new Error( + `Unexpected type to parse: ${JSON.stringify(metadata)}`, + ); + } + throw new Error(`Metadata and Value not matching. 
+ Metadata:${metadata} + Value:${value}`); + } + + valueToJsArray(value: google.bigtable.v2.Value, metadata: Type): SqlValue { + assert(metadata.type === 'array'); + if ( + value.arrayValue === null || + value.arrayValue === undefined || + value.arrayValue.values === null || + value.arrayValue.values === undefined + ) { + return null; + } + return value.arrayValue.values.map(value => + this.valueToJsType( + value as google.bigtable.v2.Value, + metadata.elementType, + ), + ); + } + + valueToJsStruct(value: google.bigtable.v2.Value, metadata: Type): SqlValue { + assert(metadata.type === 'struct'); + if ( + value.arrayValue === null || + value.arrayValue === undefined || + value.arrayValue.values === null || + value.arrayValue.values === undefined + ) { + return null; + } + if (value.arrayValue.values.length !== metadata.values.length) { + throw new Error( + `Internal error - received Struct with ${value.arrayValue.values.length} values, but metadata has ${metadata.values.length} fields.`, + ); + } + return new Struct( + value.arrayValue.values.map((value, index) => + this.valueToJsType( + value as google.bigtable.v2.Value, + metadata.get(index), + ), + ), + metadata.fieldMapping, + ); + } + + valueToJsMap(value: google.bigtable.v2.Value, metadata: Type): SqlValue { + assert(metadata.type === 'map'); + if ( + value.arrayValue === null || + value.arrayValue === undefined || + value.arrayValue.values === null || + value.arrayValue.values === undefined + ) { + return null; + } + if ( + metadata.keyType.type !== 'int64' && + metadata.keyType.type !== 'string' && + metadata.keyType.type !== 'bytes' + ) { + throw new Error( + `Internal error - unsupported type of key received: ${metadata.keyType.type}`, + ); + } + const values: google.bigtable.v2.Value[] = value.arrayValue + .values as google.bigtable.v2.Value[]; + return new EncodedKeyMap( + values.map(value => { + // Types are ensured by checking metadata.keyType.type earlier. 
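+        // Each map entry arrives as an arrayValue holding a [key, value] pair,
+        // so pair[0] is decoded with keyType and pair[1] with valueType.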
+        const pair = value?.arrayValue?.values as google.bigtable.v2.Value[];
+        const keyValue = this.valueToJsType(pair[0], metadata.keyType) as
+          | bigint
+          | string
+          | Uint8Array
+          | null;
+        return [keyValue, this.valueToJsType(pair[1], metadata.valueType)];
+      }),
+    );
+  }
+
+  getFieldMapping(): FieldMapping {
+    if (this.fieldMapping === null) {
+      const metadata = this.getMetadata();
+      if (metadata === null) {
+        throw new Error('Metadata was not sent by the server.');
+      }
+      this.fieldMapping = metadata.fieldMapping;
+    }
+    return this.fieldMapping;
+  }
+
+  _transform(
+    chunk: Array<google.bigtable.v2.Value>,
+    _encoding: BufferEncoding,
+    callback: TransformCallback,
+  ) {
+    let error: Error | null = null;
+    try {
+      if (!this.hasCallerCancelled()) {
+        const maybeMetadata = this.metadataConsumer.getMetadata();
+        if (maybeMetadata) {
+          this.push(
+            new QueryResultRow(
+              chunk.map((value, index) =>
+                this.valueToJsType(value, maybeMetadata.get(index)),
+              ),
+              this.getFieldMapping(),
+            ),
+          );
+        } else {
+          throw new Error(
+            'Server error - expected to receive metadata by now.',
+          );
+        }
+      }
+    } catch (e) {
+      error = e as Error;
+    }
+    callback(error);
+  }
+
+  getMetadata(): ResultSetMetadata | null {
+    return this.metadataConsumer.getMetadata();
+  }
+}
+
+function intValueToBigInt(intValue: string | number | Long): bigint {
+  if (intValue instanceof Long) {
+    return BigInt(intValue.toString());
+  }
+  return BigInt(intValue);
+}
diff --git a/src/execute-query/types.ts b/src/execute-query/types.ts
new file mode 100644
index 000000000..012f7713d
--- /dev/null
+++ b/src/execute-query/types.ts
@@ -0,0 +1,83 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {NamedList} from './namedlist';
+
+export type ArrayType = {type: 'array'; elementType: Type};
+export type MapType = {type: 'map'; keyType: Type; valueType: Type};
+export type FieldType = {name: string | null; type: Type};
+export class StructType extends NamedList<Type> {
+  type: 'struct' = 'struct' as const;
+
+  static fromTuples(tuples: [string | null, Type][]): StructType {
+    return NamedList._fromTuples(StructType, tuples);
+  }
+}
+
+export type Int64Type = ReturnType<typeof Int64>;
+export type Float64Type = ReturnType<typeof Float64>;
+export type Float32Type = ReturnType<typeof Float32>;
+export type BytesType = ReturnType<typeof Bytes>;
+export type StringType = ReturnType<typeof String>;
+export type BoolType = ReturnType<typeof Bool>;
+export type TimestampType = ReturnType<typeof Timestamp>;
+export type DateType = ReturnType<typeof Date>;
+
+/**
+ * Factory functions are provided instead of constants
+ * for all types for coherence and for extensibility
+ * (we need parameters at least for arrays, structs and maps)
+ */
+export const Int64 = () => ({type: 'int64' as const});
+export const Float64 = () => ({type: 'float64' as const});
+export const Float32 = () => ({type: 'float32' as const});
+export const Bytes = () => ({type: 'bytes' as const});
+export const String = () => ({type: 'string' as const});
+export const Bool = () => ({type: 'bool' as const});
+export const Timestamp = () => ({type: 'timestamp' as const});
+export const Date = () => ({type: 'date' as const});
+export const Struct = (...fields: FieldType[]): StructType =>
+  StructType.fromTuples(fields.map(value => [value.name, value.type]));
+export const Array = (elementType: Type): ArrayType => ({
+  type: 'array' as const,
+  elementType,
+});
+export const Map = (keyType: Type, valueType: Type): MapType => ({
+  type: 'map' as const,
+  keyType,
+  valueType,
+});
+
+export type Type =
+  | Int64Type
+  | Float32Type
+  | Float64Type
+  | BytesType
+  | StringType
+  | BoolType
+  | TimestampType
+  | DateType
+  | ArrayType
+  | StructType
+  | MapType;
+
+export class ResultSetMetadata extends NamedList<Type> {
+  get columns(): Array<Type> {
+    return this.values;
+  }
+
+  static fromTuples(tuples: [string | null, Type][]): ResultSetMetadata {
+    return NamedList._fromTuples(ResultSetMetadata, tuples);
+  }
+}
diff --git a/src/execute-query/values.ts b/src/execute-query/values.ts
new file mode 100644
index 000000000..66eca4aa9
--- /dev/null
+++ b/src/execute-query/values.ts
@@ -0,0 +1,224 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
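+
+// Illustrative sketch: the SqlTypes factories defined in './types' compose
+// into parameter type hints, e.g. for PrepareStatementOptions.parameterTypes
+// (parameter names below are placeholders):
+//
+//   const parameterTypes: {[param: string]: SqlTypes.Type} = {
+//     userId: SqlTypes.Int64(),
+//     tags: SqlTypes.Array(SqlTypes.String()),
+//     scores: SqlTypes.Map(SqlTypes.String(), SqlTypes.Float64()),
+//   };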
+
+import {Duplex, Readable} from 'stream';
+import * as SqlTypes from './types';
+import {PreciseDate} from '@google-cloud/precise-date';
+import {NamedList} from './namedlist';
+const CRC32C = require('fast-crc32c');
+
+export type BigtableMap = EncodedKeyMap;
+
+export type ExecuteQueryParameterValue =
+  | Uint8Array
+  | string
+  | bigint
+  | number
+  | boolean
+  | PreciseDate
+  | BigtableDate
+  | Array<ExecuteQueryParameterValue>
+  | null;
+
+export type SqlValue =
+  | ExecuteQueryParameterValue
+  | Struct
+  | Array<SqlValue>
+  | BigtableMap;
+
+/**
+ * A custom class created to allow setting year, month and day to 0.
+ */
+export class BigtableDate {
+  year: number;
+  month: number;
+  day: number;
+
+  constructor(year: number, month: number, day: number) {
+    if (year < 0) {
+      throw new Error('Invalid year.');
+    }
+    this.year = year;
+
+    if (month < 0 || month > 12) {
+      throw new Error('Invalid month.');
+    }
+    this.month = month;
+
+    if (day < 0 || day > 31) {
+      throw new Error('Invalid day.');
+    }
+    this.day = day;
+  }
+}
+
+export class QueryResultRow extends NamedList<SqlValue> {}
+
+export class Struct extends NamedList<SqlValue> {
+  static fromTuples(tuples: [string | null, SqlValue][]): Struct {
+    return NamedList._fromTuples(Struct, tuples);
+  }
+}
+
+export interface ExecuteQueryStreamWithMetadata extends Duplex {
+  getMetadata: () => SqlTypes.ResultSetMetadata | null;
+  end: () => this;
+}
+
+export function checksumValid(
+  buffer: Buffer,
+  expectedChecksum: number,
+): boolean {
+  return CRC32C.calculate(buffer) === expectedChecksum;
+}
+
+export function ensureUint8Array(
+  bytes: Uint8Array | string,
+  encoding?: BufferEncoding,
+): Uint8Array {
+  return bytes instanceof Uint8Array
+    ? bytes
+    : Buffer.from(bytes, encoding || 'base64');
+}
+
+function _parseBufferToMapKey(
+  key: bigint | string | Uint8Array | null,
+): bigint | string | null {
+  // A Buffer is always an instance of Uint8Array; we keep both checks
+  // in the if condition for the TS linter's sake.
+  if (key instanceof Buffer || key instanceof Uint8Array) {
+    const base64Key = key.toString('base64');
+    return base64Key;
+  } else if (key === null) {
+    return null;
+  } else {
+    return key;
+  }
+}
+
+type MapKey = bigint | string | Uint8Array | null;
+// internal representation of the map contains the original key and value tuple
+type _MapValue = [MapKey, SqlValue];
+
+export class EncodedKeyMap
+  implements Map<bigint | string | Uint8Array | null, SqlValue>
+{
+  private map_impl: Map<bigint | string | null, _MapValue>;
+  /**
+   * Class representing a Map Value returned by ExecuteQuery. Native JS Map
+   * does not support Buffer comparison - it compares object identity and not
+   * buffer values. This class solves this by encoding Buffer keys as
+   * base64 strings.
+   * Please note that an empty string and an empty buffer have the same
+   * representation; however, we do not ever use mixed key types (all keys are
+   * always all buffers or all strings) so we don't need to handle this.
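+   * For example, map.set(Buffer.from('k'), 1n) and map.get(Buffer.from('k'))
+   * address the same entry, because both Buffers encode to the same base64
+   * string key internally.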
+   */
+  constructor(
+    entries?: ReadonlyArray<
+      [bigint | string | Uint8Array | null, SqlValue]
+    > | null,
+  ) {
+    if (entries) {
+      // Process entries to encode Buffer keys as base64
+      const processedEntries = entries.map(([key, value]) => {
+        return [_parseBufferToMapKey(key), [key, value]];
+      }) as ReadonlyArray<[bigint | string, _MapValue]>;
+      this.map_impl = new Map<bigint | string | null, _MapValue>(
+        processedEntries,
+      );
+    } else {
+      this.map_impl = new Map();
+    }
+  }
+
+  clear(): void {
+    this.map_impl.clear();
+  }
+
+  delete(key: string | bigint | Uint8Array | null): boolean {
+    return this.map_impl.delete(_parseBufferToMapKey(key));
+  }
+
+  forEach(
+    callbackfn: (
+      value: SqlValue,
+      key: string | bigint | Uint8Array | null,
+      map: Map<string | bigint | Uint8Array | null, SqlValue>,
+    ) => void,
+    thisArg?: any,
+  ): void {
+    this.map_impl.forEach((value, key) => {
+      callbackfn.call(thisArg, value[1], value[0], this);
+    });
+  }
+
+  has(key: string | bigint | Uint8Array | null): boolean {
+    return this.map_impl.has(_parseBufferToMapKey(key));
+  }
+
+  get size(): number {
+    return this.map_impl.size;
+  }
+
+  entries(): MapIterator<[string | bigint | Uint8Array | null, SqlValue]> {
+    return this.map_impl.values();
+  }
+
+  keys(): MapIterator<string | bigint | Uint8Array | null> {
+    const iterator = this.map_impl.values();
+    return {
+      next: () => {
+        const result = iterator.next();
+        if (result.done) return result;
+        return {value: result.value[0], done: false};
+      },
+      [Symbol.iterator]() {
+        return this;
+      },
+    };
+  }
+
+  values(): MapIterator<SqlValue> {
+    const iterator = this.map_impl.values();
+    return {
+      next: () => {
+        const result = iterator.next();
+        if (result.done) return result;
+        return {value: result.value[1], done: false};
+      },
+      [Symbol.iterator]() {
+        return this;
+      },
+    };
+  }
+
+  [Symbol.iterator](): MapIterator<
+    [string | bigint | Uint8Array | null, SqlValue]
+  > {
+    return this.entries();
+  }
+
+  get [Symbol.toStringTag](): string {
+    return 'EncodedKeyMap';
+  }
+
+  get(key: string | bigint | Uint8Array | null): SqlValue | undefined {
+    return this.map_impl.get(_parseBufferToMapKey(key))?.[1];
+  }
+
+  set(key: string | bigint | Uint8Array | null, value: SqlValue): this {
+    this.map_impl.set(_parseBufferToMapKey(key), [key, value]);
+    return this;
+  }
+}
diff --git a/src/index.ts b/src/index.ts
index d8a501ea1..a5ccdf63d 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -16,9 +16,15 @@ import {replaceProjectIdToken} from '@google-cloud/projectify';
 import {promisifyAll} from '@google-cloud/promisify';
 import arrify = require('arrify');
 import * as extend from 'extend';
-import {GoogleAuth, CallOptions, grpc as gaxVendoredGrpc} from 'google-gax';
+import {
+  GoogleAuth,
+  CallOptions,
+  grpc as gaxVendoredGrpc,
+  ClientOptions,
+} from 'google-gax';
 import * as gax from 'google-gax';
 import * as protos from '../protos/protos';
+import * as SqlTypes from './execute-query/types';
 
 import {AppProfile} from './app-profile';
 import {Cluster} from './cluster';
@@ -36,6 +42,8 @@ import * as v2 from './v2';
 import {PassThrough, Duplex} from 'stream';
 import grpcGcpModule = require('grpc-gcp');
 import {ClusterUtils} from './utils/cluster';
+import {ClientSideMetricsConfigManager} from './client-side-metrics/metrics-config-manager';
+import {GCPMetricsHandler} from './client-side-metrics/gcp-metrics-handler';
 
 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const streamEvents = require('stream-events');
@@ -101,6 +109,8 @@ export interface BigtableOptions extends gax.GoogleAuthOptions {
    * Internal only.
*/ BigtableTableAdminClient?: gax.ClientOptions; + + metricsEnabled?: boolean; } /** @@ -474,6 +484,7 @@ export class Bigtable { static AppProfile: AppProfile; static Instance: Instance; static Cluster: Cluster; + _metricsConfigManager: ClientSideMetricsConfigManager; constructor(options: BigtableOptions = {}) { // Determine what scopes are needed. @@ -578,6 +589,12 @@ export class Bigtable { this.appProfileId = options.appProfileId; this.projectName = `projects/${this.projectId}`; this.shouldReplaceProjectIdToken = this.projectId === '{{projectId}}'; + + const handlers = + options.metricsEnabled === true + ? [new GCPMetricsHandler(options as ClientOptions)] + : []; + this._metricsConfigManager = new ClientSideMetricsConfigManager(handlers); } createInstance( @@ -970,6 +987,7 @@ export class Bigtable { gaxStream .on('error', stream.destroy.bind(stream)) .on('metadata', stream.emit.bind(stream, 'metadata')) + .on('status', stream.emit.bind(stream, 'status')) .on('request', stream.emit.bind(stream, 'request')) .pipe(stream); }); @@ -1120,6 +1138,7 @@ promisifyAll(Bigtable, { module.exports = Bigtable; module.exports.v2 = v2; module.exports.Bigtable = Bigtable; +module.exports.SqlTypes = SqlTypes; export {v2}; export {protos}; @@ -1345,3 +1364,4 @@ export { WaitForReplicationCallback, WaitForReplicationResponse, } from './table'; +export {SqlTypes}; diff --git a/src/instance.ts b/src/instance.ts index d1ca5890f..93984058c 100644 --- a/src/instance.ts +++ b/src/instance.ts @@ -19,6 +19,7 @@ import * as is from 'is'; import * as extend from 'extend'; // eslint-disable-next-line @typescript-eslint/no-var-requires const pumpify = require('pumpify'); +const concat = require('concat-stream'); import snakeCase = require('lodash.snakecase'); import { @@ -70,6 +71,25 @@ import {Backup, RestoreTableCallback, RestoreTableResponse} from './backup'; import {ClusterUtils} from './utils/cluster'; import {AuthorizedView} from './authorized-view'; +import * as SqlTypes from './execute-query/types'; +import { + ExecuteQueryParameterValue, + QueryResultRow, +} from './execute-query/values'; +import {ProtobufReaderTransformer} from './execute-query/protobufreadertransformer'; +import {ExecuteQueryStreamTransformWithMetadata} from './execute-query/queryresultrowtransformer'; +import {ExecuteQueryStreamWithMetadata} from './execute-query/values'; +import { + parseParameters, + parseParameterTypes, +} from './execute-query/parameterparsing'; +import {MetadataConsumer} from './execute-query/metadataconsumer'; +import { + createCallerStream, + ExecuteQueryStateMachine, +} from './execute-query/executequerystatemachine'; +import {PreparedStatement} from './execute-query/preparedstatement'; + export interface ClusterInfo extends BasicClusterConfig { id: string; } @@ -154,6 +174,32 @@ export interface CreateTableFromBackupConfig { gaxOptions?: CallOptions; } +export type ExecuteQueryCallback = ( + err: Error | null, + rows?: QueryResultRow[], +) => void; + +export interface ExecuteQueryOptions { + preparedStatement: PreparedStatement; + parameters?: {[param: string]: ExecuteQueryParameterValue}; + retryOptions?: CallOptions; + encoding?: BufferEncoding; +} +export type ExecuteQueryResponse = [QueryResultRow[]]; + +export type PrepareStatementCallback = ( + err: Error | null, + preparedStatement?: PreparedStatement, +) => void; + +export interface PrepareStatementOptions { + query: string; + parameterTypes?: {[param: string]: SqlTypes.Type}; + retryOptions?: CallOptions; + encoding?: BufferEncoding; +} +export type 
PrepareStatementResponse = [PreparedStatement];
+
 /**
 * Create an Instance object to interact with a Cloud Bigtable instance.
 *
@@ -1486,6 +1532,206 @@ Please use the format 'my-instance' or '${bigtable.projectName}/instances/my-ins
 view(tableName: string, viewName: string): AuthorizedView {
 return new AuthorizedView(this, tableName, viewName);
 }
+
+ prepareStatement(
+ options: PrepareStatementOptions,
+ ): Promise<PrepareStatementResponse>;
+ prepareStatement(
+ options: PrepareStatementOptions,
+ callback: PrepareStatementCallback,
+ ): void;
+ prepareStatement(query: string): Promise<PrepareStatementResponse>;
+ prepareStatement(query: string, callback: PrepareStatementCallback): void;
+ /**
+ * Prepare an SQL query to be executed on an instance.
+ *
+ * @param {?string} [query] The SQL query string to prepare, when called
+ * without an options object.
+ * @param {string} [opts.query] Query string for which we want to construct the preparedStatement object.
+ * @param {object} [opts.parameterTypes] Object mapping names of parameters to their types.
+ * Type hints should be constructed using factory functions such as {@link Int64}
+ * @param {CallOptions} [opts.retryOptions] gax's CallOptions which are passed straight to gax.
+ * The same retry options are also used when automatically refreshing the PreparedStatement.
+ *
+ * @param {function} callback The callback function.
+ * @param {?error} callback.err An error returned while making this request.
+ * @param {?PreparedStatement} callback.preparedStatement The preparedStatement object used to perform the executeQuery operation.
+ *
+ */
+ prepareStatement(
+ queryOrOpts: string | PrepareStatementOptions,
+ callback?: PrepareStatementCallback,
+ ): void | Promise<PrepareStatementResponse> {
+ const opts: PrepareStatementOptions =
+ typeof queryOrOpts === 'string' ? {query: queryOrOpts} : queryOrOpts;
+
+ const protoParamTypes = parseParameterTypes(opts.parameterTypes || {});
+ const request = {
+ client: 'BigtableClient',
+ method: 'prepareQuery',
+ reqOpts: {
+ instanceName: this.name,
+ appProfileId: this.bigtable.appProfileId,
+ query: opts.query,
+ paramTypes: protoParamTypes,
+ },
+ gaxOpts: opts.retryOptions,
+ };
+ this.bigtable.request(request, (...args) => {
+ if (args[0]) {
+ callback!(args[0]);
+ // Stop here so the callback is not invoked a second time below.
+ return;
+ }
+ try {
+ callback!(
+ null,
+ new PreparedStatement(
+ this.bigtable,
+ args[1]!,
+ request,
+ opts.parameterTypes || {},
+ ),
+ );
+ } catch (err) {
+ callback!(err as any, undefined);
+ }
+ });
+ }
+
+ executeQuery(options: ExecuteQueryOptions): Promise<ExecuteQueryResponse>;
+ executeQuery(
+ options: ExecuteQueryOptions,
+ callback: ExecuteQueryCallback,
+ ): void;
+ executeQuery(
+ preparedStatement: PreparedStatement,
+ ): Promise<ExecuteQueryResponse>;
+ executeQuery(
+ preparedStatement: PreparedStatement,
+ callback: ExecuteQueryCallback,
+ ): void;
+ /**
+ * Execute a SQL query on an instance.
+ *
+ * @param {?PreparedStatement} [preparedStatement] PreparedStatement object representing a query
+ * to execute.
+ * @param {?object} [options] Configuration object. See
+ * {@link Instance#createExecuteQueryStream} for a complete list of options.
+ *
+ * @param {function} callback The callback function.
+ * @param {?error} callback.err An error returned while making this request.
+ * @param {?QueryResultRow[]} callback.rows List of rows.
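+ *
+ * @example
+ * // Illustrative sketch only (hypothetical table name); see the included
+ * // sample below for a runnable version.
+ * const [preparedStatement] = await instance.prepareStatement(
+ *   'SELECT * FROM myTable',
+ * );
+ * const [rows] = await instance.executeQuery({preparedStatement});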
+ *
+ * @example include:samples/api-reference-doc-snippets/instance.js
+ * region_tag:bigtable_api_execute_query
+ */
+ executeQuery(
+ preparedStatementOrOpts: PreparedStatement | ExecuteQueryOptions,
+ callback?: ExecuteQueryCallback,
+ ): void | Promise<ExecuteQueryResponse> {
+ let opts: ExecuteQueryOptions;
+ if (preparedStatementOrOpts instanceof PreparedStatement) {
+ opts = {preparedStatement: preparedStatementOrOpts};
+ } else {
+ opts = preparedStatementOrOpts;
+ }
+ const stream = this.createExecuteQueryStream(opts);
+
+ stream.on('error', callback!).pipe(
+ concat((rows: QueryResultRow[]) => {
+ callback!(null, rows);
+ }),
+ );
+ }
+
+ /**
+ * Execute a SQL query on an instance.
+ *
+ * @param {PreparedStatement} [opts.preparedStatement] PreparedStatement for
+ * the SQL query to execute. Query parameters can be referenced in the SQL
+ * using @name notation.
+ * @param {object} [opts] Configuration object.
+ * @param {object} [opts.parameters] Object mapping names of parameters used in the query to JS values.
+ * @param {object} [opts.retryOptions] Retry options used for executing the query. Note that the only values
+ * used are:
+ * - retryOptions.retry.retryCodes
+ * - retryOptions.retry.backoffSettings.maxRetries
+ * - retryOptions.retry.backoffSettings.totalTimeoutMillis
+ * - retryOptions.retry.backoffSettings.maxRetryDelayMillis
+ * - retryOptions.retry.backoffSettings.retryDelayMultiplier
+ * - retryOptions.retry.backoffSettings.initialRetryDelayMillis
+ * @returns {ExecuteQueryStreamWithMetadata}
+ *
+ * @example include:samples/api-reference-doc-snippets/instance.js
+ * region_tag:bigtable_api_create_query_stream
+ */
+ createExecuteQueryStream(
+ opts: ExecuteQueryOptions,
+ ): ExecuteQueryStreamWithMetadata {
+ /**
+ * We create the following streams:
+ * responseStream -> byteBuffer -> readerStream -> resultStream
+ *
+ * The last two (readerStream and resultStream) are connected using pumpify
+ * and returned to the caller.
+ *
+ * When a request is made responseStream and byteBuffer are created,
+ * connected using pumpify and piped to the readerStream.
+ *
+ * On retry, the old responseStream-byteBuffer pair is discarded and a
+ * new pair is created.
+ *
+ * For more info please refer to the comments in the setupRetries function.
+ *
+ */
+ const metadataConsumer = new MetadataConsumer();
+
+ let callerCancelled = false;
+ const setCallerCancelled = (value: boolean) => {
+ callerCancelled = value;
+ };
+ const hasCallerCancelled = () => callerCancelled;
+
+ const resultStream = new ExecuteQueryStreamTransformWithMetadata(
+ metadataConsumer,
+ hasCallerCancelled,
+ opts.encoding,
+ );
+ const protoParams: {[k: string]: google.bigtable.v2.IValue} | null =
+ parseParameters(
+ opts.parameters || {},
+ opts.preparedStatement.getParameterTypes(),
+ );
+
+ const readerStream = new ProtobufReaderTransformer(metadataConsumer);
+
+ const reqOpts: google.bigtable.v2.IExecuteQueryRequest = {
+ instanceName: this.name,
+ appProfileId: this.bigtable.appProfileId,
+ params: protoParams,
+ };
+
+ // This creates a row stream made of two streams connected in series.
+ const callerStream = createCallerStream(
+ readerStream,
+ resultStream,
+ metadataConsumer,
+ setCallerCancelled,
+ );
+
+ const stateMachine = new ExecuteQueryStateMachine(
+ this.bigtable,
+ callerStream,
+ opts.preparedStatement,
+ reqOpts,
+ opts.retryOptions?.retry,
+ opts.encoding,
+ );
+
+ // Keep a reference so the stateMachine is not garbage collected before
+ // the callerStream is.
+ callerStream._stateMachine = stateMachine;
+
+ return callerStream;
+ }
 }
 
 /*!
Developer Documentation
diff --git a/src/interceptor.ts b/src/interceptor.ts
new file mode 100644
index 000000000..c019f6e5e
--- /dev/null
+++ b/src/interceptor.ts
@@ -0,0 +1,86 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {CallOptions} from 'google-gax';
+import {OperationMetricsCollector} from './client-side-metrics/operation-metrics-collector';
+
+// gRPC primitives used to build the client-side metrics interceptor.
+import * as grpcJs from '@grpc/grpc-js';
+import {status as GrpcStatus} from '@grpc/grpc-js';
+
+export type ServerStatus = {
+ metadata: {internalRepr: Map<string, string[]>; options: {}};
+ code: number;
+ details: string;
+};
+
+// Helper to create interceptor provider for OperationMetricsCollector
+function createMetricsInterceptorProvider(
+ collector: OperationMetricsCollector,
+) {
+ return (options: grpcJs.InterceptorOptions, nextCall: grpcJs.NextCall) => {
+ // The interceptor keeps no state of its own: operation and attempt
+ // lifecycle events are recorded by the code that initiates the call.
+ return new grpcJs.InterceptingCall(nextCall(options), {
+ start: (metadata, listener, next) => {
+ // AttemptStart is called by the orchestrating code
+ const newListener: grpcJs.Listener = {
+ onReceiveMetadata: (metadata, nextMd) => {
+ collector.onMetadataReceived(
+ metadata as unknown as {
+ internalRepr: Map<string, string[]>;
+ options: {};
+ },
+ );
+ nextMd(metadata);
+ },
+ onReceiveStatus: (status, nextStat) => {
+ collector.onStatusMetadataReceived(
+ status as unknown as ServerStatus,
+ );
+ nextStat(status);
+ },
+ };
+ next(metadata, newListener);
+ },
+ });
+ };
+}
+
+export function withInterceptors(
+ gaxOptions: CallOptions,
+ metricsCollector?: OperationMetricsCollector,
+) {
+ if (metricsCollector) {
+ const interceptor = createMetricsInterceptorProvider(metricsCollector);
+ if (!gaxOptions.otherArgs) {
+ gaxOptions.otherArgs = {};
+ }
+ if (!gaxOptions.otherArgs.options) {
+ gaxOptions.otherArgs.options = {};
+ }
+ if (!gaxOptions.otherArgs.options.interceptors) {
+ gaxOptions.otherArgs.options.interceptors = [interceptor];
+ } else {
+ if (Array.isArray(gaxOptions.otherArgs.options.interceptors)) {
+ // Only append when interceptors is actually an array so this code
+ // cannot throw; the client-side metrics interceptor is added
+ // alongside whatever interceptors were already configured.
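+ // For example, a caller that already set
+ // interceptors: [someLoggingInterceptor] ends up with
+ // interceptors: [someLoggingInterceptor, interceptor].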
+ gaxOptions.otherArgs.options.interceptors.push(interceptor); + } + } + } + return gaxOptions; +} diff --git a/src/row.ts b/src/row.ts index 9383f4703..39af7a32b 100644 --- a/src/row.ts +++ b/src/row.ts @@ -31,6 +31,11 @@ import {ServiceError} from 'google-gax'; import {google} from '../protos/protos'; import {RowDataUtils, RowProperties} from './row-data-utils'; import {TabularApiSurface} from './tabular-api-surface'; +import {getRowsInternal} from './utils/getRowsInternal'; +import { + MethodName, + StreamingState, +} from './client-side-metrics/client-side-metrics-attributes'; export interface Rule { column: string; @@ -666,31 +671,42 @@ export class Row { filter, }); - this.table.getRows(getRowsOptions, (err, rows) => { - if (err) { - callback(err); - return; - } + const metricsCollector = + this.table.bigtable._metricsConfigManager.createOperation( + MethodName.READ_ROW, + StreamingState.UNARY, + this.table, + ); + void getRowsInternal( + this.table, + metricsCollector, + getRowsOptions, + (err, rows) => { + if (err) { + callback(err); + return; + } - const row = rows![0]; + const row = rows![0]; - if (!row) { - const e = new RowError(this.id); - callback(e); - return; - } + if (!row) { + const e = new RowError(this.id); + callback(e); + return; + } - this.data = row.data; + this.data = row.data; - // If the user specifies column names, we'll return back the row data - // we received. Otherwise, we'll return the row "this" in a typical - // GrpcServiceObject#get fashion. - if (columns.length > 0) { - callback(null, row.data); - } else { - (callback as {} as GetRowCallback)(null, this); - } - }); + // If the user specifies column names, we'll return back the row data + // we received. Otherwise, we'll return the row "this" in a typical + // GrpcServiceObject#get fashion. 
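+ // For example (hypothetical column name), row.get(['cf1:name'], cb)
+ // invokes cb(null, row.data), while row.get(cb) invokes cb(null, row).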
+ if (columns.length > 0) {
+ callback(null, row.data);
+ } else {
+ (callback as {} as GetRowCallback)(null, this);
+ }
+ },
+ );
 }
 
 getMetadata(options?: GetRowOptions): Promise<GetRowMetadataResponse>;
diff --git a/src/tabular-api-surface.ts b/src/tabular-api-surface.ts
index 784f1f622..462658e92 100644
--- a/src/tabular-api-surface.ts
+++ b/src/tabular-api-surface.ts
@@ -17,28 +17,26 @@
 import arrify = require('arrify');
 import {Instance} from './instance';
 import {Mutation} from './mutation';
 import {
- AbortableDuplex,
 Bigtable,
 Entry,
 MutateOptions,
 SampleRowKeysCallback,
 SampleRowsKeysResponse,
 } from './index';
-import {Filter, BoundData, RawFilter} from './filter';
+import {BoundData, RawFilter} from './filter';
 import {Row} from './row';
-import {
- ChunkPushData,
- ChunkPushLastScannedRowData,
- ChunkTransformer,
- DataEvent,
-} from './chunktransformer';
 import {BackoffSettings} from 'google-gax/build/src/gax';
 import {google} from '../protos/protos';
 import {CallOptions, grpc, ServiceError} from 'google-gax';
-import {Duplex, PassThrough, Transform} from 'stream';
+import {Transform} from 'stream';
 import * as is from 'is';
 import {GoogleInnerError} from './table';
-import {TableUtils} from './utils/table';
+import {createReadStreamInternal} from './utils/createReadStreamInternal';
+import {getRowsInternal} from './utils/getRowsInternal';
+import {
+ MethodName,
+ StreamingState,
+} from './client-side-metrics/client-side-metrics-attributes';
 
 // See protos/google/rpc/code.proto
 // (4=DEADLINE_EXCEEDED, 8=RESOURCE_EXHAUSTED, 10=ABORTED, 14=UNAVAILABLE)
@@ -159,7 +157,9 @@ export class TabularApiSurface {
 id: string;
 metadata?: google.bigtable.admin.v2.ITable;
 maxRetries?: number;
- protected viewName?: string;
+ // viewName is public because the Row class needs it: Row#get now reads
+ // through getRowsInternal instead of Table#getRows.
+ viewName?: string;
 
 protected constructor(instance: Instance, id: string, viewName?: string) {
 this.bigtable = instance.bigtable;
@@ -187,7 +187,6 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
 * Get {@link Row} objects for the rows currently in your table as a
 * readable object stream.
 *
- * @param {object} [options] Configuration object.
 * @param {boolean} [options.decode=true] If set to `false` it will not decode
 * Buffer values returned from Bigtable.
 * @param {boolean} [options.encoding] The encoding to use when converting
@@ -208,361 +207,16 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
 *
 * @example include:samples/api-reference-doc-snippets/table.js
 * region_tag:bigtable_api_table_readstream
+ * @param {GetRowsOptions} [opts] Configuration object. See the options
+ * documented above.
 */
 createReadStream(opts?: GetRowsOptions) {
- const options = opts || {};
- const maxRetries = is.number(this.maxRetries) ? this.maxRetries! : 10;
- let activeRequestStream: AbortableDuplex | null;
- let rowKeys: string[];
- let filter: {} | null;
- const rowsLimit = options.limit || 0;
- const hasLimit = rowsLimit !== 0;
-
- let numConsecutiveErrors = 0;
- let numRequestsMade = 0;
- let retryTimer: NodeJS.Timeout | null;
-
- rowKeys = options.keys || [];
-
- /*
- The following line of code sets the timeout if it was provided while
- creating the client. This will be used to determine if the client should
- retry on DEADLINE_EXCEEDED errors. Eventually, this will be handled
- downstream in google-gax.
- */ - const timeout = - opts?.gaxOptions?.timeout || - (this?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces && - this?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces[ - 'google.bigtable.v2.Bigtable' - ]?.methods['ReadRows']?.timeout_millis); - const callTimeMillis = new Date().getTime(); - - const ranges = TableUtils.getRanges(options); - - // If rowKeys and ranges are both empty, the request is a full table scan. - // Add an empty range to simplify the resumption logic. - if (rowKeys.length === 0 && ranges.length === 0) { - ranges.push({}); - } - - if (options.filter) { - filter = Filter.parse(options.filter); - } - - let chunkTransformer: ChunkTransformer; - let rowStream: Duplex; - - let userCanceled = false; - // The key of the last row that was emitted by the per attempt pipeline - // Note: this must be updated from the operation level userStream to avoid referencing buffered rows that will be - // discarded in the per attempt subpipeline (rowStream) - let lastRowKey = ''; - let rowsRead = 0; - const userStream = new PassThrough({ - objectMode: true, - readableHighWaterMark: 0, // We need to disable readside buffering to allow for acceptable behavior when the end user cancels the stream early. - writableHighWaterMark: 0, // We need to disable writeside buffering because in nodejs 14 the call to _transform happens after write buffering. This creates problems for tracking the last seen row key. - transform(event, _encoding, callback) { - if (userCanceled) { - callback(); - return; - } - if (event.eventType === DataEvent.LAST_ROW_KEY_UPDATE) { - /** - * This code will run when receiving an event containing - * lastScannedRowKey data that the chunk transformer sent. When the - * chunk transformer gets lastScannedRowKey data, this code - * updates the lastRowKey to ensure row ids with the lastScannedRowKey - * aren't re-requested in retries. The lastRowKey needs to be updated - * here and not in the chunk transformer to ensure the update is - * queued behind all events that deliver data to the user stream - * first. - */ - lastRowKey = event.lastScannedRowKey; - callback(); - return; - } - const row = event; - if (TableUtils.lessThanOrEqualTo(row.id, lastRowKey)) { - /* - Sometimes duplicate rows reach this point. To avoid delivering - duplicate rows to the user, rows are thrown away if they don't exceed - the last row key. We can expect each row to reach this point and rows - are delivered in order so if the last row key equals or exceeds the - row id then we know data for this row has already reached this point - and been delivered to the user. In this case we want to throw the row - away and we do not want to deliver this row to the user again. - */ - callback(); - return; - } - lastRowKey = row.id; - rowsRead++; - callback(null, row); - }, - }); - - // The caller should be able to call userStream.end() to stop receiving - // more rows and cancel the stream prematurely. But also, the 'end' event - // will be emitted if the stream ended normally. To tell these two - // situations apart, we'll save the "original" end() function, and - // will call it on rowStream.on('end'). 
- const originalEnd = userStream.end.bind(userStream); - - // Taking care of this extra listener when piping and unpiping userStream: - const rowStreamPipe = (rowStream: Duplex, userStream: PassThrough) => { - rowStream.pipe(userStream, {end: false}); - rowStream.on('end', originalEnd); - }; - const rowStreamUnpipe = (rowStream: Duplex, userStream: PassThrough) => { - rowStream?.unpipe(userStream); - rowStream?.removeListener('end', originalEnd); - }; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - userStream.end = (chunk?: any, encoding?: any, cb?: () => void) => { - rowStreamUnpipe(rowStream, userStream); - userCanceled = true; - if (activeRequestStream) { - activeRequestStream.abort(); - } - if (retryTimer) { - clearTimeout(retryTimer); - } - return originalEnd(chunk, encoding, cb); - }; - - const makeNewRequest = () => { - // Avoid cancelling an expired timer if user - // cancelled the stream in the middle of a retry - retryTimer = null; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - chunkTransformer = new ChunkTransformer({decode: options.decode} as any); - - // If the viewName is provided then request will be made for an - // authorized view. Otherwise, the request is made for a table. - const reqOpts = ( - this.viewName - ? { - authorizedViewName: `${this.name}/authorizedViews/${this.viewName}`, - appProfileId: this.bigtable.appProfileId, - } - : { - tableName: this.name, - appProfileId: this.bigtable.appProfileId, - } - ) as google.bigtable.v2.IReadRowsRequest; - - const retryOpts = { - currentRetryAttempt: 0, // was numConsecutiveErrors - // Handling retries in this client. Specify the retry options to - // make sure nothing is retried in retry-request. - noResponseRetries: 0, - shouldRetryFn: (_: any) => { - return false; - }, - }; - - if (lastRowKey) { - // Readjust and/or remove ranges based on previous valid row reads. - // Iterate backward since items may need to be removed. - for (let index = ranges.length - 1; index >= 0; index--) { - const range = ranges[index]; - const startValue = is.object(range.start) - ? (range.start as BoundData).value - : range.start; - const endValue = is.object(range.end) - ? (range.end as BoundData).value - : range.end; - const startKeyIsRead = - !startValue || - TableUtils.lessThanOrEqualTo( - startValue as string, - lastRowKey as string, - ); - const endKeyIsNotRead = - !endValue || - (endValue as Buffer).length === 0 || - TableUtils.lessThan(lastRowKey as string, endValue as string); - if (startKeyIsRead) { - if (endKeyIsNotRead) { - // EndKey is not read, reset the range to start from lastRowKey open - range.start = { - value: lastRowKey, - inclusive: false, - }; - } else { - // EndKey is read, remove this range - ranges.splice(index, 1); - } - } - } - - // Remove rowKeys already read. - rowKeys = rowKeys.filter(rowKey => - TableUtils.greaterThan(rowKey, lastRowKey as string), - ); - - // If there was a row limit in the original request and - // we've already read all the rows, end the stream and - // do not retry. - if (hasLimit && rowsLimit === rowsRead) { - userStream.end(); - return; - } - // If all the row keys and ranges are read, end the stream - // and do not retry. 
- if (rowKeys.length === 0 && ranges.length === 0) { - userStream.end(); - return; - } - } - - // Create the new reqOpts - reqOpts.rows = {}; - - // TODO: preprocess all the keys and ranges to Bytes - reqOpts.rows.rowKeys = rowKeys.map( - Mutation.convertToBytes, - ) as {} as Uint8Array[]; - - reqOpts.rows.rowRanges = ranges.map(range => - Filter.createRange( - range.start as BoundData, - range.end as BoundData, - 'Key', - ), - ); - - if (filter) { - reqOpts.filter = filter; - } - - if (hasLimit) { - reqOpts.rowsLimit = rowsLimit - rowsRead; - } - - const gaxOpts = populateAttemptHeader( - numRequestsMade, - options.gaxOptions, + const metricsCollector = + this.bigtable._metricsConfigManager.createOperation( + MethodName.READ_ROWS, + StreamingState.STREAMING, + this, ); - - const requestStream = this.bigtable.request({ - client: 'BigtableClient', - method: 'readRows', - reqOpts, - gaxOpts, - retryOpts, - }); - - activeRequestStream = requestStream!; - - const toRowStream = new Transform({ - transform: (rowData: ChunkPushData, _, next) => { - if ( - userCanceled || - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (userStream as any)._writableState.ended - ) { - return next(); - } - if ( - (rowData as ChunkPushLastScannedRowData).eventType === - DataEvent.LAST_ROW_KEY_UPDATE - ) { - /** - * If the data is the chunk transformer communicating that the - * lastScannedRow was received then this message is passed along - * to the user stream to update the lastRowKey. - */ - next(null, rowData); - } else { - /** - * If the data is just regular rows being pushed from the - * chunk transformer then the rows are encoded so that they - * can be consumed by the user stream. - */ - const row = this.row((rowData as Row).key as string); - row.data = (rowData as Row).data; - next(null, row); - } - }, - objectMode: true, - }); - - rowStream = pumpify.obj([requestStream, chunkTransformer, toRowStream]); - - // Retry on "received rst stream" errors - const isRstStreamError = (error: ServiceError): boolean => { - if (error.code === 13 && error.message) { - const error_message = (error.message || '').toLowerCase(); - return ( - error.code === 13 && - (error_message.includes('rst_stream') || - error_message.includes('rst stream')) - ); - } - return false; - }; - - rowStream - .on('error', (error: ServiceError) => { - rowStreamUnpipe(rowStream, userStream); - activeRequestStream = null; - if (IGNORED_STATUS_CODES.has(error.code)) { - // We ignore the `cancelled` "error", since we are the ones who cause - // it when the user calls `.abort()`. - userStream.end(); - return; - } - numConsecutiveErrors++; - numRequestsMade++; - if ( - numConsecutiveErrors <= maxRetries && - (RETRYABLE_STATUS_CODES.has(error.code) || - isRstStreamError(error)) && - !(timeout && timeout < new Date().getTime() - callTimeMillis) - ) { - const backOffSettings = - options.gaxOptions?.retry?.backoffSettings || - DEFAULT_BACKOFF_SETTINGS; - const nextRetryDelay = getNextDelay( - numConsecutiveErrors, - backOffSettings, - ); - retryTimer = setTimeout(makeNewRequest, nextRetryDelay); - } else { - if ( - !error.code && - error.message === 'The client has already been closed.' - ) { - // - // The TestReadRows_Generic_CloseClient conformance test requires - // a grpc code to be present when the client is closed. According - // to Gemini, the appropriate code for a closed client is - // CANCELLED since the user actually cancelled the call by closing - // the client. 
- //
- error.code = grpc.status.CANCELLED;
- }
- userStream.emit('error', error);
- }
- })
- .on('data', _ => {
- // Reset error count after a successful read so the backoff
- // time won't keep increasing when as stream had multiple errors
- numConsecutiveErrors = 0;
- })
- .on('end', () => {
- activeRequestStream = null;
- });
- rowStreamPipe(rowStream, userStream);
- };
-
- makeNewRequest();
- return userStream;
+ return createReadStreamInternal(this, metricsCollector, opts);
 }
 
 getRows(options?: GetRowsOptions): Promise<GetRowsResponse>;
@@ -575,32 +229,28 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
 * before returning the results. Instead we recommend using the streaming API
 * via {@link Table#createReadStream}.
 *
- * @param {object} [options] Configuration object. See
 * {@link Table#createReadStream} for a complete list of options.
 * @param {object} [options.gaxOptions] Request configuration options, outlined
 * here: https://googleapis.github.io/gax-nodejs/CallSettings.html.
- * @param {function} callback The callback function.
 * @param {?error} callback.err An error returned while making this request.
 * @param {Row[]} callback.rows List of Row objects.
 *
 * @example include:samples/api-reference-doc-snippets/table.js
 * region_tag:bigtable_api_get_rows
+ * @param {GetRowsOptions|GetRowsCallback} [optionsOrCallback] Configuration
+ * object, or the callback when no options are provided.
+ * @param {GetRowsCallback} [cb] The callback function when options are
+ * provided.
 */
 getRows(
 optionsOrCallback?: GetRowsOptions | GetRowsCallback,
 cb?: GetRowsCallback,
 ): void | Promise<GetRowsResponse> {
- const callback =
- typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!;
- const options =
- typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
- this.createReadStream(options)
- .on('error', callback)
- .pipe(
- concat((rows: Row[]) => {
- callback(null, rows);
- }),
+ const metricsCollector =
+ this.bigtable._metricsConfigManager.createOperation(
+ MethodName.READ_ROWS,
+ StreamingState.STREAMING,
+ this,
 );
+ return getRowsInternal(this, metricsCollector, optionsOrCallback, cb);
 }
 
 insert(
diff --git a/src/timed-stream.ts b/src/timed-stream.ts
new file mode 100644
index 000000000..85e3a54a2
--- /dev/null
+++ b/src/timed-stream.ts
@@ -0,0 +1,113 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {PassThrough, TransformCallback, TransformOptions} from 'stream';
+
+/**
+ * This interface is the usual options that can be passed into a Transform plus
+ * a hook for injecting code into the stream's transform function. The hook is
+ * useful for running code that would normally live in the transform method
+ * when that code differs for each method that makes use of the stream.
+ */
+type TimedStreamOptions = TransformOptions & {
+ transformHook?: (
+ event: any,
+ _encoding: BufferEncoding,
+ callback: TransformCallback,
+ ) => void;
+};
+
+class StreamTimer {
+ private startTime: bigint;
+ private totalDuration: bigint;
+
+ constructor() {
+ this.startTime = 0n;
+ this.totalDuration = 0n;
+ }
+
+ getTotalDurationMs() {
+ return Number(this.totalDuration / 1_000_000n);
+ }
+
+ start() {
+ this.startTime = process.hrtime.bigint();
+ }
+
+ stop() {
+ const endTime = process.hrtime.bigint();
+ const duration = endTime - this.startTime;
+ this.totalDuration += duration;
+ }
+}
+
+/**
+ * The TimedStream class is used for measuring the time the user spends
+ * processing data from the stream. We need to measure this time for use cases
+ * like measuring the application latencies for client side metrics.
+ */
+export class TimedStream extends PassThrough {
+ private readTimer = new StreamTimer();
+ private transformTimer = new StreamTimer();
+ constructor(options?: TimedStreamOptions) {
+ // highWaterMark of 0 is needed so the stream responds to each row
+ // individually instead of buffering.
+ super({
+ ...options,
+ objectMode: true,
+ highWaterMark: 0,
+ transform: (event, _encoding, callback) => {
+ /* When the user iterates through the stream, time spent waiting for
+ the user's application is captured by the read timer. When the user
+ consumes data with event handlers, that time is captured by the
+ transform timer. We need two different timers to measure total
+ application blocking latencies because the stream behaves differently
+ depending on whether the user is iterating through it or using
+ handlers.
+ */
+ this.transformTimer.start();
+ if (options?.transformHook) {
+ options?.transformHook(event, _encoding, callback);
+ }
+ callback(null, event);
+ this.transformTimer.stop();
+ },
+ });
+ }
+
+ /**
+ * read() is called each time a row is consumed from the stream.
+ */
+ read(size: number) {
+ // calculate the time spent between iterations of read (i.e. processing the stream in a for loop)
+ const chunk = super.read(size);
+ if (chunk) {
+ this.readTimer.start();
+ // Defer stopping the timer to the next tick of the event loop
+ process.nextTick(() => {
+ this.readTimer.stop();
+ });
+ }
+ return chunk;
+ }
+
+ /**
+ * Returns the total amount of time the user code spends handling data.
+ */
+ getTotalDurationMs() {
+ return (
+ this.readTimer.getTotalDurationMs() +
+ this.transformTimer.getTotalDurationMs()
+ );
+ }
+}
diff --git a/src/utils/createReadStreamInternal.ts b/src/utils/createReadStreamInternal.ts
new file mode 100644
index 000000000..9158bdc03
--- /dev/null
+++ b/src/utils/createReadStreamInternal.ts
@@ -0,0 +1,440 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
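+
+// This module contains the row-streaming logic previously inlined in
+// TabularApiSurface#createReadStream. It is shared by createReadStream,
+// getRows, and Row#get, each of which threads its own
+// OperationMetricsCollector through the call.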
+
+import {GetRowsOptions} from '../table';
+import {Row} from '../row';
+import * as is from 'is';
+import {Filter, BoundData} from '../filter';
+import {Mutation} from '../mutation';
+import {AbortableDuplex} from '../index';
+import {
+ ChunkPushData,
+ ChunkPushLastScannedRowData,
+ ChunkTransformer,
+ DataEvent,
+} from '../chunktransformer';
+import {TableUtils} from './table';
+import {Duplex, PassThrough, Transform} from 'stream';
+import {
+ MethodName,
+ StreamingState,
+} from '../client-side-metrics/client-side-metrics-attributes';
+import {google} from '../../protos/protos';
+const pumpify = require('pumpify');
+import {grpc, ServiceError} from 'google-gax';
+import {
+ DEFAULT_BACKOFF_SETTINGS,
+ getNextDelay,
+ IGNORED_STATUS_CODES,
+ populateAttemptHeader,
+ RETRYABLE_STATUS_CODES,
+ TabularApiSurface,
+} from '../tabular-api-surface';
+import {OperationMetricsCollector} from '../client-side-metrics/operation-metrics-collector';
+
+/**
+ * Creates a readable stream of rows from a Bigtable table or authorized view.
+ *
+ * This internal method handles the core logic for streaming rows from a Bigtable
+ * table. It supports various filtering, limiting, and retry mechanisms. It can
+ * be used to create a stream for either a whole table or an authorized view.
+ *
+ * @param {TabularApiSurface} table The table or authorized view to read rows from.
+ * @param {OperationMetricsCollector} metricsCollector Collector that records
+ * operation and attempt lifecycle events for client-side metrics.
+ * @param {GetRowsOptions} [opts] Optional configuration for the read operation.
+ * @param {boolean} [opts.decode=true] If set to `false` it will not decode
+ * Buffer values returned from Bigtable.
+ * @param {boolean} [opts.encoding] The encoding to use when converting
+ * Buffer values to a string.
+ * @param {string} [opts.end] End value for key range.
+ * @param {Filter} [opts.filter] Row filters allow you to
+ * both make advanced queries and format how the data is returned.
+ * @param {object} [opts.gaxOptions] Request configuration options, outlined
+ * here: https://googleapis.github.io/gax-nodejs/CallSettings.html.
+ * @param {string[]} [opts.keys] A list of row keys.
+ * @param {number} [opts.limit] Maximum number of rows to be returned.
+ * @param {string} [opts.prefix] Prefix that the row key must match.
+ * @param {string[]} [opts.prefixes] List of prefixes that a row key must
+ * match.
+ * @param {object[]} [opts.ranges] A list of key ranges.
+ * @param {string} [opts.start] Start value for key range.
+ * @returns {stream} A readable stream of {@link Row} objects.
+ *
+ */
+export function createReadStreamInternal(
+ table: TabularApiSurface,
+ metricsCollector: OperationMetricsCollector,
+ opts?: GetRowsOptions,
+) {
+ const options = opts || {};
+ const maxRetries = is.number(table.maxRetries) ? table.maxRetries! : 10;
+ let activeRequestStream: AbortableDuplex | null;
+ let rowKeys: string[];
+ let filter: {} | null;
+ const rowsLimit = options.limit || 0;
+ const hasLimit = rowsLimit !== 0;
+
+ const viewName = table.viewName;
+
+ let numConsecutiveErrors = 0;
+ let numRequestsMade = 0;
+ let retryTimer: NodeJS.Timeout | null;
+
+ rowKeys = options.keys || [];
+
+ /*
+ The following line of code sets the timeout if it was provided while
+ creating the client. This will be used to determine if the client should
+ retry on DEADLINE_EXCEEDED errors. Eventually, this will be handled
+ downstream in google-gax.
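+ For example, with gaxOptions.timeout set to 60000, no retry is scheduled
+ once more than 60 seconds have elapsed since the call began.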
+ */ + const timeout = + opts?.gaxOptions?.timeout || + (table?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces && + table?.bigtable?.options?.BigtableClient?.clientConfig?.interfaces[ + 'google.bigtable.v2.Bigtable' + ]?.methods['ReadRows']?.timeout_millis); + const callTimeMillis = new Date().getTime(); + + const ranges = TableUtils.getRanges(options); + + // If rowKeys and ranges are both empty, the request is a full table scan. + // Add an empty range to simplify the resumption logic. + if (rowKeys.length === 0 && ranges.length === 0) { + ranges.push({}); + } + + if (options.filter) { + filter = Filter.parse(options.filter); + } + + let chunkTransformer: ChunkTransformer; + let rowStream: Duplex; + + let userCanceled = false; + // The key of the last row that was emitted by the per attempt pipeline + // Note: this must be updated from the operation level userStream to avoid referencing buffered rows that will be + // discarded in the per attempt subpipeline (rowStream) + let lastRowKey = ''; + let rowsRead = 0; + const userStream = new PassThrough({ + objectMode: true, + readableHighWaterMark: 0, // We need to disable readside buffering to allow for acceptable behavior when the end user cancels the stream early. + writableHighWaterMark: 0, // We need to disable writeside buffering because in nodejs 14 the call to _transform happens after write buffering. This creates problems for tracking the last seen row key. + transform(event, _encoding, callback) { + if (userCanceled) { + callback(); + return; + } + if (event.eventType === DataEvent.LAST_ROW_KEY_UPDATE) { + /** + * This code will run when receiving an event containing + * lastScannedRowKey data that the chunk transformer sent. When the + * chunk transformer gets lastScannedRowKey data, this code + * updates the lastRowKey to ensure row ids with the lastScannedRowKey + * aren't re-requested in retries. The lastRowKey needs to be updated + * here and not in the chunk transformer to ensure the update is + * queued behind all events that deliver data to the user stream + * first. + */ + lastRowKey = event.lastScannedRowKey; + callback(); + return; + } + const row = event; + if (TableUtils.lessThanOrEqualTo(row.id, lastRowKey)) { + /* + Sometimes duplicate rows reach this point. To avoid delivering + duplicate rows to the user, rows are thrown away if they don't exceed + the last row key. We can expect each row to reach this point and rows + are delivered in order so if the last row key equals or exceeds the + row id then we know data for this row has already reached this point + and been delivered to the user. In this case we want to throw the row + away and we do not want to deliver this row to the user again. + */ + callback(); + return; + } + lastRowKey = row.id; + rowsRead++; + callback(null, row); + }, + }); + + // The caller should be able to call userStream.end() to stop receiving + // more rows and cancel the stream prematurely. But also, the 'end' event + // will be emitted if the stream ended normally. To tell these two + // situations apart, we'll save the "original" end() function, and + // will call it on rowStream.on('end'). 
+ const originalEnd = userStream.end.bind(userStream); + + // Taking care of this extra listener when piping and unpiping userStream: + const rowStreamPipe = (rowStream: Duplex, userStream: PassThrough) => { + rowStream.pipe(userStream, {end: false}); + rowStream.on('end', originalEnd); + }; + const rowStreamUnpipe = (rowStream: Duplex, userStream: PassThrough) => { + rowStream?.unpipe(userStream); + rowStream?.removeListener('end', originalEnd); + }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + userStream.end = (chunk?: any, encoding?: any, cb?: () => void) => { + rowStreamUnpipe(rowStream, userStream); + userCanceled = true; + if (activeRequestStream) { + activeRequestStream.abort(); + } + if (retryTimer) { + clearTimeout(retryTimer); + } + return originalEnd(chunk, encoding, cb); + }; + metricsCollector.onOperationStart(); + const makeNewRequest = () => { + metricsCollector.onAttemptStart(); + + // Avoid cancelling an expired timer if user + // cancelled the stream in the middle of a retry + retryTimer = null; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + chunkTransformer = new ChunkTransformer({ + decode: options.decode, + } as any); + + // If the viewName is provided then request will be made for an + // authorized view. Otherwise, the request is made for a table. + const reqOpts = ( + viewName + ? { + authorizedViewName: `${table.name}/authorizedViews/${viewName}`, + appProfileId: table.bigtable.appProfileId, + } + : { + tableName: table.name, + appProfileId: table.bigtable.appProfileId, + } + ) as google.bigtable.v2.IReadRowsRequest; + + const retryOpts = { + currentRetryAttempt: 0, // was numConsecutiveErrors + // Handling retries in this client. Specify the retry options to + // make sure nothing is retried in retry-request. + noResponseRetries: 0, + shouldRetryFn: (_: any) => { + return false; + }, + }; + + if (lastRowKey) { + // Readjust and/or remove ranges based on previous valid row reads. + // Iterate backward since items may need to be removed. + for (let index = ranges.length - 1; index >= 0; index--) { + const range = ranges[index]; + const startValue = is.object(range.start) + ? (range.start as BoundData).value + : range.start; + const endValue = is.object(range.end) + ? (range.end as BoundData).value + : range.end; + const startKeyIsRead = + !startValue || + TableUtils.lessThanOrEqualTo( + startValue as string, + lastRowKey as string, + ); + const endKeyIsNotRead = + !endValue || + (endValue as Buffer).length === 0 || + TableUtils.lessThan(lastRowKey as string, endValue as string); + if (startKeyIsRead) { + if (endKeyIsNotRead) { + // EndKey is not read, reset the range to start from lastRowKey open + range.start = { + value: lastRowKey, + inclusive: false, + }; + } else { + // EndKey is read, remove this range + ranges.splice(index, 1); + } + } + } + + // Remove rowKeys already read. + rowKeys = rowKeys.filter(rowKey => + TableUtils.greaterThan(rowKey, lastRowKey as string), + ); + + // If there was a row limit in the original request and + // we've already read all the rows, end the stream and + // do not retry. + if (hasLimit && rowsLimit === rowsRead) { + userStream.end(); + return; + } + // If all the row keys and ranges are read, end the stream + // and do not retry. 
+ if (rowKeys.length === 0 && ranges.length === 0) { + userStream.end(); + return; + } + } + + // Create the new reqOpts + reqOpts.rows = {}; + + // TODO: preprocess all the keys and ranges to Bytes + reqOpts.rows.rowKeys = rowKeys.map( + Mutation.convertToBytes, + ) as {} as Uint8Array[]; + + reqOpts.rows.rowRanges = ranges.map(range => + Filter.createRange( + range.start as BoundData, + range.end as BoundData, + 'Key', + ), + ); + + if (filter) { + reqOpts.filter = filter; + } + + if (hasLimit) { + reqOpts.rowsLimit = rowsLimit - rowsRead; + } + + const gaxOpts = populateAttemptHeader(numRequestsMade, options.gaxOptions); + + const requestStream = table.bigtable.request({ + client: 'BigtableClient', + method: 'readRows', + reqOpts, + gaxOpts, + retryOpts, + }); + + activeRequestStream = requestStream!; + + const toRowStream = new Transform({ + transform: (rowData: ChunkPushData, _, next) => { + if ( + userCanceled || + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (userStream as any)._writableState.ended + ) { + return next(); + } + if ( + (rowData as ChunkPushLastScannedRowData).eventType === + DataEvent.LAST_ROW_KEY_UPDATE + ) { + /** + * If the data is the chunk transformer communicating that the + * lastScannedRow was received then this message is passed along + * to the user stream to update the lastRowKey. + */ + next(null, rowData); + } else { + /** + * If the data is just regular rows being pushed from the + * chunk transformer then the rows are encoded so that they + * can be consumed by the user stream. + */ + const row = table.row((rowData as Row).key as string); + row.data = (rowData as Row).data; + next(null, row); + } + }, + objectMode: true, + }); + + rowStream = pumpify.obj([requestStream, chunkTransformer, toRowStream]); + + metricsCollector.handleStatusAndMetadata(requestStream); + rowStream + .on('error', (error: ServiceError) => { + rowStreamUnpipe(rowStream, userStream); + activeRequestStream = null; + if (IGNORED_STATUS_CODES.has(error.code)) { + // We ignore the `cancelled` "error", since we are the ones who cause + // it when the user calls `.abort()`. + userStream.end(); + metricsCollector.onOperationComplete(error.code); + return; + } + numConsecutiveErrors++; + numRequestsMade++; + if ( + numConsecutiveErrors <= maxRetries && + (RETRYABLE_STATUS_CODES.has(error.code) || isRstStreamError(error)) && + !(timeout && timeout < new Date().getTime() - callTimeMillis) + ) { + const backOffSettings = + options.gaxOptions?.retry?.backoffSettings || + DEFAULT_BACKOFF_SETTINGS; + const nextRetryDelay = getNextDelay( + numConsecutiveErrors, + backOffSettings, + ); + metricsCollector.onAttemptComplete(error.code); + retryTimer = setTimeout(makeNewRequest, nextRetryDelay); + } else { + if ( + !error.code && + error.message === 'The client has already been closed.' + ) { + // + // The TestReadRows_Generic_CloseClient conformance test requires + // a grpc code to be present when the client is closed. The + // appropriate code for a closed client is CANCELLED since the + // user actually cancelled the call by closing the client. 
+ //
+ error.code = grpc.status.CANCELLED;
+ }
+ metricsCollector.onOperationComplete(error.code);
+ userStream.emit('error', error);
+ }
+ })
+ .on('data', _ => {
+ // Reset error count after a successful read so the backoff
+ // time won't keep increasing when a stream has had multiple errors
+ numConsecutiveErrors = 0;
+ metricsCollector.onResponse();
+ })
+ .on('end', () => {
+ activeRequestStream = null;
+ metricsCollector.onOperationComplete(grpc.status.OK);
+ });
+ rowStreamPipe(rowStream, userStream);
+ };
+
+ makeNewRequest();
+ return userStream;
+}
+
+// Retry on "received rst stream" errors
+export function isRstStreamError(error: ServiceError): boolean {
+ if (error.code === 13 && error.message) {
+ const error_message = (error.message || '').toLowerCase();
+ return (
+ error_message.includes('rst_stream') ||
+ error_message.includes('rst stream')
+ );
+ }
+ return false;
+}
diff --git a/src/utils/getRowsInternal.ts b/src/utils/getRowsInternal.ts
new file mode 100644
index 000000000..d106b6de9
--- /dev/null
+++ b/src/utils/getRowsInternal.ts
@@ -0,0 +1,66 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {
+ GetRowsCallback,
+ GetRowsOptions,
+ GetRowsResponse,
+ TabularApiSurface,
+} from '../tabular-api-surface';
+import {createReadStreamInternal} from './createReadStreamInternal';
+import {Row} from '../row';
+import {OperationMetricsCollector} from '../client-side-metrics/operation-metrics-collector';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const concat = require('concat-stream');
+
+/**
+ * Get {@link Row} objects for the rows currently in your table.
+ *
+ * This method is not recommended for large datasets as it will buffer all rows
+ * before returning the results. Instead we recommend using the streaming API
+ * via {@link Table#createReadStream}.
+ *
+ * @param {TabularApiSurface} table The table instance to get rows from.
+ * @param {OperationMetricsCollector} metricsCollector Collector that records
+ * operation and attempt lifecycle events for client-side metrics.
+ * @param {object} [optionsOrCallback] Configuration object. See
+ * {@link Table#createReadStream} for a complete list of options.
+ * @param {object} [optionsOrCallback.gaxOptions] Request configuration options, outlined
+ * here: https://googleapis.github.io/gax-nodejs/CallSettings.html.
+ * @param {function} cb The callback function.
+ * @param {?error} cb.err An error returned while making this request.
+ * @param {Row[]} cb.rows List of Row objects.
+ *
+ * @returns {Promise|void} Returns a promise that resolves with the rows if no callback is provided, otherwise calls the callback with the rows.
+ *
+ * @example include:samples/api-reference-doc-snippets/table.js
+ * region_tag:bigtable_api_get_rows
+ */
+export function getRowsInternal(
+ table: TabularApiSurface,
+ metricsCollector: OperationMetricsCollector,
+ optionsOrCallback?: GetRowsOptions | GetRowsCallback,
+ cb?: GetRowsCallback,
+): void | Promise<GetRowsResponse> {
+ const callback =
+ typeof optionsOrCallback === 'function' ?
optionsOrCallback : cb!; + const options = + typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + createReadStreamInternal(table, metricsCollector, options) + .on('error', callback) + .pipe( + concat((rows: Row[]) => { + callback(null, rows); + }), + ); +} diff --git a/src/utils/retry-options.ts b/src/utils/retry-options.ts new file mode 100644 index 000000000..7fbb7fcbd --- /dev/null +++ b/src/utils/retry-options.ts @@ -0,0 +1,77 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {BackoffSettings} from 'google-gax/build/src/gax'; +import {GoogleError, grpc, ServiceError} from 'google-gax'; + +export const RETRYABLE_STATUS_CODES = new Set([ + grpc.status.DEADLINE_EXCEEDED.valueOf(), + grpc.status.UNAVAILABLE.valueOf(), +]); +export const DEFAULT_RETRY_COUNT = 10; +export const IGNORED_STATUS_CODES = new Set([grpc.status.CANCELLED.valueOf()]); + +interface Violation { + type: string; + description: string; +} + +interface ViolationsList { + violations?: Violation[]; +} + +function containsPreparedQueryExpired(violations: ViolationsList[]): boolean { + if (!Array.isArray(violations) || violations.length === 0) { + return false; + } + + for (const obj of violations) { + if (obj.violations && Array.isArray(obj.violations)) { + for (const violation of obj.violations) { + if (violation.type === 'PREPARED_QUERY_EXPIRED') { + return true; + } + } + } + } + + return false; +} + +/** + * Checks if the error is an "expired query plan" error. + * For more info refer to the ExecuteQueryStateMachine + * @param error + */ +export const isExpiredQueryError = ( + error: GoogleError | ServiceError, +): boolean => { + if ( + error.code === grpc.status.FAILED_PRECONDITION && + Object.prototype.hasOwnProperty.call(error, 'statusDetails') + ) { + const statusDetails = (error as GoogleError) + .statusDetails as ViolationsList[]; + return containsPreparedQueryExpired(statusDetails); + } + return false; +}; + +/** + * Checks if the error is a cancel error - caused by aborting the stream. 
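+ * For example, a read aborted via the stream's abort() surfaces as a
+ * ServiceError with code CANCELLED, which this helper detects.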
+ * @param error + */ +export function isCancelError(error: ServiceError) { + return error.code === grpc.status.CANCELLED.valueOf(); +} diff --git a/system-test/bigtable.ts b/system-test/bigtable.ts index 52c1369f2..ebcd3c02d 100644 --- a/system-test/bigtable.ts +++ b/system-test/bigtable.ts @@ -25,6 +25,7 @@ import { Instance, InstanceOptions, MutateOptions, + SqlTypes, } from '../src'; import {Mutation} from '../src/mutation'; import {AppProfile} from '../src/app-profile.js'; @@ -37,6 +38,7 @@ import {RawFilter} from '../src/filter'; import {generateId, PREFIX} from './common'; import {BigtableTableAdminClient} from '../src/v2'; import {ServiceError} from 'google-gax'; +import {BigtableDate, QueryResultRow} from '../src/execute-query/values'; describe('Bigtable', () => { const bigtable = new Bigtable(); @@ -788,6 +790,100 @@ describe('Bigtable', () => { }); describe('fetching data', () => { + it('should execute a query', async () => { + const [preparedStatement] = await INSTANCE.prepareStatement({ + query: + 'SELECT @stringParam AS strCol, @bytesParam as bytesCol, @int64Param AS intCol, @doubleParam AS doubleCol,\n' + + '@floatParam AS floatCol, @boolParam AS boolCol, @tsParam AS tsCol, @dateParam AS dateCol,\n' + + '@byteArrayParam AS byteArrayCol, @stringArrayParam AS stringArrayCol, @intArrayParam AS intArrayCol,\n' + + '@floatArrayParam AS floatArrayCol, @doubleArrayParam AS doubleArrayCol, @boolArrayParam AS boolArrayCol,\n' + + '@tsArrayParam AS tsArrayCol, @dateArrayParam AS dateArrayCol', + parameterTypes: { + bytesParam: SqlTypes.Bytes(), + intArrayParam: SqlTypes.Array(SqlTypes.Int64()), + dateArrayParam: SqlTypes.Array(SqlTypes.Date()), + stringParam: SqlTypes.String(), + byteArrayParam: SqlTypes.Array(SqlTypes.Bytes()), + doubleArrayParam: SqlTypes.Array(SqlTypes.Float64()), + boolArrayParam: SqlTypes.Array(SqlTypes.Bool()), + doubleParam: SqlTypes.Float64(), + floatParam: SqlTypes.Float32(), + dateParam: SqlTypes.Date(), + floatArrayParam: SqlTypes.Array(SqlTypes.Float32()), + tsArrayParam: SqlTypes.Array(SqlTypes.Timestamp()), + int64Param: SqlTypes.Int64(), + boolParam: SqlTypes.Bool(), + tsParam: SqlTypes.Timestamp(), + stringArrayParam: SqlTypes.Array(SqlTypes.String()), + }, + }); + const params = { + bytesParam: Buffer.from('test'), + intArrayParam: [BigInt(1), BigInt(2), BigInt(3)], + dateArrayParam: [ + new BigtableDate(2025, 5, 14), + new BigtableDate(2025, 5, 13), + ], + stringParam: 'test', + byteArrayParam: [Buffer.from('test')], + doubleArrayParam: [1.0, 2.0, 3.0], + boolArrayParam: [true, false], + doubleParam: 1.0, + floatParam: 1.0, + dateParam: new BigtableDate(2025, 5, 14), + floatArrayParam: [1.0, 2.0, 3.0], + tsArrayParam: [ + new PreciseDate(Date.now()), + new PreciseDate(Date.now()), + ], + int64Param: BigInt(123), + boolParam: true, + tsParam: new PreciseDate(Date.now()), + stringArrayParam: ['test', 'test'], + }; + const [rows] = (await INSTANCE.executeQuery({ + preparedStatement, + parameters: params, + })) as any as [Row[]]; + assert(rows[0] instanceof QueryResultRow); + assert.deepStrictEqual(rows[0].get('strCol'), params.stringParam); + assert.deepStrictEqual(rows[0].get('bytesCol'), params.bytesParam); + assert.deepStrictEqual(rows[0].get('intCol'), params.int64Param); + assert.deepStrictEqual(rows[0].get('doubleCol'), params.doubleParam); + assert.deepStrictEqual(rows[0].get('floatCol'), params.floatParam); + assert.deepStrictEqual(rows[0].get('boolCol'), params.boolParam); + assert.deepStrictEqual(rows[0].get('tsCol'), params.tsParam); + 
assert.deepStrictEqual(rows[0].get('dateCol'), params.dateParam); + assert.deepStrictEqual( + rows[0].get('byteArrayCol'), + params.byteArrayParam, + ); + assert.deepStrictEqual( + rows[0].get('stringArrayCol'), + params.stringArrayParam, + ); + assert.deepStrictEqual( + rows[0].get('intArrayCol'), + params.intArrayParam, + ); + assert.deepStrictEqual( + rows[0].get('floatArrayCol'), + params.floatArrayParam, + ); + assert.deepStrictEqual( + rows[0].get('doubleArrayCol'), + params.doubleArrayParam, + ); + assert.deepStrictEqual( + rows[0].get('boolArrayCol'), + params.boolArrayParam, + ); + assert.deepStrictEqual(rows[0].get('tsArrayCol'), params.tsArrayParam); + assert.deepStrictEqual( + rows[0].get('dateArrayCol'), + params.dateArrayParam, + ); + }); it('should get rows', async () => { const [rows] = await TABLE.getRows(); assert.strictEqual(rows.length, 4); diff --git a/system-test/client-side-metrics-setup-table.ts b/system-test/client-side-metrics-setup-table.ts new file mode 100644 index 000000000..003d9b3bf --- /dev/null +++ b/system-test/client-side-metrics-setup-table.ts @@ -0,0 +1,71 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {Bigtable} from '../src'; +export async function setupBigtable( + bigtable: Bigtable, + columnFamilyId: string, + instanceId: string, + tableIds: string[], +) { + const instance = bigtable.instance(instanceId); + const [instanceInfo] = await instance.exists(); + while (!instanceInfo) { + const [, operation] = await instance.create({ + clusters: { + id: 'fake-cluster3', + location: 'us-west1-c', + nodes: 1, + }, + }); + await operation.promise(); + /** + * For whatever reason, even after waiting for an operation.promise() + * call to complete, the instance still doesn't seem to be ready yet so + * we do another check to ensure the instance is ready. + */ + const [instanceInfoAgain] = await instance.exists(); + if (instanceInfoAgain) { + break; + } + } + const tables = tableIds.map(tableId => instance.table(tableId)); + for (const currentTable of tables) { + const [tableExists] = await currentTable.exists(); + if (!tableExists) { + await currentTable.create({families: [columnFamilyId]}); // Create column family + } else { + // Check if column family exists and create it if not. + const [families] = await currentTable.getFamilies(); + + if ( + !families.some((family: {id: string}) => family.id === columnFamilyId) + ) { + await currentTable.createFamily(columnFamilyId); + } + } + // Add some data so that a firstResponseLatency is recorded. 
+ await currentTable.insert([ + { + key: 'rowId', + data: { + [columnFamilyId]: { + gwashington: 1, + tjefferson: 1, + }, + }, + }, + ]); + } +} diff --git a/system-test/client-side-metrics.ts b/system-test/client-side-metrics.ts new file mode 100644 index 000000000..3d6f9d450 --- /dev/null +++ b/system-test/client-side-metrics.ts @@ -0,0 +1,679 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {after, before, describe, it} from 'mocha'; +import * as mocha from 'mocha'; +import { + CloudMonitoringExporter, + ExportResult, +} from '../src/client-side-metrics/exporter'; +import {ResourceMetrics} from '@opentelemetry/sdk-metrics'; +import * as assert from 'assert'; +import {GCPMetricsHandler} from '../src/client-side-metrics/gcp-metrics-handler'; +import * as proxyquire from 'proxyquire'; +import {Bigtable} from '../src'; +import {Row} from '../src/row'; +import {setupBigtable} from './client-side-metrics-setup-table'; +import {TestMetricsHandler} from '../test-common/test-metrics-handler'; +import { + OnAttemptCompleteData, + OnOperationCompleteData, +} from '../src/client-side-metrics/metrics-handler'; +import {ClientOptions} from 'google-gax'; +import {ClientSideMetricsConfigManager} from '../src/client-side-metrics/metrics-config-manager'; +import {MetricServiceClient} from '@google-cloud/monitoring'; + +const SECOND_PROJECT_ID = 'cfdb-sdk-node-tests'; + +function getFakeBigtable( + projectId: string, + metricsHandlerClass: typeof GCPMetricsHandler | typeof TestMetricsHandler, + apiEndpoint?: string, +) { + // Normally the options passed into the client are passed into the metrics + // handler so when we mock out the metrics handler, it really should have + // the same options that are passed into the client. + const options = { + projectId, + apiEndpoint, + }; + const metricHandler = new metricsHandlerClass(options); + const newClient = new Bigtable(options); + newClient._metricsConfigManager = new ClientSideMetricsConfigManager([ + metricHandler, + ]); + return newClient; +} + +function getHandlerFromExporter(Exporter: typeof CloudMonitoringExporter) { + return proxyquire('../src/client-side-metrics/gcp-metrics-handler.js', { + './exporter': { + CloudMonitoringExporter: Exporter, + }, + }).GCPMetricsHandler; +} + +function readRowsAssertionCheck( + projectId: string, + requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = [], + method: string, + streaming: string, +) { + assert.strictEqual(requestsHandled.length, 4); + const firstRequest = requestsHandled[0] as any; + // We would expect these parameters to be different every time so delete + // them from the comparison after checking they exist. 
+ assert(firstRequest.attemptLatency); + assert(firstRequest.serverLatency); + delete firstRequest.attemptLatency; + delete firstRequest.serverLatency; + delete firstRequest.metricsCollectorData.appProfileId; + assert.deepStrictEqual(firstRequest, { + connectivityErrorCount: 0, + streaming, + status: '0', + client_name: 'nodejs-bigtable', + metricsCollectorData: { + instanceId: 'emulator-test-instance', + table: 'my-table', + cluster: 'fake-cluster3', + zone: 'us-west1-c', + method, + }, + projectId, + }); + const secondRequest = requestsHandled[1] as any; + // We would expect these parameters to be different every time so delete + // them from the comparison after checking they exist. + assert(secondRequest.operationLatency); + assert(secondRequest.firstResponseLatency); + assert(secondRequest.applicationLatencies); + delete secondRequest.operationLatency; + delete secondRequest.firstResponseLatency; + delete secondRequest.applicationLatencies; + delete secondRequest.metricsCollectorData.appProfileId; + assert.deepStrictEqual(secondRequest, { + status: '0', + streaming, + client_name: 'nodejs-bigtable', + metricsCollectorData: { + instanceId: 'emulator-test-instance', + cluster: 'fake-cluster3', + zone: 'us-west1-c', + method, + table: 'my-table', + }, + projectId, + retryCount: 0, + }); + // We would expect these parameters to be different every time so delete + // them from the comparison after checking they exist. + const thirdRequest = requestsHandled[2] as any; + assert(thirdRequest.attemptLatency); + assert(thirdRequest.serverLatency); + delete thirdRequest.attemptLatency; + delete thirdRequest.serverLatency; + delete thirdRequest.metricsCollectorData.appProfileId; + assert.deepStrictEqual(thirdRequest, { + connectivityErrorCount: 0, + streaming, + status: '0', + client_name: 'nodejs-bigtable', + metricsCollectorData: { + instanceId: 'emulator-test-instance', + table: 'my-table2', + cluster: 'fake-cluster3', + zone: 'us-west1-c', + method, + }, + projectId, + }); + const fourthRequest = requestsHandled[3] as any; + // We would expect these parameters to be different every time so delete + // them from the comparison after checking they exist. + assert(fourthRequest.operationLatency); + assert(fourthRequest.firstResponseLatency); + assert(fourthRequest.applicationLatencies); + delete fourthRequest.operationLatency; + delete fourthRequest.firstResponseLatency; + delete fourthRequest.applicationLatencies; + delete fourthRequest.metricsCollectorData.appProfileId; + assert.deepStrictEqual(fourthRequest, { + status: '0', + streaming, + client_name: 'nodejs-bigtable', + metricsCollectorData: { + instanceId: 'emulator-test-instance', + cluster: 'fake-cluster3', + zone: 'us-west1-c', + method, + table: 'my-table2', + }, + projectId, + retryCount: 0, + }); +} + +function checkMultiRowCall( + projectId: string, + requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = [], +) { + readRowsAssertionCheck( + projectId, + requestsHandled, + 'Bigtable.ReadRows', + 'true', + ); +} + +function checkSingleRowCall( + projectId: string, + requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = [], +) { + readRowsAssertionCheck( + projectId, + requestsHandled, + 'Bigtable.ReadRow', + 'false', + ); +} + +/** + * Checks if metrics have been published to Google Cloud Monitoring. + * + * This asynchronous function queries Google Cloud Monitoring to verify + * that the expected metrics from the Bigtable client library have been + * successfully published. 
It constructs a `MetricServiceClient` to
+ * interact with the Cloud Monitoring API and retrieves time series data
+ * for a predefined set of metrics. The test passes if time series data
+ * is found for each of the specified metrics within a defined time
+ * interval.
+ *
+ * @param {string} projectId The Google Cloud project ID where metrics are
+ * expected to be published.
+ * @throws {Error} If no time series data is found for any of the specified
+ * metrics, indicating that the metrics were not successfully published to
+ * Cloud Monitoring.
+ */
+async function checkForPublishedMetrics(projectId: string) {
+  const monitoringClient = new MetricServiceClient({projectId});
+  const now = Math.floor(Date.now() / 1000);
+  const filters = [
+    'metric.type="bigtable.googleapis.com/client/attempt_latencies"',
+    'metric.type="bigtable.googleapis.com/client/operation_latencies"',
+    'metric.type="bigtable.googleapis.com/client/retry_count"',
+    'metric.type="bigtable.googleapis.com/client/server_latencies"',
+    'metric.type="bigtable.googleapis.com/client/first_response_latencies"',
+  ];
+  for (let i = 0; i < filters.length; i++) {
+    const filter = filters[i];
+    const [series] = await monitoringClient.listTimeSeries({
+      name: `projects/${projectId}`,
+      interval: {
+        endTime: {
+          seconds: now,
+          nanos: 0,
+        },
+        startTime: {
+          // Look back 24 hours. Note that now is in seconds, not
+          // milliseconds, so no factor of 1000 is needed here.
+          seconds: now - 60 * 60 * 24,
+          nanos: 0,
+        },
+      },
+      filter,
+    });
+    assert(series.length > 0);
+  }
+}
+
+describe('Bigtable/ClientSideMetrics', () => {
+  const instanceId1 = 'emulator-test-instance';
+  const instanceId2 = 'emulator-test-instance2';
+  const tableId1 = 'my-table';
+  const tableId2 = 'my-table2';
+  const columnFamilyId = 'cf1';
+  let defaultProjectId: string;
+
+  before(async () => {
+    const bigtable = new Bigtable();
+    for (const instanceId of [instanceId1, instanceId2]) {
+      await setupBigtable(bigtable, columnFamilyId, instanceId, [
+        tableId1,
+        tableId2,
+      ]);
+    }
+    defaultProjectId = await new Promise<string>((resolve, reject) => {
+      bigtable.getProjectId_((err: Error | null, projectId?: string) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(projectId as string);
+        }
+      });
+    });
+  });
+
+  after(async () => {
+    const bigtable = new Bigtable();
+    try {
+      // If the instance has been deleted already by another source, we don't
+      // want this after hook to block the continuous integration pipeline.
+      const instance = bigtable.instance(instanceId1);
+      await instance.delete({});
+    } catch (e) {
+      console.warn('The instance has been deleted already');
+    }
+    try {
+      // If the instance has been deleted already by another source, we don't
+      // want this after hook to block the continuous integration pipeline.
+      const instance = bigtable.instance(instanceId2);
+      await instance.delete({});
+    } catch (e) {
+      console.warn('The instance has been deleted already');
+    }
+  });
+
+  describe('Bigtable/ClientSideMetricsToGCM', () => {
+    // This test suite ensures that for each test all the export calls are
+    // successful even when multiple instances and tables are created.
+    async function mockBigtable(
+      projectId: string,
+      done: mocha.Done,
+      apiEndpoint?: string,
+    ) {
+      /*
+      The exporter is called every x seconds, but we only want to test the value
+      it receives once. Since done cannot be called multiple times in mocha, the
+      exported variable ensures we only test the value export receives one time.
+      */
+      let exported = false;
+      /*
+      We need to create a timeout here because if we don't then mocha shuts down
+      the test as it is sleeping before the GCPMetricsHandler has a chance to
+      export the data.
+      */
+      const timeout = setTimeout(() => {
+        if (!exported) {
+          done(
+            new Error(
+              'The exporters have not completed yet and the timeout is over',
+            ),
+          );
+        }
+      }, 120000);
+
+      class TestExporter extends CloudMonitoringExporter {
+        constructor(options: ClientOptions) {
+          super(options);
+        }
+
+        async export(
+          metrics: ResourceMetrics,
+          resultCallback: (result: ExportResult) => void,
+        ): Promise<void> {
+          try {
+            await super.export(metrics, (result: ExportResult) => {
+              if (!exported) {
+                exported = true;
+                try {
+                  clearTimeout(timeout);
+                  // The test passes when the code is 0 because that means the
+                  // result from calling export was successful.
+                  assert.strictEqual(result.code, 0);
+                  resultCallback({code: 0});
+                  void checkForPublishedMetrics(projectId)
+                    .then(() => {
+                      done();
+                    })
+                    .catch(err => {
+                      done(new Error('Metrics have not been published'));
+                      done(err);
+                    });
+                } catch (error) {
+                  // The code here isn't 0 so we report the original error to the mocha test runner.
+                  done(result);
+                  done(error);
+                }
+              } else {
+                resultCallback({code: 0});
+              }
+            });
+          } catch (error) {
+            done(error);
+          }
+        }
+      }
+
+      return getFakeBigtable(
+        projectId,
+        getHandlerFromExporter(TestExporter),
+        apiEndpoint,
+      );
+    }
+
+    it('should send the metrics to Google Cloud Monitoring for a ReadRows call', done => {
+      (async () => {
+        try {
+          const bigtable = await mockBigtable(defaultProjectId, done);
+          for (const instanceId of [instanceId1, instanceId2]) {
+            await setupBigtable(bigtable, columnFamilyId, instanceId, [
+              tableId1,
+              tableId2,
+            ]);
+            const instance = bigtable.instance(instanceId);
+            const table = instance.table(tableId1);
+            await table.getRows();
+            const table2 = instance.table(tableId2);
+            await table2.getRows();
+          }
+        } catch (e) {
+          done(new Error('An error occurred while running the script'));
+          done(e);
+        }
+      })().catch(err => {
+        throw err;
+      });
+    });
+    it('should send the metrics to Google Cloud Monitoring for a custom endpoint', done => {
+      (async () => {
+        try {
+          const bigtable = await mockBigtable(
+            defaultProjectId,
+            done,
+            'bogus-endpoint',
+          );
+          const instance = bigtable.instance(instanceId1);
+          const table = instance.table(tableId1);
+          try {
+            // This call will fail because we are trying to hit a bogus endpoint.
+            // The idea here is that we just want to record at least one metric
+            // so that the exporter gets executed.
+            await table.getRows();
+          } catch (e: unknown) {
+            // The failure is expected, so the error is deliberately swallowed.
+          }
+        } catch (e) {
+          done(new Error('An error occurred while running the script'));
+          done(e);
+        }
+      })().catch(err => {
+        throw err;
+      });
+    });
+    it('should send the metrics to Google Cloud Monitoring for a ReadRows call with a second project', done => {
+      (async () => {
+        try {
+          // This is the second project the test is configured to work with:
+          const projectId = SECOND_PROJECT_ID;
+          const bigtable = await mockBigtable(projectId, done);
+          for (const instanceId of [instanceId1, instanceId2]) {
+            await setupBigtable(bigtable, columnFamilyId, instanceId, [
+              tableId1,
+              tableId2,
+            ]);
+            const instance = bigtable.instance(instanceId);
+            const table = instance.table(tableId1);
+            await table.getRows();
+            const table2 = instance.table(tableId2);
+            await table2.getRows();
+          }
+        } catch (e) {
+          done(new Error('An error occurred while running the script'));
+          done(e);
+        }
+      })().catch(err => {
+        throw err;
+      });
+    });
+  });
+  describe('Bigtable/ClientSideMetricsToGCMTimeout', () => {
+    // This test suite simulates a situation where the user creates multiple
+    // clients and ensures that the exporter doesn't produce any errors even
+    // when multiple clients are attempting an export.
+    async function mockBigtable(
+      projectId: string,
+      done: mocha.Done,
+      onExportSuccess?: () => void,
+    ) {
+      class TestExporter extends CloudMonitoringExporter {
+        constructor(options: ClientOptions) {
+          super(options);
+        }
+
+        async export(
+          metrics: ResourceMetrics,
+          resultCallback: (result: ExportResult) => void,
+        ): Promise<void> {
+          try {
+            await super.export(metrics, async (result: ExportResult) => {
+              try {
+                // The code is expected to be 0 because the
+                // result from calling export was successful.
+                assert.strictEqual(result.code, 0);
+                resultCallback({code: 0});
+                if (onExportSuccess) {
+                  onExportSuccess();
+                }
+              } catch (error) {
+                // The code here isn't 0 so we report the original error to the
+                // mocha test runner.
+                // The test fails here because it means that an export was
+                // unsuccessful.
+                done(result);
+                done(error);
+                resultCallback({code: 0});
+              }
+            });
+          } catch (error) {
+            done(error);
+            resultCallback({code: 0});
+          }
+        }
+      }
+
+      /*
+      Below we mock out the table so that it sends the metrics to a test exporter
+      that will still send the metrics to Google Cloud Monitoring, but then also
+      ensure the export was successful and pass the test with code 0 if it is
+      successful.
+      */
+      return getFakeBigtable(projectId, getHandlerFromExporter(TestExporter));
+    }
+
+    it('should send the metrics to Google Cloud Monitoring for a ReadRows call', done => {
+      let testFinished = false;
+      /*
+      We need to create a timeout here because if we don't then mocha shuts down
+      the test as it is sleeping before the GCPMetricsHandler has a chance to
+      export the data. When the timeout is finished, if there were no export
+      errors then the test passes.
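+      Export failures before then are reported through done(error) inside
+      TestExporter; exports that fire after the test has finished only invoke
+      resultCallback, so no hanging threads are left behind.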
+ */ + setTimeout(() => { + testFinished = true; + done(); + }, 120000); + (async () => { + try { + const bigtable1 = await mockBigtable(defaultProjectId, done); + const bigtable2 = await mockBigtable(defaultProjectId, done); + for (const bigtable of [bigtable1, bigtable2]) { + for (const instanceId of [instanceId1, instanceId2]) { + await setupBigtable(bigtable, columnFamilyId, instanceId, [ + tableId1, + tableId2, + ]); + const instance = bigtable.instance(instanceId); + const table = instance.table(tableId1); + await table.getRows(); + const table2 = instance.table(tableId2); + await table2.getRows(); + } + } + } catch (e) { + done(new Error('An error occurred while running the script')); + done(e); + } + })().catch(err => { + throw err; + }); + }); + it('should send the metrics to Google Cloud Monitoring for a ReadRows call with thirty clients', done => { + /* + We need to create a timeout here because if we don't then mocha shuts down + the test as it is sleeping before the GCPMetricsHandler has a chance to + export the data. When the timeout is finished, if there were no export + errors then the test passes. + */ + const testTimeout = setTimeout(() => { + done(new Error('The test timed out')); + }, 480000); + let testComplete = false; + const numClients = 30; + (async () => { + try { + const bigtableList = []; + const completedSet = new Set(); + for ( + let bigtableCount = 0; + bigtableCount < numClients; + bigtableCount++ + ) { + const currentCount = bigtableCount; + const onExportSuccess = () => { + completedSet.add(currentCount); + if (completedSet.size === numClients) { + // If every client has completed the export then pass the test. + clearTimeout(testTimeout); + if (!testComplete) { + testComplete = true; + done(); + } + } + }; + bigtableList.push( + await mockBigtable(defaultProjectId, done, onExportSuccess), + ); + } + for (const bigtable of bigtableList) { + for (const instanceId of [instanceId1, instanceId2]) { + await setupBigtable(bigtable, columnFamilyId, instanceId, [ + tableId1, + tableId2, + ]); + const instance = bigtable.instance(instanceId); + const table = instance.table(tableId1); + await table.getRows(); + const table2 = instance.table(tableId2); + await table2.getRows(); + } + } + } catch (e) { + done(e); + done(new Error('An error occurred while running the script')); + } + })().catch(err => { + throw err; + }); + }); + }); + describe('Bigtable/ClientSideMetricsToMetricsHandler', () => { + async function mockBigtable( + projectId: string, + done: mocha.Done, + checkFn: ( + projectId: string, + requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[], + ) => void, + ) { + let handlerRequestCount = 0; + class TestGCPMetricsHandler extends TestMetricsHandler { + projectId = projectId; + onOperationComplete(data: OnOperationCompleteData) { + handlerRequestCount++; + try { + super.onOperationComplete(data); + if (handlerRequestCount > 1) { + checkFn(projectId, this.requestsHandled); + done(); + } + } catch (e) { + done(e); + } + } + } + + const bigtable = getFakeBigtable(projectId, TestGCPMetricsHandler); + await setupBigtable(bigtable, columnFamilyId, instanceId1, [ + tableId1, + tableId2, + ]); + return bigtable; + } + + it('should send the metrics to the metrics handler for a ReadRows call', done => { + (async () => { + const bigtable = await mockBigtable( + defaultProjectId, + done, + checkMultiRowCall, + ); + const instance = bigtable.instance(instanceId1); + const table = instance.table(tableId1); + await table.getRows(); + const table2 = 
instance.table(tableId2); + await table2.getRows(); + })().catch(err => { + throw err; + }); + }); + it('should pass the projectId to the metrics handler properly', done => { + (async () => { + const bigtable = await mockBigtable( + defaultProjectId, + done, + checkMultiRowCall, + ); + const instance = bigtable.instance(instanceId1); + const table = instance.table(tableId1); + await table.getRows(); + const table2 = instance.table(tableId2); + await table2.getRows(); + })().catch(err => { + throw err; + }); + }); + it('should send the metrics to the metrics handler for a single row read', done => { + (async () => { + try { + const projectId = SECOND_PROJECT_ID; + const bigtable = await mockBigtable( + projectId, + done, + checkSingleRowCall, + ); + const instance = bigtable.instance(instanceId1); + const table = instance.table(tableId1); + const row = new Row(table, 'rowId'); + await row.get(); + const table2 = instance.table(tableId2); + const row2 = new Row(table2, 'rowId'); + await row2.get(); + } catch (e) { + done(e); + } + })().catch(err => { + throw err; + }); + }); + }); +}); diff --git a/system-test/cloud-monitoring-exporter.ts b/system-test/cloud-monitoring-exporter.ts index cbee08e21..f484c8e05 100644 --- a/system-test/cloud-monitoring-exporter.ts +++ b/system-test/cloud-monitoring-exporter.ts @@ -70,11 +70,15 @@ describe('Bigtable/CloudMonitoringExporter', () => { }); }); } - const exporter = new CloudMonitoringExporter(); - exporter.export( - transformedExportInput as unknown as ResourceMetrics, - resultCallback, - ); + const exporter = new CloudMonitoringExporter({}); // Pass empty object as options + exporter + .export( + transformedExportInput as unknown as ResourceMetrics, + resultCallback, + ) + .catch(err => { + throw err; + }); })().catch(err => { throw err; }); diff --git a/system-test/data/read-rows-retry-test.json b/system-test/data/read-rows-retry-test.json index aad5178c6..d97291a2f 100644 --- a/system-test/data/read-rows-retry-test.json +++ b/system-test/data/read-rows-retry-test.json @@ -149,7 +149,7 @@ { "startKeyClosed": "a", "endKeyClosed": "c" }, { "startKeyClosed": "x", "endKeyClosed": "z" } ] }, - { "rowKeys": [], + { "rowKeys": [], "rowRanges": [ { "startKeyClosed": "x", "endKeyClosed": "z" } ] } ], "responses": [ diff --git a/system-test/gcp-metrics-handler.ts b/system-test/gcp-metrics-handler.ts index 93025d0a8..2608d6e19 100644 --- a/system-test/gcp-metrics-handler.ts +++ b/system-test/gcp-metrics-handler.ts @@ -13,7 +13,6 @@ // limitations under the License. 
import {describe} from 'mocha';
-import {GCPMetricsHandler} from '../src/client-side-metrics/gcp-metrics-handler';
import {expectedRequestsHandled} from '../test-common/metrics-handler-fixture';
import {
  OnAttemptCompleteData,
@@ -26,8 +25,20 @@ import {
import {Bigtable} from '../src';
import {ResourceMetrics} from '@opentelemetry/sdk-metrics';
import * as assert from 'assert';
-import {expectedOtelHundredExportInputs} from '../test-common/expected-otel-export-input';
-import {replaceTimestamps} from '../test-common/replace-timestamps';
+import {ClientOptions} from 'google-gax';
+import * as proxyquire from 'proxyquire';
+
+function getHandler(Exporter: typeof CloudMonitoringExporter) {
+  const FakeGCPMetricsHandler = proxyquire(
+    '../src/client-side-metrics/gcp-metrics-handler.js',
+    {
+      './exporter': {
+        CloudMonitoringExporter: Exporter,
+      },
+    },
+  ).GCPMetricsHandler;
+  return new FakeGCPMetricsHandler();
+}

describe('Bigtable/GCPMetricsHandler', () => {
  it('Should export a value to the GCPMetricsHandler', done => {
@@ -64,13 +75,17 @@ describe('Bigtable/GCPMetricsHandler', () => {
        };
      }
      class MockExporter extends CloudMonitoringExporter {
-        export(
+        constructor(options: ClientOptions) {
+          super(options);
+        }
+
+        async export(
          metrics: ResourceMetrics,
          resultCallback: (result: ExportResult) => void,
-        ): void {
+        ): Promise<void> {
          const testResultCallback = getTestResultCallback(resultCallback);
          if (!exported) {
-            super.export(metrics, testResultCallback);
+            await super.export(metrics, testResultCallback);
          } else {
            resultCallback({code: 0});
          }
@@ -87,10 +102,7 @@ describe('Bigtable/GCPMetricsHandler', () => {
        }
      });
    });
-      // projectToInstruments argument is set to {} because we want a fresh
-      // instrument stack each time this test is run.
-      GCPMetricsHandler.instrumentsForProject = {};
-      const handler = new GCPMetricsHandler(new MockExporter({projectId}));
+      const handler = getHandler(MockExporter);
      const transformedRequestsHandled = JSON.parse(
        JSON.stringify(expectedRequestsHandled).replace(
          /my-project/g,
          projectId,
        ),
      );
@@ -105,7 +117,7 @@
      }
    }
  })().catch(err => {
-    throw err;
+    done(err);
  });
});
it('Should export a value to two GCPMetricsHandlers', done => {
@@ -152,14 +164,18 @@
        };
      }
      class MockExporter extends CloudMonitoringExporter {
-        export(
+        constructor(options: ClientOptions) {
+          super(options);
+        }
+
+        async export(
          metrics: ResourceMetrics,
          resultCallback: (result: ExportResult) => void,
-        ): void {
+        ): Promise<void> {
          if (exportedCount < 1) {
            // The code below uses the test callback to ensure the export was successful.
            const testResultCallback = getTestResultCallback(resultCallback);
-            super.export(metrics, testResultCallback);
+            await super.export(metrics, testResultCallback);
          } else {
            // After the test is complete the periodic exporter may still be
            // running in which case we don't want to do any checks. We just
@@ -180,11 +196,8 @@
        }
      });
    });
-      // projectToInstruments argument is set to {} because we want a fresh
-      // instrument stack each time this test is run.
- GCPMetricsHandler.instrumentsForProject = {}; - const handler = new GCPMetricsHandler(new MockExporter({projectId})); - const handler2 = new GCPMetricsHandler(new MockExporter({projectId})); + const handler = getHandler(MockExporter); + const handler2 = handler; const transformedRequestsHandled = JSON.parse( JSON.stringify(expectedRequestsHandled).replace( /my-project/g, @@ -206,142 +219,7 @@ describe('Bigtable/GCPMetricsHandler', () => { } } })().catch(err => { - throw err; - }); - }); - it('Should export a value to a hundred GCPMetricsHandlers', done => { - // This test ensures that when we create multiple GCPMetricsHandlers much like - // what we would be doing when calling readRows on separate tables that - // the data doesn't store duplicates in the same place and export twice as - // much data as it should. - (async () => { - /* - We need to create a timeout here because if we don't then mocha shuts down - the test as it is sleeping before the GCPMetricsHandler has a chance to - export the data. - */ - const timeout = setTimeout(() => { - done(new Error('The export never happened')); - }, 120000); - /* - The exporter is called every x seconds, but we only want to test the value - it receives once. Since done cannot be called multiple times in mocha, - exported variable ensures we only test the value export receives one time. - */ - let exportedCount = 0; - function getTestResultCallback( - resultCallback: (result: ExportResult) => void, - ) { - return (result: ExportResult) => { - exportedCount++; - try { - assert.strictEqual(result.code, 0); - } catch (error) { - // Code isn't 0 so report the original error. - done(result); - done(error); - } - if (exportedCount === 1) { - // We are expecting one call to an exporter. - clearTimeout(timeout); - done(); - } - // The resultCallback needs to be called to end the exporter operation - // so that the test shuts down in mocha. - resultCallback({code: 0}); - }; - } - class MockExporter extends CloudMonitoringExporter { - export( - metrics: ResourceMetrics, - resultCallback: (result: ExportResult) => void, - ): void { - if (exportedCount < 1) { - try { - // This code block ensures the metrics are correct. Mainly, the metrics - // shouldn't contain two copies of the data. It should only contain - // one. - // - // For this test since we are still writing a time series with - // metrics variable we don't want to modify the metrics variable - // to have artificial times because then sending the data to the - // metric service client will fail. Therefore, we must make a copy - // of the metrics and use that. - const parsedExportInput: ResourceMetrics = JSON.parse( - JSON.stringify(metrics), - ); - replaceTimestamps( - parsedExportInput as unknown as typeof expectedOtelHundredExportInputs, - [123, 789], - [456, 789], - ); - assert.deepStrictEqual( - parsedExportInput.scopeMetrics[0].metrics.length, - expectedOtelHundredExportInputs.scopeMetrics[0].metrics.length, - ); - for ( - let index = 0; - index < parsedExportInput.scopeMetrics[0].metrics.length; - index++ - ) { - // We need to compare pointwise because mocha truncates to an 8192 character limit. - assert.deepStrictEqual( - parsedExportInput.scopeMetrics[0].metrics[index], - expectedOtelHundredExportInputs.scopeMetrics[0].metrics[ - index - ], - ); - } - } catch (e) { - // The error needs to be caught so it can be reported to the mocha - // test runner. - done(e); - } - // The code below uses the test callback to ensure the export was successful. 
-          const testResultCallback = getTestResultCallback(resultCallback);
-          super.export(metrics, testResultCallback);
-        } else {
-          // After the test is complete the periodic exporter may still be
-          // running in which case we don't want to do any checks. We just
-          // want to call the resultCallback so that there are no hanging
-          // threads.
-          resultCallback({code: 0});
-        }
-      }
-    }
-
-    const bigtable = new Bigtable();
-    const projectId: string = await new Promise((resolve, reject) => {
-      bigtable.getProjectId_((err, projectId) => {
-        if (err) {
-          reject(err);
-        } else {
-          resolve(projectId as string);
-        }
-      });
-    });
-    const transformedRequestsHandled = JSON.parse(
-      JSON.stringify(expectedRequestsHandled).replace(
-        /my-project/g,
-        projectId,
-      ),
-    );
-    const handlers = [];
-    // projectToInstruments argument is set to {} because we want a fresh
-    // instrument stack each time this test is run.
-    GCPMetricsHandler.instrumentsForProject = {};
-    for (let i = 0; i < 100; i++) {
-      handlers.push(new GCPMetricsHandler(new MockExporter({projectId})));
-      for (const request of transformedRequestsHandled) {
-        if (request.attemptLatency) {
-          handlers[i].onAttemptComplete(request as OnAttemptCompleteData);
-        } else {
-          handlers[i].onOperationComplete(request as OnOperationCompleteData);
-        }
-      }
-    }
-  })().catch(err => {
-    throw err;
+    done(err);
  });
});
it('Should write two duplicate points inserted into the metrics handler', done => {
@@ -378,13 +256,17 @@
        };
      }
      class MockExporter extends CloudMonitoringExporter {
-        export(
+        constructor(options: ClientOptions) {
+          super(options);
+        }
+
+        async export(
          metrics: ResourceMetrics,
          resultCallback: (result: ExportResult) => void,
-        ): void {
+        ): Promise<void> {
          const testResultCallback = getTestResultCallback(resultCallback);
          if (!exported) {
-            super.export(metrics, testResultCallback);
+            await super.export(metrics, testResultCallback);
          } else {
            resultCallback({code: 0});
          }
@@ -401,10 +283,7 @@
        }
      });
    });
-      // projectToInstruments argument is set to {} because we want a fresh
-      // instrument stack each time this test is run.
-      GCPMetricsHandler.instrumentsForProject = {};
-      const handler = new GCPMetricsHandler(new MockExporter({projectId}));
+      const handler = getHandler(MockExporter);
      const transformedRequestsHandled = JSON.parse(
        JSON.stringify(expectedRequestsHandled).replace(
          /my-project/g,
          projectId,
        ),
      );
@@ -421,7 +300,7 @@
      }
    }
  })().catch(err => {
-    throw err;
+    done(err);
  });
});
});
diff --git a/system-test/read-modify-write-row-interceptors.ts b/system-test/read-modify-write-row-interceptors.ts
new file mode 100644
index 000000000..2996e0b3d
--- /dev/null
+++ b/system-test/read-modify-write-row-interceptors.ts
@@ -0,0 +1,294 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
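+
+// Rough shape of the flow this test exercises (all names are the ones
+// imported below; this is an informal sketch, not the library's public API):
+//
+//   const collector = new OperationMetricsCollector(
+//     table,
+//     MethodName.READ_MODIFY_WRITE_ROW,
+//     StreamingState.UNARY,
+//     handlers,
+//   );
+//   collector.onOperationStart();
+//   collector.onAttemptStart();
+//   await bigtable.request({gaxOpts: withInterceptors({}, collector), ...});
+//   collector.onAttemptComplete(GrpcStatus.OK);
+//   collector.onOperationComplete(GrpcStatus.OK);
+//
+// The interceptors installed by withInterceptors feed the zone, cluster and
+// server latency reported in response headers/trailers into the collector.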
+ +import {describe, it, before, after} from 'mocha'; +import {Bigtable} from '../src'; +import {ServiceError} from 'google-gax'; +import {ClientSideMetricsConfigManager} from '../src/client-side-metrics/metrics-config-manager'; +import {TestMetricsHandler} from '../test-common/test-metrics-handler'; +import { + OnAttemptCompleteData, + OnOperationCompleteData, +} from '../src/client-side-metrics/metrics-handler'; +import {OperationMetricsCollector} from '../src/client-side-metrics/operation-metrics-collector'; +import { + MethodName, + StreamingState, +} from '../src/client-side-metrics/client-side-metrics-attributes'; +import * as assert from 'assert'; +import {status as GrpcStatus} from '@grpc/grpc-js'; +import {withInterceptors} from '../src/interceptor'; + +const INSTANCE_ID = 'isolated-rmw-instance'; +const TABLE_ID = 'isolated-rmw-table'; +const ZONE = 'us-central1-a'; +const CLUSTER = 'fake-cluster'; +const COLUMN_FAMILY = 'traits'; +const COLUMN_FAMILIES = [COLUMN_FAMILY]; +const ROW_KEY = 'gwashington'; +const COLUMN = 'teeth'; + +/** + * Creates a Bigtable instance if it does not already exist. + * + * @param bigtable - The Bigtable client. + * @param instanceId - The ID of the instance to create. + * @param clusterId - The ID of the initial cluster in the instance. + * @param locationId - The location (region) for the initial cluster. + * @returns The created instance object if successful, otherwise logs a message and returns the existing instance. + */ +async function createInstance( + bigtable: Bigtable, + instanceId: string, + clusterId: string, + locationId: string, +) { + const instance = bigtable.instance(instanceId); + + const [exists] = await instance.exists(); + if (exists) { + console.log(`Instance ${instanceId} already exists.`); + return instance; + } + + const [i, operation] = await instance.create({ + clusters: [ + { + id: clusterId, + location: locationId, + nodes: 3, + }, + ], + labels: { + time_created: Date.now(), + }, + }); + await operation.promise(); + console.log(`Created instance ${instanceId}`); + return i; +} + +/** + * Creates a Bigtable table if it does not already exist. + * + * @param bigtable - The Bigtable client. + * @param instanceId - The ID of the instance containing the table. + * @param tableId - The ID of the table to create. + * @param families - An array of column family names to create in the table. + * @returns A promise that resolves with the created Table object. + */ +async function createTable( + bigtable: Bigtable, + instanceId: string, + tableId: string, + families: string[], +) { + const instance = bigtable.instance(instanceId); + const table = instance.table(tableId); + + const [exists] = await table.exists(); + if (exists) { + console.log(`Table ${tableId} already exists.`); + return table; + } + + const [t] = await table.create({ + families: families, + }); + const row = table.row(ROW_KEY); + await row.save({ + [COLUMN_FAMILY]: { + [COLUMN]: 'shiny', + }, + }); + console.log(`Created table ${tableId}`); + return t; +} + +/** + * Creates and returns a TestMetricsHandler instance for testing purposes. + * + * @returns A TestMetricsHandler instance with the projectId set to 'test-project-id'. + */ +function getTestMetricsHandler() { + const testMetricsHandler = new TestMetricsHandler(); + testMetricsHandler.projectId = 'test-project-id'; + return testMetricsHandler; +} + +/** + * Asynchronously retrieves the project ID associated with the Bigtable client. + * + * @param bigtable - The Bigtable client instance. 
+ * @returns A promise that resolves with the project ID as a string.
+ * @throws An error if the project ID cannot be retrieved.
+ */
+async function getProjectIdFromClient(bigtable: Bigtable): Promise<string> {
+  return new Promise((resolve, reject) => {
+    bigtable.getProjectId_((err, projectId) => {
+      if (err) {
+        reject(err);
+      } else {
+        resolve(projectId!);
+      }
+    });
+  });
+}
+
+describe('Bigtable/ReadModifyWriteRowInterceptorMetrics', () => {
+  let bigtable: Bigtable;
+  let testMetricsHandler: TestMetricsHandler;
+
+  before(async () => {
+    bigtable = new Bigtable();
+    await getProjectIdFromClient(bigtable);
+    await createInstance(bigtable, INSTANCE_ID, CLUSTER, ZONE);
+    await createTable(bigtable, INSTANCE_ID, TABLE_ID, COLUMN_FAMILIES);
+    testMetricsHandler = getTestMetricsHandler();
+    bigtable._metricsConfigManager = new ClientSideMetricsConfigManager([
+      testMetricsHandler,
+    ]);
+  });
+
+  after(async () => {
+    const instance = bigtable.instance(INSTANCE_ID);
+    await instance.delete();
+  });
+
+  it('should record and export correct metrics for ReadModifyWriteRow via interceptors', async () => {
+    const instance = bigtable.instance(INSTANCE_ID);
+
+    const table = instance.table(TABLE_ID);
+
+    /*
+    fakeReadModifyWriteRowMethod is just a fake method on a table that makes a
+    call to the readModifyWriteRow grpc endpoint. It demonstrates what a method
+    might look like when trying to make a unary call while extracting
+    information from the headers and trailers that the server returns so that
+    the extracted information can be recorded in client side metrics.
+    */
+    (table as any).fakeReadModifyWriteRowMethod = async () => {
+      // 1. Create a metrics collector.
+      const metricsCollector = new OperationMetricsCollector(
+        table,
+        MethodName.READ_MODIFY_WRITE_ROW,
+        StreamingState.UNARY,
+        (table as any).bigtable._metricsConfigManager!.metricsHandlers,
+      );
+      // 2. Tell the metrics collector that the operation and its first
+      // attempt have started.
+      metricsCollector.onOperationStart();
+      metricsCollector.onAttemptStart();
+      // 3. Make a unary call with gax options that include interceptors. The
+      // interceptors are built from a method that hooks them up to the
+      // metrics collector.
+      const responseArray = await new Promise((resolve, reject) => {
+        bigtable.request(
+          {
+            client: 'BigtableClient',
+            method: 'readModifyWriteRow',
+            reqOpts: {
+              tableName: table.name,
+              rowKey: Buffer.from(ROW_KEY),
+              rules: [
+                {
+                  familyName: COLUMN_FAMILY,
+                  columnQualifier: Buffer.from(COLUMN),
+                  appendValue: Buffer.from('-wood'),
+                },
+              ],
+              appProfileId: undefined,
+            },
+            gaxOpts: withInterceptors({}, metricsCollector),
+          },
+          (err: ServiceError | null, resp?: any) => {
+            if (err) {
+              reject(err);
+            } else {
+              resolve(resp);
+            }
+          },
+        );
+      });
+      // 4. Tell the metrics collector that the attempt and the operation are
+      // over.
+      metricsCollector.onAttemptComplete(GrpcStatus.OK);
+      metricsCollector.onOperationComplete(GrpcStatus.OK);
+      // 5.
Return results of method call to the user + return responseArray; + }; + + await (table as any).fakeReadModifyWriteRowMethod(); + + assert.strictEqual(testMetricsHandler.requestsHandled.length, 2); + + const attemptCompleteData = testMetricsHandler.requestsHandled.find( + m => (m as {attemptLatency?: number}).attemptLatency !== undefined, + ) as OnAttemptCompleteData | undefined; + const operationCompleteData = testMetricsHandler.requestsHandled.find( + m => (m as {operationLatency?: number}).operationLatency !== undefined, + ) as OnOperationCompleteData | undefined; + + assert.ok(attemptCompleteData, 'OnAttemptCompleteData should be present'); + assert.ok( + operationCompleteData, + 'OnOperationCompleteData should be present', + ); + if (!attemptCompleteData || !operationCompleteData) { + throw new Error('Metrics data is missing'); // Should be caught by asserts above + } + assert.strictEqual( + attemptCompleteData.metricsCollectorData.method, + MethodName.READ_MODIFY_WRITE_ROW, + ); + assert.strictEqual(attemptCompleteData.status, '0'); + assert.strictEqual( + attemptCompleteData.metricsCollectorData.table, + TABLE_ID, + ); + assert.strictEqual( + attemptCompleteData.metricsCollectorData.instanceId, + INSTANCE_ID, + ); + assert.ok(attemptCompleteData.attemptLatency >= 0); + assert(attemptCompleteData.serverLatency); + assert.ok(attemptCompleteData.serverLatency >= 0); + assert.strictEqual(attemptCompleteData.metricsCollectorData.zone, ZONE); + assert.strictEqual( + attemptCompleteData.metricsCollectorData.cluster, + CLUSTER, + ); + assert.strictEqual(attemptCompleteData.streaming, StreamingState.UNARY); + + assert.strictEqual( + operationCompleteData.metricsCollectorData.method, + MethodName.READ_MODIFY_WRITE_ROW, + ); + assert.strictEqual(operationCompleteData.status, '0'); + assert.strictEqual( + operationCompleteData.metricsCollectorData.table, + TABLE_ID, + ); + assert.strictEqual( + operationCompleteData.metricsCollectorData.instanceId, + INSTANCE_ID, + ); + assert.ok(operationCompleteData.operationLatency >= 0); + assert.strictEqual(operationCompleteData.retryCount, 0); + assert.strictEqual(operationCompleteData.metricsCollectorData.zone, ZONE); + assert.strictEqual( + operationCompleteData.metricsCollectorData.cluster, + CLUSTER, + ); + assert.strictEqual(operationCompleteData.streaming, StreamingState.UNARY); + }); +}); diff --git a/system-test/read-rows-acceptance-tests.ts b/system-test/read-rows-acceptance-tests.ts index 93b9af01c..8d70db610 100644 --- a/system-test/read-rows-acceptance-tests.ts +++ b/system-test/read-rows-acceptance-tests.ts @@ -25,6 +25,37 @@ import * as fs from 'fs'; import * as path from 'path'; import {Instance} from '../src/instance'; import {Bigtable, AbortableDuplex} from '../src'; +import {ClientSideMetricsConfigManager} from '../src/client-side-metrics/metrics-config-manager'; +import { + ITabularApiSurface, + OperationMetricsCollector, +} from '../src/client-side-metrics/operation-metrics-collector'; +import { + MethodName, + StreamingState, +} from '../src/client-side-metrics/client-side-metrics-attributes'; + +class FakeOperationMetricsCollector extends OperationMetricsCollector { + onOperationComplete() {} + onResponse() {} + onAttemptStart() {} + onAttemptComplete() {} + onOperationStart() {} + handleStatusAndMetadata() {} + onMetadataReceived() {} + onRowReachesUser() {} + onStatusMetadataReceived() {} +} + +class FakeMetricsConfigManager extends ClientSideMetricsConfigManager { + createOperation( + methodName: MethodName, + streaming: 
StreamingState, + table: ITabularApiSurface, + ): OperationMetricsCollector { + return new FakeOperationMetricsCollector(table, methodName, streaming, []); + } +} const protosJson = path.resolve(__dirname, '../protos/protos.json'); const root = protobuf.Root.fromJSON( @@ -67,6 +98,7 @@ describe('Read Row Acceptance tests', () => { }); table.bigtable = {} as Bigtable; + table.bigtable._metricsConfigManager = new FakeMetricsConfigManager([]); // eslint-disable-next-line @typescript-eslint/no-explicit-any (table.bigtable.request as any) = () => { const stream = new PassThrough({ diff --git a/system-test/read-rows.ts b/system-test/read-rows.ts index fe75d7352..bdb91a0ac 100644 --- a/system-test/read-rows.ts +++ b/system-test/read-rows.ts @@ -25,6 +25,9 @@ import {EventEmitter} from 'events'; import {Test} from './testTypes'; import {ServiceError, GrpcClient, GoogleError, CallOptions} from 'google-gax'; import {PassThrough} from 'stream'; +import * as proxyquire from 'proxyquire'; +import {TabularApiSurface} from '../src/tabular-api-surface'; +import * as mocha from 'mocha'; const {grpc} = new GrpcClient(); @@ -76,7 +79,32 @@ function rowResponse(rowKey: {}) { } describe('Bigtable/Table', () => { - const bigtable = new Bigtable(); + /** + * We have to mock out the metrics handler because the metrics handler with + * open telemetry causes clock.runAll() to throw an infinite loop error. This + * is most likely because of the periodic reader as it schedules pending + * events on the node event loop which conflicts with the sinon clock. + */ + class TestGCPMetricsHandler { + onOperationComplete() {} + onAttemptComplete() {} + } + const FakeTabularApiSurface = proxyquire('../src/tabular-api-surface.js', { + './client-side-metrics/gcp-metrics-handler': { + GCPMetricsHandler: TestGCPMetricsHandler, + }, + }).TabularApiSurface; + const FakeTable: TabularApiSurface = proxyquire('../src/table.js', { + './tabular-api-surface.js': {TabularApiSurface: FakeTabularApiSurface}, + }).Table; + const FakeInstance = proxyquire('../src/instance.js', { + './table.js': {Table: FakeTable}, + }).Instance; + const FakeBigtable = proxyquire('../src/index.js', { + './instance.js': {Instance: FakeInstance}, + }).Bigtable; + + const bigtable = new FakeBigtable(); const INSTANCE_NAME = 'fake-instance2'; // eslint-disable-next-line @typescript-eslint/no-explicit-any (bigtable as any).grpcCredentials = grpc.credentials.createInsecure(); @@ -124,7 +152,6 @@ describe('Bigtable/Table', () => { }); describe('createReadStream', () => { - let clock: sinon.SinonFakeTimers; let endCalled: boolean; let error: ServiceError | null; let requestedOptions: Array<{}>; @@ -133,25 +160,13 @@ describe('Bigtable/Table', () => { let stub: sinon.SinonStub; beforeEach(() => { - clock = sinon.useFakeTimers({ - toFake: [ - 'setTimeout', - 'clearTimeout', - 'setImmediate', - 'clearImmediate', - 'setInterval', - 'clearInterval', - 'Date', - 'nextTick', - ], - }); endCalled = false; error = null; responses = null; rowKeysRead = []; requestedOptions = []; stub = sinon.stub(bigtable, 'request').callsFake(cfg => { - const reqOpts = cfg.reqOpts; + const reqOpts = (cfg as any).reqOpts; const requestOptions = {} as google.bigtable.v2.IRowSet; if (reqOpts.rows && reqOpts.rows.rowRanges) { requestOptions.rowRanges = reqOpts.rows.rowRanges.map( @@ -186,34 +201,48 @@ describe('Bigtable/Table', () => { }); afterEach(() => { - clock.restore(); stub.restore(); }); tests.forEach(test => { - it(test.name, () => { + it(test.name, (done: mocha.Done) => { responses = 
test.responses;
        TABLE.maxRetries = test.max_retries;
        TABLE.createReadStream(test.createReadStream_options)
-          .on('data', row => rowKeysRead[rowKeysRead.length - 1].push(row.id))
-          .on('end', () => (endCalled = true))
-          .on('error', err => (error = err as ServiceError));
-        clock.runAll();
+          .on('data', (row: any) =>
+            rowKeysRead[rowKeysRead.length - 1].push(row.id),
+          )
+          .on('end', () => {
+            endCalled = true;
+            doAssertionChecks();
+          })
+          .on('error', (err: any) => {
+            error = err as ServiceError;
+            doAssertionChecks();
+          });

-        if (test.error) {
-          assert(!endCalled, ".on('end') should not have been invoked");
-          assert.strictEqual(error!.code, test.error);
-        } else {
-          assert(endCalled, ".on('end') shoud have been invoked");
-          assert.ifError(error);
+        function doAssertionChecks() {
+          try {
+            if (test.error) {
+              assert(!endCalled, ".on('end') should not have been invoked");
+              assert.strictEqual(error!.code, test.error);
+            } else {
+              assert(endCalled, ".on('end') should have been invoked");
+              assert.ifError(error);
+            }
+            assert.deepStrictEqual(rowKeysRead, test.row_keys_read);
+            assert(responses);
+            assert.strictEqual(
+              responses.length,
+              0,
+              'not all the responses were used',
+            );
+            assert.deepStrictEqual(requestedOptions, test.request_options);
+            done();
+          } catch (e) {
+            done(e);
+          }
        }
-        assert.deepStrictEqual(rowKeysRead, test.row_keys_read);
-        assert.strictEqual(
-          responses.length,
-          0,
-          'not all the responses were used',
-        );
-        assert.deepStrictEqual(requestedOptions, test.request_options);
      });
    });
  });
diff --git a/test-common/expected-otel-export-input.ts b/test-common/expected-otel-export-input.ts
index 5a8ac0c82..7561dda38 100644
--- a/test-common/expected-otel-export-input.ts
+++ b/test-common/expected-otel-export-input.ts
@@ -895,7 +895,6 @@ export const expectedOtelExportInput = {
        'telemetry.sdk.language': 'nodejs',
        'telemetry.sdk.name': 'opentelemetry',
        'telemetry.sdk.version': '1.30.1',
-        'monitored_resource.project_id': 'my-project',
      },
      asyncAttributesPending: false,
      _syncAttributes: {
@@ -903,7 +902,6 @@
        'telemetry.sdk.language': 'nodejs',
        'telemetry.sdk.name': 'opentelemetry',
        'telemetry.sdk.version': '1.30.1',
-        'monitored_resource.project_id': 'my-project',
      },
      _asyncAttributesPromise: {},
    },
diff --git a/test-common/metrics-handler-fixture.ts b/test-common/metrics-handler-fixture.ts
index 69fce0287..39e89cfcd 100644
--- a/test-common/metrics-handler-fixture.ts
+++ b/test-common/metrics-handler-fixture.ts
@@ -26,7 +26,6 @@ export const expectedRequestsHandled = [
      cluster: 'fake-cluster3',
      zone: 'us-west1-c',
      method: 'Bigtable.ReadRows',
-      client_uid: 'fake-uuid',
    },
    projectId: 'my-project',
  },
@@ -43,7 +42,6 @@
      cluster: 'fake-cluster3',
      zone: 'us-west1-c',
      method: 'Bigtable.ReadRows',
-      client_uid: 'fake-uuid',
    },
    projectId: 'my-project',
  },
@@ -56,7 +54,6 @@
      cluster: 'fake-cluster3',
      zone: 'us-west1-c',
      method: 'Bigtable.ReadRows',
-      client_uid: 'fake-uuid',
    },
    client_name: 'nodejs-bigtable',
    projectId: 'my-project',
diff --git a/test-common/test-metrics-handler.ts b/test-common/test-metrics-handler.ts
index 61257913f..0ace7b271 100644
--- a/test-common/test-metrics-handler.ts
+++ b/test-common/test-metrics-handler.ts
@@ -23,21 +23,20 @@ import {
 * It logs the metrics and attributes received by the onOperationComplete and onAttemptComplete methods.
*/ export class TestMetricsHandler implements IMetricsHandler { - private messages: {value: string}; + messages = {value: ''}; + projectId = 'projectId'; requestsHandled: (OnOperationCompleteData | OnAttemptCompleteData)[] = []; - constructor(messages: {value: string}) { - this.messages = messages; - } /** * Logs the metrics and attributes received for an operation completion. * @param {OnOperationCompleteData} data Metrics related to the completed operation. */ onOperationComplete(data: OnOperationCompleteData) { - this.requestsHandled.push(data); - data.client_name = 'nodejs-bigtable'; + const dataWithProject = Object.assign({projectId: this.projectId}, data); + dataWithProject.client_name = 'nodejs-bigtable'; + this.requestsHandled.push(dataWithProject); this.messages.value += 'Recording parameters for onOperationComplete:\n'; - this.messages.value += `${JSON.stringify(data)}\n`; + this.messages.value += `${JSON.stringify(dataWithProject)}\n`; } /** @@ -45,9 +44,10 @@ export class TestMetricsHandler implements IMetricsHandler { * @param {OnOperationCompleteData} data Metrics related to the completed attempt. */ onAttemptComplete(data: OnAttemptCompleteData) { - this.requestsHandled.push(data); - data.client_name = 'nodejs-bigtable'; + const dataWithProject = Object.assign({projectId: this.projectId}, data); + dataWithProject.client_name = 'nodejs-bigtable'; + this.requestsHandled.push(dataWithProject); this.messages.value += 'Recording parameters for onAttemptComplete:\n'; - this.messages.value += `${JSON.stringify(data)}\n`; + this.messages.value += `${JSON.stringify(dataWithProject)}\n`; } } diff --git a/test/base64keymap.ts b/test/base64keymap.ts new file mode 100644 index 000000000..9691e0bbc --- /dev/null +++ b/test/base64keymap.ts @@ -0,0 +1,184 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
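+
+// A small usage sketch of the semantics under test (illustrative only):
+// EncodedKeyMap keys Buffers by byte content rather than by object identity,
+//
+//   const m = new EncodedKeyMap([[Buffer.from('k'), 'v']]);
+//   m.get(Buffer.from('k')); // 'v', even though this is a different Buffer
+//   m.set('stringKey', 'w'); // plain string keys work like a normal Map
+//
+// so logically-equal byte keys always address the same entry.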
+
+import * as assert from 'assert';
+import {describe, it} from 'mocha';
+
+import {EncodedKeyMap, SqlValue} from '../src/execute-query/values.js';
+
+describe('Bigtable/EncodedKeyMap', () => {
+  describe('map tests', () => {
+    it('test constructor', () => {
+      const bufferKey = Buffer.from('exampleKey');
+      const entries: [string | Buffer, string][] = [
+        [bufferKey, 'valueForBufferKey'],
+        ['stringKey', 'valueForStringKey'],
+      ];
+
+      const map = new EncodedKeyMap(entries);
+      // get works with the same object
+      assert.deepStrictEqual(map.get(bufferKey), 'valueForBufferKey');
+      // get works with a new object
+      assert.deepStrictEqual(
+        map.get(Buffer.from('exampleKey')),
+        'valueForBufferKey',
+      );
+      // get works with a regular string
+      assert.deepStrictEqual(map.get('stringKey'), 'valueForStringKey');
+    });
+    it('test duplicate keys', () => {
+      const bufferKey1 = Buffer.from('exampleKey');
+      const bufferKey2 = Buffer.from('exampleKey');
+      const bufferKey3 = Buffer.from('exampleKey');
+      const entries: [string | Buffer, string][] = [
+        [bufferKey1, 'valueForBufferKey1'],
+        ['stringKey', 'valueForStringKey1'],
+        [bufferKey2, 'valueForBufferKey2'],
+        ['stringKey', 'valueForStringKey2'],
+      ];
+
+      const map = new EncodedKeyMap(entries);
+      // get works with the same object
+      assert.deepStrictEqual(map.get(bufferKey1), 'valueForBufferKey2');
+      assert.deepStrictEqual(map.get(bufferKey2), 'valueForBufferKey2');
+      // get works with a new object
+      assert.deepStrictEqual(
+        map.get(Buffer.from('exampleKey')),
+        'valueForBufferKey2',
+      );
+      // get works with a regular string
+      assert.deepStrictEqual(map.get('stringKey'), 'valueForStringKey2');
+
+      // check that old value is replaced
+      map.set(bufferKey3, 'valueForBufferKey3');
+      assert.deepStrictEqual(
+        map.get(Buffer.from('exampleKey')),
+        'valueForBufferKey3',
+      );
+      map.set('stringKey', 'valueForStringKey3');
+      assert.deepStrictEqual(map.get('stringKey'), 'valueForStringKey3');
+    });
+    it('test get/set', () => {
+      const bufferKey = Buffer.from('exampleKey');
+      const map = new EncodedKeyMap();
+      map.set(bufferKey, 'valueForBufferKey');
+      map.set('stringKey', 'valueForStringKey');
+      // get works with the same object
+      assert.deepStrictEqual(map.get(bufferKey), 'valueForBufferKey');
+      // get works with a new object
+      assert.deepStrictEqual(
+        map.get(Buffer.from('exampleKey')),
+        'valueForBufferKey',
+      );
+      // get works with a regular string
+      assert.deepStrictEqual(map.get('stringKey'), 'valueForStringKey');
+    });
+    it('test null vs empty string', () => {
+      const entries: [string | Buffer | null, string][] = [
+        [null, 'valueForNull'],
+        ['', 'valueForEmptyString'],
+      ];
+
+      // TS normally would not permit a null key, thus we pass entries as any
+      const map = new EncodedKeyMap(entries as any);
+      // get works with an empty string key
+      assert.deepStrictEqual(map.get(''), 'valueForEmptyString');
+      // get works with a null key
+      assert.deepStrictEqual(map.get(null as any), 'valueForNull');
+    });
+    it('test null vs empty bytes', () => {
+      const entries: [string | Buffer | null, string][] = [
+        [null, 'valueForNull'],
+        [Buffer.from(''), 'valueForEmptyBuffer'],
+      ];
+
+      // TS normally would not permit a null key, thus we pass entries as 
any); + // get works with the same object + assert.deepStrictEqual(map.get(Buffer.from('')), 'valueForEmptyBuffer'); + // get works with a regular string + assert.deepStrictEqual(map.get(null as any), 'valueForNull'); + }); + it('map builtin functions', () => { + const entries: [string | Buffer | null, string][] = [ + [Buffer.from('Buffer1'), 'valueForBuffer1'], + ['stringKey1', 'valueForString1'], + ]; + + // TS normally would not permit a null key, thus we pass entries as any + const map = new EncodedKeyMap(entries as any); + + // get works with a buffer + assert.deepStrictEqual( + map.get(Buffer.from('Buffer1')), + 'valueForBuffer1', + ); + // get works with a regular string + assert.deepStrictEqual(map.get('stringKey1'), 'valueForString1'); + + // delete, set, has, size + + map.set(Buffer.from('Buffer2'), 'valueForBuffer2'); + map.set('stringKey2', 'valueForString2'); + + assert.deepStrictEqual(map.size, 4); + + assert.deepStrictEqual( + map.get(Buffer.from('Buffer2')), + 'valueForBuffer2', + ); + assert.deepStrictEqual(map.get('stringKey2'), 'valueForString2'); + + assert.strictEqual(map.has('stringKey2'), true); + assert.strictEqual(map.has(Buffer.from('Buffer2')), true); + + map.delete('stringKey2'); + map.delete(Buffer.from('Buffer2')); + + assert.strictEqual(map.has('stringKey2'), false); + assert.strictEqual(map.has(Buffer.from('Buffer2')), false); + + assert.deepStrictEqual(map.size, 2); + + // iterators + + const keys = [...map.keys()]; + assert.deepStrictEqual(keys[0]?.toString(), 'Buffer1'); + assert.deepStrictEqual(keys[0] instanceof Buffer, true); + assert.deepStrictEqual(keys[1], 'stringKey1'); + + const values = [...map.values()]; + assert.deepStrictEqual(values[0], 'valueForBuffer1'); + assert.deepStrictEqual(values[1], 'valueForString1'); + + const resultForEach: [string | bigint | Uint8Array | null, SqlValue][] = + []; + map.forEach((value, key) => { + resultForEach.push([key, value]); + }); + + assert.deepStrictEqual(resultForEach[0][0]?.toString(), 'Buffer1'); + assert.deepStrictEqual(resultForEach[0][0] instanceof Buffer, true); + assert.deepStrictEqual(resultForEach[0][1], 'valueForBuffer1'); + assert.deepStrictEqual(resultForEach[1][0], 'stringKey1'); + assert.deepStrictEqual(resultForEach[1][1], 'valueForString1'); + }); + }); +}); diff --git a/test/bytebuffertransformer.ts b/test/bytebuffertransformer.ts new file mode 100644 index 000000000..d96f49b90 --- /dev/null +++ b/test/bytebuffertransformer.ts @@ -0,0 +1,380 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
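+
+// Informal summary of the lifecycle exercised below: incoming
+// protoRowsBatch.batchData chunks accumulate in messageBuffer; a batch that
+// arrives with a valid checksum flushes the concatenated bytes into
+// messageQueue; a reset discards both the queue and the buffer; and a resume
+// token pushes the queued messages downstream together with that token.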
+
+import * as assert from 'assert';
+import {afterEach, beforeEach, describe, it} from 'mocha';
+import * as sinon from 'sinon';
+import {google} from '../protos/protos';
+import {createProtoRows} from './utils/proto-bytes';
+import {ByteBufferTransformer} from '../src/execute-query/bytebuffertransformer';
+import * as SqlValues from '../src/execute-query/values';
+
+type PublicByteBufferTransformer = {
+  messageQueue: Buffer[];
+  messageBuffer: Uint8Array[];
+  push: (data: any) => void;
+  processProtoRowsBatch: (
+    partialResultSet: google.bigtable.v2.IPartialResultSet,
+  ) => void;
+};
+
+describe('Bigtable/ExecuteQueryByteBufferTransformer', () => {
+  let checksumValidStub: any;
+  let checksumIsValid = true;
+  let byteBuffer: PublicByteBufferTransformer;
+
+  beforeEach(() => {
+    checksumIsValid = true;
+    checksumValidStub = sinon
+      .stub(SqlValues, 'checksumValid')
+      .callsFake(() => checksumIsValid);
+    byteBuffer =
+      new ByteBufferTransformer() as any as PublicByteBufferTransformer;
+  });
+
+  afterEach(() => {
+    checksumValidStub.restore();
+  });
+
+  describe('processProtoRowsBatch', () => {
+    it('empty result', done => {
+      assert.throws(() => {
+        byteBuffer.processProtoRowsBatch({});
+      }, /Error: Response did not contain any results!/);
+      done();
+    });
+
+    it('just checksum', done => {
+      const response1 = createProtoRows(undefined, undefined, undefined, {
+        intValue: 1,
+      });
+      const responseWithChecksum = createProtoRows(undefined, 111, undefined);
+
+      // fill the buffer
+      byteBuffer.processProtoRowsBatch(response1.results!);
+
+      // check that the buffer is filled
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.strictEqual(
+        byteBuffer.messageBuffer[0],
+        response1.results!.protoRowsBatch!.batchData!,
+      );
+
+      // send the checksum
+      byteBuffer.processProtoRowsBatch(responseWithChecksum.results!);
+
+      // check that the buffer is flushed and queue contains the new message
+      assert.strictEqual(byteBuffer.messageQueue.length, 1);
+      assert.deepStrictEqual(
+        byteBuffer.messageQueue[0],
+        Buffer.concat([
+          response1.results!.protoRowsBatch!.batchData! as Buffer,
+        ]),
+      );
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+      done();
+    });
+
+    it('checksum flushes the buffer', done => {
+      const response1 = createProtoRows(undefined, undefined, undefined, {
+        intValue: 1,
+      });
+      const responseWithChecksum = createProtoRows(undefined, 111, undefined, {
+        intValue: 2,
+      });
+
+      // fill the buffer
+      byteBuffer.processProtoRowsBatch(response1.results!);
+
+      // check that the buffer is filled
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.strictEqual(
+        byteBuffer.messageBuffer[0],
+        response1.results!.protoRowsBatch!.batchData!,
+      );
+
+      // send the checksum
+      byteBuffer.processProtoRowsBatch(responseWithChecksum.results!);
+
+      // check that the buffer is flushed and queue contains the new message
+      // containing both values
+      assert.strictEqual(byteBuffer.messageQueue.length, 1);
+      assert.deepStrictEqual(
+        byteBuffer.messageQueue[0],
+        Buffer.concat([
+          response1.results!.protoRowsBatch!.batchData! as Buffer,
+          responseWithChecksum.results!.protoRowsBatch!.batchData! 
as Buffer, + ]), + ); + assert.strictEqual(byteBuffer.messageBuffer.length, 0); + done(); + }); + + it('just reset', done => { + const responseWithReset = createProtoRows(undefined, undefined, true); + + // send a reset + byteBuffer.processProtoRowsBatch(responseWithReset.results!); + + done(); + }); + + it('reset empties the buffer', done => { + // we first prepare the byteBuffer with a few messages + // then we send a reset and observe that the queue and + // buffer have been emptied and only the new message + // is present + const response1 = createProtoRows(undefined, undefined, undefined, { + intValue: 1, + }); + const responseWithReset = createProtoRows(undefined, undefined, true, { + intValue: 4, + }); + + byteBuffer.processProtoRowsBatch(response1.results!); + + // check that the buffer is filled + assert.strictEqual(byteBuffer.messageQueue.length, 0); + assert.strictEqual(byteBuffer.messageBuffer.length, 1); + assert.strictEqual( + byteBuffer.messageBuffer[0], + response1.results!.protoRowsBatch!.batchData!, + ); + + // send a reset + byteBuffer.processProtoRowsBatch(responseWithReset.results!); + + // check that the buffer has been emptied and populated with + // the new message after the reset + assert.strictEqual(byteBuffer.messageQueue.length, 0); + assert.strictEqual(byteBuffer.messageBuffer.length, 1); + assert.deepStrictEqual( + byteBuffer.messageBuffer[0], + responseWithReset.results!.protoRowsBatch!.batchData!, + ); + done(); + }); + + it('reset empties the queue and buffer', done => { + // we first prepare the byteBuffer with a few messages + // then we send a reset and observe that the queue and + // buffer have been emptied and only the new message + // is present + const responses = [ + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + createProtoRows(undefined, 111, undefined, {intValue: 2}), + createProtoRows(undefined, undefined, undefined, {intValue: 3}), + ]; + const responseWithReset = createProtoRows(undefined, undefined, true, { + intValue: 4, + }); + + // fill the buffer with messages + for (const response of responses) { + byteBuffer.processProtoRowsBatch(response.results!); + } + + // check that the buffer and queue are filled + assert.strictEqual(byteBuffer.messageQueue.length, 1); + assert.deepStrictEqual( + byteBuffer.messageQueue[0], + Buffer.concat([ + responses[0].results!.protoRowsBatch!.batchData! as Buffer, + responses[1].results!.protoRowsBatch!.batchData! 
as Buffer,
+        ]),
+      );
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.strictEqual(
+        byteBuffer.messageBuffer[0],
+        responses[2].results!.protoRowsBatch!.batchData!,
+      );
+
+      // send a reset
+      byteBuffer.processProtoRowsBatch(responseWithReset.results!);
+
+      // check that the buffer and queue have been emptied and populated with
+      // the new message after the reset
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.deepStrictEqual(
+        byteBuffer.messageBuffer[0],
+        responseWithReset.results!.protoRowsBatch!.batchData!,
+      );
+      done();
+    });
+
+    it('token triggers push', done => {
+      let pushedData = null;
+      byteBuffer.push = (data: any) => {
+        pushedData = data;
+      };
+      const response1 = createProtoRows(undefined, undefined, undefined, {
+        intValue: 1,
+      });
+      const responseWithToken = createProtoRows('token', 111, undefined, {
+        intValue: 2,
+      });
+
+      // fill the buffer
+      byteBuffer.processProtoRowsBatch(response1.results!);
+
+      // check that the buffer is filled
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.strictEqual(
+        byteBuffer.messageBuffer[0],
+        response1.results!.protoRowsBatch!.batchData!,
+      );
+
+      // send a token
+      byteBuffer.processProtoRowsBatch(responseWithToken.results!);
+
+      // check that the data was pushed and buffer and queue are empty
+      // but pushed data contains the value from the 2nd message
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.deepStrictEqual(pushedData, [
+        [
+          Buffer.concat([
+            response1.results!.protoRowsBatch!.batchData! as Buffer,
+            responseWithToken.results!.protoRowsBatch!.batchData! as Buffer,
+          ]),
+        ],
+        Buffer.from('token'),
+      ]);
+      done();
+    });
+
+    it('separate token', done => {
+      let pushedData = null;
+      byteBuffer.push = (data: any) => {
+        pushedData = data;
+      };
+      const response1 = createProtoRows(undefined, 111, undefined, {
+        intValue: 1,
+      });
+      const responseWithToken = createProtoRows('token', undefined, undefined);
+
+      // fill the buffer
+      byteBuffer.processProtoRowsBatch(response1.results!);
+
+      // check that the buffer is filled
+      assert.strictEqual(byteBuffer.messageQueue.length, 1);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+
+      // send a token
+      byteBuffer.processProtoRowsBatch(responseWithToken.results!);
+
+      // check that the data was pushed and buffer and queue are empty
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.deepStrictEqual(pushedData, [
+        [response1.results!.protoRowsBatch!.batchData!
as Buffer],
+        Buffer.from('token'),
+      ]);
+      done();
+    });
+
+    it('checksum without data throws', done => {
+      const responseWithChecksum = createProtoRows(undefined, 111, undefined);
+
+      // send a checksum
+      assert.throws(() => {
+        byteBuffer.processProtoRowsBatch(responseWithChecksum.results!);
+      }, /Error: Recieved empty batch with non-zero checksum\./);
+
+      done();
+    });
+
+    it('token without checksum throws', done => {
+      let pushedData = null;
+      byteBuffer.push = (data: any) => {
+        pushedData = data;
+      };
+      const response1 = createProtoRows(undefined, undefined, undefined, {
+        intValue: 1,
+      });
+      const responseWithToken = createProtoRows('token', undefined, undefined);
+
+      // fill the buffer
+      byteBuffer.processProtoRowsBatch(response1.results!);
+
+      // check that the buffer is filled
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 1);
+      assert.strictEqual(
+        byteBuffer.messageBuffer[0],
+        response1.results!.protoRowsBatch!.batchData!,
+      );
+
+      // send a token
+      assert.throws(() => {
+        byteBuffer.processProtoRowsBatch(responseWithToken.results!);
+      }, /Error: Recieved incomplete batch of rows\./);
+
+      done();
+    });
+
+    it('token without data', done => {
+      let pushedData = null;
+      byteBuffer.push = (data: any) => {
+        pushedData = data;
+      };
+      const responseWithToken = createProtoRows('token', undefined, undefined);
+
+      // check that the buffer and queue are empty
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+
+      // send a token
+      byteBuffer.processProtoRowsBatch(responseWithToken.results!);
+
+      // check that the token was pushed even though the buffer and queue are empty
+      assert.strictEqual(byteBuffer.messageBuffer.length, 0);
+      assert.strictEqual(byteBuffer.messageQueue.length, 0);
+      assert.deepStrictEqual(pushedData, [[], Buffer.from('token')]);
+      done();
+    });
+
+    it('checksum properly calculated', done => {
+      checksumValidStub.restore();
+      const response = createProtoRows(
+        'token1',
+        2412835642,
+        undefined,
+        {intValue: 1},
+        {intValue: 2},
+      );
+      byteBuffer.processProtoRowsBatch(response.results!);
+      done();
+    });
+
+    it('invalid checksum throws', done => {
+      checksumValidStub.restore();
+      const response = createProtoRows(
+        'token1',
+        111,
+        undefined,
+        {intValue: 1},
+        {intValue: 2},
+      );
+      assert.throws(() => {
+        byteBuffer.processProtoRowsBatch(response.results!);
+      }, /Error: Failed to validate next batch of results/);
+      done();
+    });
+  });
+});
diff --git a/test/executequery.ts b/test/executequery.ts
new file mode 100644
index 000000000..2e87ac8f3
--- /dev/null
+++ b/test/executequery.ts
@@ -0,0 +1,2049 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
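The ExecuteQuery tests that follow drive a client-side state machine by feeding it mocked prepared-statement callbacks and mocked gRPC streams, then asserting on resultStream._stateMachine.state at each step. The state names below are quoted verbatim from those assertions; the roles and the happy-path transition chain are inferred from the tests, not taken from the state machine source:

// States asserted by the tests in this file (roles inferred in comments).
type ExecuteQueryState =
  | 'AwaitingQueryPlan' // waiting on preparedStatement.getData()
  | 'BeforeFirstResumeToken' // streaming, nothing committed yet
  | 'AfterFirstResumeToken' // at least one resume token was committed
  | 'DrainingBeforeResumeToken' // retryable error with no token: replay all
  | 'DrainingAfterResumeToken' // retryable error after a token: resume
  | 'DrainAndRefreshQueryPlan' // PREPARED_QUERY_EXPIRED: re-prepare first
  | 'Failed' // terminal: an error is emitted to the caller
  | 'Finished'; // terminal: clean 'end'/'close' from the server stream

// Happy path, as driven by the first test below:
// AwaitingQueryPlan -> BeforeFirstResumeToken -> AfterFirstResumeToken
//   -> Finished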
+import * as promisify from '@google-cloud/promisify'; +import * as assert from 'assert'; +import {before, beforeEach, afterEach, describe, it} from 'mocha'; +import * as sinon from 'sinon'; +import * as proxyquire from 'proxyquire'; +import {grpc} from 'google-gax'; +import * as inst from '../src/instance'; +import {Bigtable} from '../src'; +import {google} from '../protos/protos'; +import * as SqlTypes from '../src/execute-query/types'; +import * as pumpify from 'pumpify'; +import { + ArrayReadableStream, + createMetadata, + createPrepareQueryResponse, + createProtoRows, + pbType, +} from './utils/proto-bytes'; +import {QueryResultRow} from '../src/execute-query/values'; +import { + PreparedStatement, + SHOULD_REFRESH_SOON_PERIOD_MS, +} from '../src/execute-query/preparedstatement'; +import {MetadataConsumer} from '../src/execute-query/metadataconsumer'; +import {PassThrough} from 'stream'; +import * as SqlValues from '../src/execute-query/values'; + +const sandbox = sinon.createSandbox(); + +const fakePromisify = Object.assign({}, promisify, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + promisifyAll(klass: Function, options: any) { + if (klass.name !== 'Instance') { + return; + } + assert.deepStrictEqual(options.exclude, [ + 'appProfile', + 'cluster', + 'table', + 'getBackupsStream', + 'getTablesStream', + 'getAppProfilesStream', + 'view', + ]); + }, +}); + +class MockPreparedStatement { + callbacks: any[] = []; + markedAsExpired = false; + getData = (cb: any, timeout: any) => { + this.callbacks.push(cb); + }; + getParameterTypes = () => { + return {}; + }; + markAsExpired = () => { + this.markedAsExpired = true; + }; +} + +function createResultSetMetadata( + ...values: [string | null, google.bigtable.v2.Type][] +): SqlTypes.ResultSetMetadata { + return MetadataConsumer.parseMetadata(createMetadata(...values).metadata!); +} + +const performCallbacks = (callbacks: any[], interval: number) => { + let counter = 0; + const performNext = () => { + callbacks[counter++](); + if (counter < callbacks.length) { + setTimeout(performNext, interval); + } + }; + performNext(); +}; + +const createExpiredQueryError = () => { + return { + code: grpc.status.FAILED_PRECONDITION, + details: 'failed precondition', + statusDetails: [ + { + violations: [ + { + type: 'PREPARED_QUERY_EXPIRED', + description: + 'The prepared query has expired. 
Please re-issue the ExecuteQuery with a valid prepared query.', + }, + ], + }, + ], + }; +}; + +describe('Bigtable/ExecuteQueryStateMachine', () => { + const INSTANCE_ID = 'my-instance'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const BIGTABLE = { + projectName: 'projects/my-project', + projectId: 'my-project', + request: () => {}, + } as Bigtable; + let Instance: typeof inst.Instance; + let instance: inst.Instance; + let checksumValidStub: any; + + before(() => { + Instance = proxyquire('../src/instance.js', { + '@google-cloud/promisify': fakePromisify, + pumpify, + }).Instance; + }); + + beforeEach(() => { + instance = new Instance(BIGTABLE, INSTANCE_ID); + checksumValidStub = sinon + .stub(SqlValues, 'checksumValid') + .callsFake(() => true); + }); + + afterEach(() => { + sandbox.restore(); + checksumValidStub.restore(); + }); + + describe('happy_path', () => { + it('responses within timeout', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, 111, undefined, {intValue: 2}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', undefined, undefined), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token2', undefined, undefined), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses.length, 2); + assert.equal(responses[0].get(0), 1); + assert.equal(responses[1].get(0), 2); + done(); + }, + ], + 1, + ); + }); + }); + + describe('queryPlanErrors', () => { + it('one query plan error', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + 
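+            // the mock PreparedStatement records one callback per getData call,
+            // so length 1 means exactly one fetch attempt so far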
assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0](new Error('fetching QP failed')); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 2); + preparedStatement.callbacks[1]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses[0].get(0), 1); + done(); + }, + ], + 1, + ); + }); + + it('query plan expired error', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const expiredError = createExpiredQueryError(); + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', expiredError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainAndRefreshQueryPlan', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + assert.equal(preparedStatement.markedAsExpired, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 2); + preparedStatement.callbacks[1]( + undefined, + 'bytes', + createResultSetMetadata(['f2', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream2.emit('end'); + bigtableStream2.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.throws(() => { + // we make sure that the column name from the first preparedStatement is not present. 
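+              // (the refreshed plan exposes only 'f2', so get('f1') must throw)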
+              responses[0].get('f1');
+            });
+            assert.equal(responses[0].get('f2'), 1);
+            done();
+          },
+        ],
+        1,
+      );
+    });
+
+    it('query plan expired error after data was received', done => {
+      const bigtableStream = new PassThrough({
+        objectMode: true,
+        highWaterMark: 0,
+      }) as any;
+      bigtableStream.abort = () => {};
+
+      const bigtableStream2 = new PassThrough({
+        objectMode: true,
+        highWaterMark: 0,
+      }) as any;
+      bigtableStream2.abort = () => {};
+
+      const expiredError = createExpiredQueryError();
+      BIGTABLE.request = () => bigtableStream as any;
+      const preparedStatement = new MockPreparedStatement();
+      const resultStream = instance.createExecuteQueryStream({
+        preparedStatement,
+      } as any) as any;
+      const responses: QueryResultRow[] = [];
+      resultStream.on('data', (row: any) => {
+        responses.push(row);
+      });
+
+      performCallbacks(
+        [
+          () => {
+            clearTimeout(resultStream._stateMachine.timeoutTimer);
+            assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan');
+            assert.equal(preparedStatement.callbacks.length, 1);
+          },
+          () => {
+            preparedStatement.callbacks[0](
+              undefined,
+              'bytes',
+              createResultSetMetadata(['f1', pbType({int64Type: {}})]),
+            );
+          },
+          () => {
+            assert.equal(
+              resultStream._stateMachine.state,
+              'BeforeFirstResumeToken',
+            );
+            bigtableStream.write(
+              createProtoRows(undefined, undefined, undefined, {intValue: 1}),
+            );
+          },
+          () => {
+            assert.equal(
+              resultStream._stateMachine.state,
+              'BeforeFirstResumeToken',
+            );
+            BIGTABLE.request = () => {
+              return bigtableStream2 as any;
+            };
+            bigtableStream.emit('error', expiredError);
+          },
+          () => {
+            assert.equal(
+              resultStream._stateMachine.state,
+              'DrainAndRefreshQueryPlan',
+            );
+            assert.equal(resultStream._stateMachine.retryTimer !== null, true);
+            // speed up the retry timer
+            clearTimeout(resultStream._stateMachine.retryTimer);
+            resultStream._stateMachine.startNextAttempt();
+          },
+          () => {
+            assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan');
+            assert.equal(preparedStatement.callbacks.length, 2);
+            preparedStatement.callbacks[1](
+              undefined,
+              'bytes',
+              createResultSetMetadata(['f1', pbType({int64Type: {}})]),
+            );
+          },
+          () => {
+            assert.equal(
+              resultStream._stateMachine.state,
+              'BeforeFirstResumeToken',
+            );
+            bigtableStream2.write(
+              createProtoRows('token1', 111, undefined, {intValue: 2}),
+            );
+          },
+          () => {
+            assert.equal(
+              resultStream._stateMachine.state,
+              'AfterFirstResumeToken',
+            );
+            bigtableStream2.emit('end');
+            bigtableStream2.emit('close');
+          },
+          () => {
+            assert.equal(resultStream._stateMachine.state, 'Finished');
+            // assert we only got the second response, the first one was discarded
+            assert.equal(responses.length, 1);
+            assert.equal(responses[0].get(0), 2);
+            done();
+          },
+        ],
+        1,
+      );
+    });
+
+    it('query plan expired error after token', done => {
+      const bigtableStream = new PassThrough({
+        objectMode: true,
+        highWaterMark: 0,
+      }) as any;
+      bigtableStream.abort = () => {};
+
+      const expiredError = createExpiredQueryError();
+      BIGTABLE.request = () => bigtableStream as any;
+      const preparedStatement = new MockPreparedStatement();
+      const resultStream = instance.createExecuteQueryStream({
+        preparedStatement,
+      } as any) as any;
+      let errorEmitted = false;
+      const responses: QueryResultRow[] = [];
+      resultStream
+        .on('error', () => {
+          errorEmitted = true;
+        })
+        .on('data', (row: any) => {
+          responses.push(row);
+        });
+
+      performCallbacks(
+        [
+          () => {
+            clearTimeout(resultStream._stateMachine.timeoutTimer);
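+            // the total-timeout timer is disarmed up front so it cannot fire
+            // while the scripted steps run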
assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.write(expiredError); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(responses.length, 1); + assert.equal(responses[0].get(0), 1); + assert.equal(errorEmitted, true); + done(); + }, + ], + 1, + ); + }); + }); + + describe('streamEnding', () => { + it('empty stream', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + let streamEnded = false; + resultStream.on('end', () => { + streamEnded = true; + }); + resultStream.on('error', () => { + errorEmitted = true; + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(errorEmitted, false); + assert.equal(responses.length, 0); + assert.equal(streamEnded, true); + done(); + }, + ], + 1, + ); + }); + + it('unexpected end after some data before token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { 
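+            // an 'end' before any resume token leaves uncommitted data, so the
+            // stream must fail rather than emit partial rows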
+ assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); + }); + + it('unexpected end before a token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + resultStream.on('error', () => { + errorEmitted = true; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 2}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 3}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(responses.length, 2); + assert.equal(responses[0].get(0), 1); + assert.equal(responses[1].get(0), 2); + done(); + }, + ], + 1, + ); + }); + + it('empty response - query returned no rows', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', undefined, undefined), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream.emit('end'); + bigtableStream.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(errorEmitted, false); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); 
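+      // (performCallbacks drives the steps above on a 1 ms cadence)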
+ }); + }); + + describe('streamErrors', () => { + it('retryable error before anything', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const retryableError = { + code: grpc.status.DEADLINE_EXCEEDED, + message: 'retryable error', + }; + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', retryableError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainingBeforeResumeToken', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(preparedStatement.callbacks.length, 1); // query plan was not refreshed + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows('token', 111, undefined, {intValue: 2}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream2.emit('end'); + bigtableStream2.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses.length, 2); + assert.equal(responses[0].get(0), 1); + assert.equal(responses[1].get(0), 2); + done(); + }, + ], + 1, + ); + }); + + it('retryable error before token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const retryableError = { + code: grpc.status.DEADLINE_EXCEEDED, + message: 'retryable error', + }; + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 
'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', retryableError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainingBeforeResumeToken', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(preparedStatement.callbacks.length, 1); // query plan was not refreshed + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows(undefined, undefined, undefined, {intValue: 2}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows('token', 111, undefined, {intValue: 3}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream2.emit('end'); + bigtableStream2.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses.length, 2); + // the first message before the retry should have been discarded + assert.equal(responses[0].get(0), 2); + assert.equal(responses[1].get(0), 3); + done(); + }, + ], + 1, + ); + }); + + it('retryable error before token, byteBuffer keeps emitting data', done => { + // in this test we simulate a situation where even though the + // error was emitted, a data event emitted after it. 
This can + // happen if an event is buffered in the readable part of the byteBuffer + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const retryableError = { + code: grpc.status.DEADLINE_EXCEEDED, + message: 'retryable error', + }; + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + let valuesStream: any = null; + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + valuesStream = resultStream._stateMachine.valuesStream; + bigtableStream.emit('error', retryableError); + }, + () => { + // emit data after the error was emitted + valuesStream.emit('data', [ + [ + createProtoRows(undefined, undefined, undefined, {intValue: 2}) + .results?.protoRowsBatch?.batchData, + ], + 'unreachableToken', + ]); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainingBeforeResumeToken', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(preparedStatement.callbacks.length, 1); // query plan was not refreshed + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows(undefined, undefined, undefined, {intValue: 3}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream2.write( + createProtoRows('token', 111, undefined, {intValue: 4}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream2.emit('end'); + bigtableStream2.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses.length, 2); + // the first message before the retry should have been discarded + assert.equal(responses[0].get(0), 3); + assert.equal(responses[1].get(0), 4); + done(); + }, + ], + 1, + ); + }); + + it('retryable error before token then expire', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream2.abort = () => {}; + + const bigtableStream3 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream3.abort = () => {}; + + const retryableError = { + code: 
grpc.status.DEADLINE_EXCEEDED, + message: 'retryable error', + }; + + const expiredError = createExpiredQueryError(); + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', retryableError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainingBeforeResumeToken', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(preparedStatement.callbacks.length, 1); // query plan was not refreshed + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream3 as any; + }; + bigtableStream2.emit('error', expiredError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainAndRefreshQueryPlan', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 2); + preparedStatement.callbacks[1]( + undefined, + 'bytes', + createResultSetMetadata(['f2', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream3.write( + createProtoRows('token1', 111, undefined, {intValue: 2}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + bigtableStream3.emit('end'); + bigtableStream3.emit('close'); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Finished'); + assert.equal(responses.length, 1); + // the first message before the retry should have been discarded + assert.equal(responses[0].get('f2'), 2); + assert.throws(() => { + // we make sure that the column name from the first preparedStatement is not present. 
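+                // (after the expiry-triggered re-prepare, only 'f2' exists)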
+ responses[0].get('f1'); + }); + done(); + }, + ], + 1, + ); + }); + + it('retryable error after token then expire', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream2.abort = () => {}; + + const bigtableStream3 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream3.abort = () => {}; + + const retryableError = { + code: grpc.status.DEADLINE_EXCEEDED, + message: 'retryable error', + }; + + const expiredError = createExpiredQueryError(); + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + resultStream.on('error', () => { + errorEmitted = true; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', retryableError); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'DrainingAfterResumeToken', + ); + assert.equal(resultStream._stateMachine.retryTimer !== null, true); + // speed up the retry timer + clearTimeout(resultStream._stateMachine.retryTimer); + resultStream._stateMachine.startNextAttempt(); + }, + () => { + assert.equal(preparedStatement.callbacks.length, 1); // query plan was not refreshed + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream3 as any; + }; + bigtableStream2.emit('error', expiredError); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(responses.length, 1); + assert.equal(responses[0].get(0), 1); + done(); + }, + ], + 1, + ); + }); + + it('non-retryable error before token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const nonretryableError = { + code: grpc.status.PERMISSION_DENIED, + message: 'non-retryable error', + }; + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + 
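+            // disarm the overall timeout so the injected PERMISSION_DENIED
+            // error is the only way this stream can fail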
assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', nonretryableError); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); + }); + + it('non-retryable error after token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + + const bigtableStream2 = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream2.abort = () => {}; + + const nonretryableError = { + code: grpc.status.PERMISSION_DENIED, + message: 'non-retryable error', + }; + + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + resultStream.on('error', () => { + errorEmitted = true; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + BIGTABLE.request = () => { + return bigtableStream2 as any; + }; + bigtableStream.emit('error', nonretryableError); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(responses.length, 1); + assert.equal(responses[0].get(0), 1); + done(); + }, + ], + 1, + ); + }); + }); + + describe('timeouts', () => { + it('timeout immediately', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + }, + () => { + resultStream._stateMachine.handleTotalTimeout(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); + }); + + it('timeout after 
PQ', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0](new Error('fetching QP failed!')); + }, + () => { + assert.equal(errorEmitted, false); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 2); + preparedStatement.callbacks[1]( + new Error('fetching QP failed again!'), + ); + }, + () => { + assert.equal(errorEmitted, false); + resultStream._stateMachine.handleTotalTimeout(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); + }); + + it('timeout before token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + let rowsEmitted = 0; + resultStream + .on('error', () => { + errorEmitted = true; + }) + .on('data', () => { + rowsEmitted += 1; + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + resultStream._stateMachine.handleTotalTimeout(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(rowsEmitted, 0); + done(); + }, + ], + 1, + ); + }); + + it('timeout after token', done => { + const bigtableStream = new PassThrough({ + objectMode: true, + highWaterMark: 0, + }) as any; + bigtableStream.abort = () => {}; + BIGTABLE.request = () => bigtableStream as any; + const preparedStatement = new MockPreparedStatement(); + const resultStream = instance.createExecuteQueryStream({ + preparedStatement, + } as any) as any; + let errorEmitted = false; + const responses: QueryResultRow[] = []; + resultStream.on('data', (row: any) => { + responses.push(row); + }); + resultStream.on('error', () => { + errorEmitted = true; + }); + + performCallbacks( + [ + () => { + clearTimeout(resultStream._stateMachine.timeoutTimer); + assert.equal(resultStream._stateMachine.state, 'AwaitingQueryPlan'); + assert.equal(preparedStatement.callbacks.length, 1); + }, + () => { + preparedStatement.callbacks[0]( + undefined, + 'bytes', + createResultSetMetadata(['f1', pbType({int64Type: {}})]), + ); + }, + () => { + 
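+            // plan delivered: the machine should be streaming before we feed
+            // it a committed row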
assert.equal( + resultStream._stateMachine.state, + 'BeforeFirstResumeToken', + ); + bigtableStream.write( + createProtoRows('token1', 111, undefined, {intValue: 1}), + ); + }, + () => { + assert.equal( + resultStream._stateMachine.state, + 'AfterFirstResumeToken', + ); + resultStream._stateMachine.handleTotalTimeout(); + }, + () => { + assert.equal(resultStream._stateMachine.state, 'Failed'); + assert.equal(errorEmitted, true); + assert.equal(responses.length, 1); + assert.equal(responses[0].get(0), 1); + done(); + }, + ], + 1, + ); + }); + }); +}); + +describe('Bigtable/ExecuteQueryPreparedStatementObject', () => { + const INSTANCE_ID = 'my-instance'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const BIGTABLE = { + projectName: 'projects/my-project', + projectId: 'my-project', + request: () => {}, + } as Bigtable; + + let clock: sinon.SinonFakeTimers; + + beforeEach(() => { + clock = sinon.useFakeTimers({ + toFake: ['setTimeout', 'clearTimeout', 'Date'], + }); + }); + + afterEach(() => { + clock.restore(); + sandbox.restore(); + }); + + describe('happy_path', () => { + it('getting prepared query plan', done => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Int64()}, + ); + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + done(); + }, 1000); + clock.runAll(); + }); + + it('getting prepared query plan close to validUntil', done => { + const resp = createPrepareQueryResponse(['f', pbType({int64Type: {}})]); + let pqRequestCb = null; + let requestCounter = 0; + (BIGTABLE as any).request = (req: any, cb: any) => { + requestCounter += 1; + pqRequestCb = cb; + }; + const someTimestamp = 1740000000; + resp.validUntil = google.protobuf.Timestamp.create({ + seconds: someTimestamp / 1000, + nanos: 0, + }); + const preparedStatement = new PreparedStatement( + BIGTABLE, + resp, + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + // Set the time to 100 ms after the "should-refresh" point in time + clock.setSystemTime(someTimestamp - SHOULD_REFRESH_SOON_PERIOD_MS + 100); + let getDataCalls = 0; + const doneAfterGetData = () => { + getDataCalls += 1; + if (getDataCalls > 1) { + // assert only one request was made. 
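+          // (both getData calls were served by the single in-flight refresh)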
+ assert.equal(requestCounter, 1); + done(); + } + }; + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + doneAfterGetData(); + }, 1000); + clock.runAll(); + + // refresh is scheduled + assert.equal(pqRequestCb !== null, true); + + // both getData calls should get the old value before the refresh finishes + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + doneAfterGetData(); + }, 1000); + clock.runAll(); + }); + + it('getting prepared query plan past validUntil', done => { + const resp = createPrepareQueryResponse(['f', pbType({int64Type: {}})]); + const someTimestamp = 1740000000; + resp.validUntil = google.protobuf.Timestamp.create({ + seconds: someTimestamp / 1000, + nanos: 0, + }); + const preparedStatement = new PreparedStatement( + BIGTABLE, + resp, + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + // Set the time to 100 ms after the "validUntil" point in time + clock.setSystemTime(someTimestamp + 100); + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + done(); + }, 1000); + clock.runAll(); + }); + + it('multiple getData calls result in only one request', done => { + const originalResp = createPrepareQueryResponse([ + 'f1', + pbType({int64Type: {}}), + ]); + const secondResp = createPrepareQueryResponse([ + 'f2', + pbType({int64Type: {}}), + ]); + let pqRequestCb = null; + let requestCounter = 0; + (BIGTABLE as any).request = (req: any, cb: any) => { + requestCounter += 1; + pqRequestCb = cb; + }; + const someTimestamp = 1740000000; + originalResp.validUntil = google.protobuf.Timestamp.create({ + seconds: someTimestamp / 1000, + nanos: 0, + }); + const preparedStatement = new PreparedStatement( + BIGTABLE, + originalResp, + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + // Set the time to 100 ms after the "should-refresh" point in time + clock.setSystemTime(someTimestamp - SHOULD_REFRESH_SOON_PERIOD_MS + 100); + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f1').type, 'int64'); + }, 1000); + clock.runAll(); + + // refresh is scheduled + assert.equal(pqRequestCb !== null, true); + + // second getData call + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f1').type, 'int64'); + }, 1000); + clock.runAll(); + + // assert only one request was made even though getData was called twice + assert.equal(requestCounter, 1); + + // Bigtable returns the prepareQuery response + pqRequestCb!(null, secondResp); + + preparedStatement.getData((err, pqBytes, metadata) => { + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f2').type, 'int64'); + assert.throws(() => { + // we make sure that the column name from the first preparedStatement is not present. 
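+          // (secondResp defines only column 'f2')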
+ metadata?.get('f1'); + }); + }, 1000); + clock.runAll(); + done(); + }); + }); + + describe('other_cases', () => { + it('getting data after expiration hangs', done => { + const resp = createPrepareQueryResponse(['f', pbType({int64Type: {}})]); + const preparedStatement = new PreparedStatement( + BIGTABLE, + resp, + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + (BIGTABLE as any).request = (req: any, cb: any) => cb(null, resp); + + preparedStatement.markAsExpired(); + assert.equal(preparedStatement.isExpired(), true); + assert.equal((preparedStatement as any).isRefreshing, false); + assert.equal((preparedStatement as any).timer, null); + + let callbackCalled = false; + preparedStatement.getData((err, pqBytes, metadata) => { + callbackCalled = true; + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + }, 1000); + + // getData scheduled getting the query plan immediately after + assert.equal((preparedStatement as any).timer !== null, true); + assert.equal(callbackCalled, false); + clock.tick(1); + assert.equal((preparedStatement as any).timer, null); + assert.equal(callbackCalled, true); + done(); + }); + + it('plan expired during getData callback', done => { + const resp = createPrepareQueryResponse(['f', pbType({int64Type: {}})]); + const preparedStatement = new PreparedStatement( + BIGTABLE, + resp, + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + (BIGTABLE as any).request = (req: any, cb: any) => cb(null, resp); + + preparedStatement.markAsExpired(); + assert.equal(preparedStatement.isExpired(), true); + assert.equal((preparedStatement as any).isRefreshing, false); + assert.equal((preparedStatement as any).timer, null); + + let callbackCalled = false; + preparedStatement.getData((err, pqBytes, metadata) => { + callbackCalled = true; + assert.equal(err, undefined); + assert.equal(pqBytes, 'xd'); + assert.equal(metadata?.get('f').type, 'int64'); + preparedStatement.markAsExpired(); + }, 1000); + + // this callback gets served second. 
+    // It will get an error because the query expired between the last
+    // refresh and the serving of this callback
+      preparedStatement.getData((err, pqBytes, metadata) => {
+        assert.equal(callbackCalled, true);
+        assert.equal(pqBytes, undefined);
+        assert.equal(metadata, undefined);
+        assert.equal(err?.message, 'Getting a fresh query plan failed.');
+      }, 1000);
+
+      // getData scheduled getting the query plan immediately after
+      assert.equal((preparedStatement as any).timer !== null, true);
+      assert.equal(callbackCalled, false);
+      clock.tick(1);
+      assert.equal((preparedStatement as any).timer, null);
+      assert.equal(callbackCalled, true);
+      done();
+    });
+
+    it('plan refresh failed', done => {
+      const resp = createPrepareQueryResponse(['f', pbType({int64Type: {}})]);
+      const preparedStatement = new PreparedStatement(
+        BIGTABLE,
+        resp,
+        {} as any,
+        {
+          a: SqlTypes.Int64(),
+        },
+      );
+      (BIGTABLE as any).request = (req: any, cb: any) =>
+        cb(new Error('Problem'));
+
+      preparedStatement.markAsExpired();
+      assert.equal(preparedStatement.isExpired(), true);
+      assert.equal((preparedStatement as any).isRefreshing, false);
+      assert.equal((preparedStatement as any).timer, null);
+
+      let callbackCalled = false;
+      preparedStatement.getData((err, pqBytes, metadata) => {
+        callbackCalled = true;
+        assert.equal(pqBytes, undefined);
+        assert.equal(metadata, undefined);
+        assert.equal(err?.message, 'Problem');
+      }, 1000);
+
+      assert.equal(callbackCalled, false);
+      clock.tick(1);
+      assert.equal((preparedStatement as any).timer, null);
+      assert.equal(callbackCalled, true);
+      done();
+    });
+  });
+});
diff --git a/test/instance.ts b/test/instance.ts
index e26b7f9e1..ed3c6bcdd 100644
--- a/test/instance.ts
+++ b/test/instance.ts
@@ -19,7 +19,7 @@ import * as sinon from 'sinon';
 import * as proxyquire from 'proxyquire';
 import {ServiceError} from 'google-gax';
 import * as snapshot from 'snap-shot-it';
-
+import {Readable} from 'stream';
 import * as inst from '../src/instance';
 import {AppProfile, AppProfileOptions} from '../src/app-profile';
 import {Cluster, CreateClusterOptions} from '../src/cluster';
@@ -34,9 +34,31 @@ import {Bigtable, RequestOptions} from '../src';
 import {PassThrough} from 'stream';
 import * as pumpify from 'pumpify';
 import {FakeCluster} from '../system-test/common';
+import {
+  BigtableDate,
+  BigtableMap,
+  QueryResultRow,
+  SqlValue,
+  Struct,
+} from '../src/execute-query/values';
+import * as SqlTypes from '../src/execute-query/types';
 import {RestoreTableConfig} from '../src/backup';
 import {Options} from './cluster';
 import {createClusterOptionsList} from './constants/cluster';
+import {google} from '../protos/protos';
+import {PreciseDate} from '@google-cloud/precise-date';
+import Long = require('long');
+import {
+  createMetadata,
+  createPreparedStatement,
+  createPrepareQueryResponse,
+  createProtoRows,
+  pbType,
+} from './utils/proto-bytes';
+import {PreparedStatement} from '../src/execute-query/preparedstatement';
+import * as SqlValues from '../src/execute-query/values';
+
+const concat = require('concat-stream');
 
 const sandbox = sinon.createSandbox();
 
@@ -87,6 +109,25 @@ class FakeTable extends Table {
   }
 }
 
+// convenience function for ExecuteQuery tests
+function executeQueryResultWithMetadata(
+  instance: any,
+  preparedStatement: PreparedStatement | null,
+  callback: (...args: any[]) => void,
+): void {
+  const stream = instance.createExecuteQueryStream({preparedStatement});
+  stream.on('error', callback!).pipe(
+    concat((rows: QueryResultRow[]) => {
+      const metadata =
stream.getMetadata(); + if (metadata === null) { + callback!(new Error('Server error - did not receive metadata.')); + } else { + callback!(null, rows, metadata); + } + }), + ); +} + describe('Bigtable/Instance', () => { const INSTANCE_ID = 'my-instance'; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -1165,7 +1206,6 @@ describe('Bigtable/Instance', () => { .getAppProfilesStream() .on('error', err => { assert.strictEqual(appProfiles.length, counter); - console.log(err.message); assert.deepStrictEqual( err, new Error( @@ -1966,3 +2006,1389 @@ describe('Bigtable/Instance', () => { }); }); }); + +describe('Bigtable/ExecuteQueryInstance', () => { + // Create an array of Response objects + + const responsesRef = { + responses: [] as google.bigtable.v2.ExecuteQueryResponse[], + + setResponses(values: google.bigtable.v2.ExecuteQueryResponse[]) { + responsesRef.responses = values; + }, + }; + + let requests: any[] = []; + + const INSTANCE_ID = 'my-instance'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const BIGTABLE = { + projectName: 'projects/my-project', + projectId: 'my-project', + request: (config?: any) => { + requests.push(config); + const result: any = Readable.from(responsesRef.responses); + result.abort = () => {}; + return result; + }, + } as Bigtable; + let Instance: typeof inst.Instance; + let instance: inst.Instance; + let checksumValidStub: any; + let checksumIsValid = true; + + before(() => { + Instance = proxyquire('../src/instance.js', { + '@google-cloud/promisify': fakePromisify, + './app-profile.js': {AppProfile: FakeAppProfile}, + './backup.js': {Backup: FakeBackup}, + './cluster.js': {Cluster: FakeCluster}, + './family.js': {Family: FakeFamily}, + './table.js': {Table: FakeTable}, + pumpify, + }).Instance; + }); + + beforeEach(() => { + responsesRef.responses = []; + requests = []; + instance = new Instance(BIGTABLE, INSTANCE_ID); + checksumIsValid = true; + checksumValidStub = sinon + .stub(SqlValues, 'checksumValid') + .callsFake(() => checksumIsValid); + }); + + afterEach(() => { + sandbox.restore(); + checksumValidStub.restore(); + }); + + describe('execute', () => { + it('parses non-composite types', done => { + const preparedStatement = createPreparedStatement( + ['int64', pbType({int64Type: {}})], + ['float64', pbType({float64Type: {}})], + ['string', pbType({stringType: {}})], + ['bytes', pbType({bytesType: {}})], + ['date', pbType({dateType: {}})], + ['timestamp', pbType({timestampType: {}})], + ['bool', pbType({boolType: {}})], + ); + + responsesRef.setResponses([ + createProtoRows( + 'token1', + 111, + undefined, + {intValue: 1}, + {floatValue: 2.5}, + {stringValue: '3'}, + {bytesValue: new Uint8Array([4, 5, 6])}, + {dateValue: new google.type.Date({year: 2024, month: 0, day: 1})}, + { + timestampValue: new google.protobuf.Timestamp({ + seconds: 1234, + nanos: 5678, + }), + }, + {boolValue: true}, + ), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0).type, 'int64'); + assert.strictEqual(metadata!.get(1).type, 'float64'); + assert.strictEqual(metadata!.get(2).type, 'string'); + assert.strictEqual(metadata!.get(3).type, 'bytes'); + assert.strictEqual(metadata!.get(4).type, 'date'); + assert.strictEqual(metadata!.get(5).type, 'timestamp'); + assert.strictEqual(metadata!.get(6).type, 'bool'); + + assert.strictEqual(result![0].get(0), BigInt(1)); + assert.strictEqual(result![0].get(1), 2.5); + assert.strictEqual(result![0].get(2), '3'); 
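+          // bytes, date and timestamp decode to objects, so deepEqual is used below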
+ assert.deepEqual(result![0].get(3), new Uint8Array([4, 5, 6])); + assert.deepEqual(result![0].get(4), new BigtableDate(2024, 0, 1)); + assert.deepEqual(result![0].get(5), new PreciseDate([1234, 5678])); + assert.strictEqual(result![0].get(6), true); + done(); + }, + ); + }); + + it('parses multiple rows', done => { + const preparedStatement = createPreparedStatement([ + 'f1', + pbType({int64Type: {}}), + ]); + + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, {intValue: 1}), + createProtoRows('token2', 111, undefined, {intValue: 2}), + createProtoRows('token3', 111, undefined, {intValue: 3}), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0), metadata!.get('f1')); + assert.strictEqual(metadata!.get(0).type, 'int64'); + + assert.strictEqual(result![0].get(0), BigInt(1)); + assert.strictEqual(result![1].get(0), BigInt(2)); + assert.strictEqual(result![2].get(0), BigInt(3)); + done(); + }, + ); + }); + + it('handles nulls properly', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + + responsesRef.setResponses([ + createProtoRows(undefined, undefined, undefined, {intValue: 1}), + createProtoRows('token1', 111, undefined, {}), + createProtoRows(undefined, undefined, undefined, {}), + createProtoRows('token2', 111, undefined, {intValue: 2}), + createProtoRows(undefined, undefined, undefined, {}), + createProtoRows(undefined, undefined, undefined, {intValue: 3}), + createProtoRows('token3', 111, undefined), + createProtoRows(undefined, undefined, undefined, {}), + createProtoRows(undefined, undefined, undefined, {}), + createProtoRows('token4', 111, undefined), + createProtoRows(undefined, undefined, undefined, {intValue: 4}), + createProtoRows(undefined, undefined, undefined, {intValue: 5}), + createProtoRows('token5', 111, undefined), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(result?.length, 5); + + assert.strictEqual(result![0].get(0), BigInt(1)); + assert.strictEqual(result![0].get(1), null); + + assert.strictEqual(result![1].get(0), null); + assert.strictEqual(result![1].get(1), BigInt(2)); + + assert.strictEqual(result![2].get(0), null); + assert.strictEqual(result![2].get(1), BigInt(3)); + + assert.strictEqual(result![3].get(0), null); + assert.strictEqual(result![3].get(1), null); + + assert.strictEqual(result![4].get(0), BigInt(4)); + assert.strictEqual(result![4].get(1), BigInt(5)); + + done(); + }, + ); + }); + + it('handles nulls for all types', done => { + const preparedStatement = createPreparedStatement( + ['int64', pbType({int64Type: {}})], + ['float64', pbType({float64Type: {}})], + ['string', pbType({stringType: {}})], + ['bytes', pbType({bytesType: {}})], + ['date', pbType({dateType: {}})], + ['timestamp', pbType({timestampType: {}})], + ['bool', pbType({boolType: {}})], + ['array', pbType({arrayType: {elementType: pbType({int64Type: {}})}})], + [ + 'map', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({int64Type: {}}), + }, + }), + ], + [ + 'struct', + pbType({ + structType: { + fields: [{fieldName: 'f1', type: pbType({int64Type: {}})}], + }, + }), + ], + [ + 'arrayWithNulls', + pbType({arrayType: {elementType: pbType({int64Type: {}})}}), + ], + [ + 'mapWithNulls', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({stringType: 
{}}), + }, + }), + ], + [ + 'structWithNulls', + pbType({ + structType: { + fields: [ + {fieldName: 'f1', type: pbType({int64Type: {}})}, + {fieldName: null, type: pbType({float64Type: {}})}, + {fieldName: 'f3', type: pbType({stringType: {}})}, + ], + }, + }), + ], + ); + responsesRef.setResponses([ + createProtoRows( + 'token1', + 111, + undefined, + {}, + {}, + {}, + {}, + {}, + {}, + {}, + {}, + {}, + {}, + // arrayWithNulls + { + arrayValue: { + values: [{intValue: 1}, {}, {intValue: 3}], + }, + }, + // mapWithNulls + { + arrayValue: { + values: [ + { + arrayValue: { + values: [{intValue: 1}, {}], + }, + }, + { + arrayValue: { + values: [{intValue: 2}, {}], + }, + }, + { + arrayValue: { + values: [{intValue: 3}, {stringValue: 'c'}], + }, + }, + ], + }, + }, + //structWithNulls + { + arrayValue: { + values: [{intValue: 1}, {}, {}], + }, + }, + ), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(result![0].get(0), null); + assert.strictEqual(result![0].get(1), null); + assert.strictEqual(result![0].get(2), null); + assert.strictEqual(result![0].get(3), null); + assert.strictEqual(result![0].get(4), null); + assert.strictEqual(result![0].get(5), null); + assert.strictEqual(result![0].get(6), null); + assert.strictEqual(result![0].get(7), null); + assert.strictEqual(result![0].get(8), null); + assert.strictEqual(result![0].get(9), null); + + const arrayWithNulls = result![0].get(10) as SqlValue[]; + assert.strictEqual(arrayWithNulls[0], BigInt(1)); + assert.strictEqual(arrayWithNulls[1], null); + assert.strictEqual(arrayWithNulls[2], BigInt(3)); + + const mapWithNulls = result![0].get(11) as BigtableMap; + assert.strictEqual(mapWithNulls.size, 3); + assert.strictEqual(mapWithNulls.get(BigInt(1)), null); + assert.strictEqual(mapWithNulls.get(BigInt(2)), null); + assert.strictEqual(mapWithNulls.get(BigInt(3)), 'c'); + + const structWithNulls = result![0].get(12) as Struct; + assert.strictEqual(structWithNulls.get('f1'), BigInt(1)); + assert.strictEqual(structWithNulls.get(1), null); + assert.strictEqual(structWithNulls.get('f3'), null); + + done(); + }, + ); + }); + + it('parses multiple rows in one batch', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + responsesRef.setResponses([ + createProtoRows( + undefined, + undefined, + undefined, + {intValue: 1}, + {intValue: 2}, + ), + createProtoRows( + undefined, + undefined, + undefined, + {intValue: 3}, + {intValue: 4}, + ), + createProtoRows('token1', 111, undefined, {intValue: 5}, {intValue: 6}), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0), metadata!.get('f1')); + assert.strictEqual(metadata!.get(1), metadata!.get('f2')); + assert.strictEqual(metadata!.get(0).type, 'int64'); + assert.strictEqual(metadata!.get(1).type, 'int64'); + + assert.strictEqual(result![0].get(0), BigInt(1)); + assert.strictEqual(result![0].get('f1'), BigInt(1)); + assert.strictEqual(result![0].get(1), BigInt(2)); + assert.strictEqual(result![0].get('f2'), BigInt(2)); + + assert.strictEqual(result![1].get(0), BigInt(3)); + assert.strictEqual(result![1].get('f1'), BigInt(3)); + assert.strictEqual(result![1].get(1), BigInt(4)); + assert.strictEqual(result![1].get('f2'), BigInt(4)); + + assert.strictEqual(result![2].get(0), BigInt(5)); + assert.strictEqual(result![2].get('f1'), BigInt(5)); + 
assert.strictEqual(result![2].get(1), BigInt(6)); + assert.strictEqual(result![2].get('f2'), BigInt(6)); + done(); + }, + ); + }); + + it('parses an array of ints', done => { + const preparedStatement = createPreparedStatement([ + 'f1', + pbType({arrayType: {elementType: pbType({int64Type: {}})}}), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [{intValue: 1}, {intValue: 2}, {intValue: 3}], + }, + }), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0), metadata!.get('f1')); + assert.strictEqual(metadata!.get(0).type, 'array'); + const arrayType = metadata!.get(0); + assert(arrayType.type === 'array'); + assert.strictEqual(arrayType.elementType.type, 'int64'); + + const structResult = result![0].get('f1') as SqlValue[]; + assert.strictEqual(structResult[0], BigInt(1)); + assert.strictEqual(structResult[1], BigInt(2)); + assert.strictEqual(structResult[2], BigInt(3)); + done(); + }, + ); + }); + + it('parses a struct', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + [ + 'f2', + pbType({ + structType: { + fields: [ + {fieldName: 'f1', type: pbType({int64Type: {}})}, + {fieldName: null, type: pbType({float64Type: {}})}, + {fieldName: 'f3', type: pbType({stringType: {}})}, + ], + }, + }), + ], + ); + responsesRef.setResponses([ + createProtoRows( + 'token1', + 111, + undefined, + {intValue: 1}, + { + arrayValue: { + values: [{intValue: 1}, {floatValue: 2.5}, {stringValue: '3'}], + }, + }, + ), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0), metadata!.get('f1')); + assert.strictEqual(metadata!.get(1), metadata!.get('f2')); + assert.strictEqual(metadata!.get(0).type, 'int64'); + const structType = metadata!.get(1); + assert.strictEqual(structType.type, 'struct'); + assert.strictEqual(structType.get('f1').type, 'int64'); + assert.strictEqual(structType.get(1).type, 'float64'); + assert.strictEqual(structType.get('f3').type, 'string'); + + assert.strictEqual(result![0].get(0), BigInt(1)); + assert.strictEqual(result![0].get('f1'), BigInt(1)); + const structResult = result![0].get(1) as Struct; + assert.strictEqual(structResult.get('f1'), structResult.get(0)); + assert.strictEqual(structResult.get('f3'), structResult.get(2)); + + assert.strictEqual(structResult.get(0), BigInt(1)); + assert.strictEqual(structResult.get(1), 2.5); + assert.strictEqual(structResult.get(2), '3'); + done(); + }, + ); + }); + + it('parses a map', done => { + const preparedStatement = createPreparedStatement([ + 'f1', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({stringType: {}}), + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows(undefined, undefined, undefined, { + arrayValue: { + values: [ + { + arrayValue: { + values: [{intValue: 1}, {stringValue: 'a'}], + }, + }, + { + arrayValue: { + values: [{intValue: 2}, {stringValue: 'b'}], + }, + }, + { + arrayValue: { + values: [{intValue: 3}, {stringValue: 'c'}], + }, + }, + ], + }, + }), + createProtoRows('token2', 111, undefined, { + arrayValue: { + values: [ + { + arrayValue: { + values: [{intValue: 4}, {stringValue: 'd'}], + }, + }, + { + arrayValue: { + values: [{intValue: 5}, {stringValue: 'e'}], + }, + }, + { + arrayValue: { + values: [{intValue: 6}, {stringValue: 'f'}], + }, + }, + ], + }, + }), + ]); + 
executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + const mapType = metadata!.get(0); + assert.strictEqual(mapType.type, 'map'); + assert.strictEqual(mapType.keyType.type, 'int64'); + assert.strictEqual(mapType.valueType.type, 'string'); + + assert.strictEqual(result?.length, 2); + + const mapResult0 = result![0].get('f1') as BigtableMap; + assert.strictEqual(mapResult0.size, 3); + assert.strictEqual(mapResult0.get(BigInt(1)), 'a'); + assert.strictEqual(mapResult0.get(BigInt(2)), 'b'); + assert.strictEqual(mapResult0.get(BigInt(3)), 'c'); + + const mapResult1 = result![1].get('f1') as BigtableMap; + assert.strictEqual(mapResult1.size, 3); + assert.strictEqual(mapResult1.get(BigInt(4)), 'd'); + assert.strictEqual(mapResult1.get(BigInt(5)), 'e'); + assert.strictEqual(mapResult1.get(BigInt(6)), 'f'); + done(); + }, + ); + }); + + it('map retains last encountered value for duplicate key', done => { + const preparedStatement = createPreparedStatement([ + 'f1', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({stringType: {}}), + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [ + { + arrayValue: { + values: [{intValue: 1}, {stringValue: 'a'}], + }, + }, + { + arrayValue: { + values: [{intValue: 2}, {stringValue: 'b'}], + }, + }, + { + arrayValue: { + values: [{intValue: 1}, {stringValue: 'c'}], + }, + }, + ], + }, + }), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + const mapType = metadata!.get(0); + assert.strictEqual(mapType.type, 'map'); + assert.strictEqual(mapType.keyType.type, 'int64'); + assert.strictEqual(mapType.valueType.type, 'string'); + + assert.strictEqual(result?.length, 1); + + const mapResult0 = result![0].get('f1') as BigtableMap; + assert.strictEqual(mapResult0.size, 2); + assert.strictEqual(mapResult0.get(BigInt(1)), 'c'); + assert.strictEqual(mapResult0.get(BigInt(2)), 'b'); + done(); + }, + ); + }); + + it('accessing duplicated struct field throws', done => { + const preparedStatement = createPreparedStatement([ + 'structColumn', + pbType({ + structType: { + fields: [ + {fieldName: 'f1', type: pbType({int64Type: {}})}, + {fieldName: null, type: pbType({float64Type: {}})}, + {fieldName: 'f1', type: pbType({stringType: {}})}, + ], + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [{intValue: 1}, {floatValue: 2.5}, {stringValue: '3'}], + }, + }), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, result, metadata) => { + assert.strictEqual(metadata!.get(0).type, 'struct'); + + const struct = result![0].get(0) as Struct; + assert.strictEqual(struct.get(0), BigInt(1)); + assert.strictEqual(struct.get(1), 2.5); + assert.strictEqual(struct.get(2), '3'); + + assert.throws(() => { + result![0].get('f1'); + }, Error); + done(); + }, + ); + }); + + it('unsupported kind in metadata is detected', done => { + const type = {kind: 'unknown-type'}; + const BIGTABLE2 = { + projectName: 'projects/my-project2', + projectId: 'my-project2', + request: (req, cb: any) => { + cb!( + null, + createPrepareQueryResponse( + ['f1', pbType({int64Type: {}})], + ['f2', type as any], + ), + ); + }, + } as Bigtable; + const instance2 = new Instance(BIGTABLE2, INSTANCE_ID); + + instance2.prepareStatement('query', (err, result) => { + assert.notStrictEqual(err, null); + assert.ok(err instanceof Error); + 
done(); + }); + }); + + it('unsupported map key type throws', done => { + const BIGTABLE2 = { + projectName: 'projects/my-project2', + projectId: 'my-project2', + request: (req, cb: any) => { + cb!( + null, + createPrepareQueryResponse([ + 'map', + pbType({ + mapType: { + keyType: pbType({dateType: {}}), + valueType: pbType({int64Type: {}}), + }, + }), + ]), + ); + }, + } as Bigtable; + const instance2 = new Instance(BIGTABLE2, INSTANCE_ID); + instance2.prepareStatement('query', (err, result) => { + assert.notStrictEqual(err, null); + done(); + }); + }); + + it('map with null key is rejected', done => { + const preparedStatement = createPreparedStatement([ + 'map', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({int64Type: {}}), + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [ + { + arrayValue: { + values: [{}, {intValue: 1}], + }, + }, + ], + }, + }), + ]); + instance.executeQuery(preparedStatement, (err, result) => { + assert.strictEqual(result?.length, 1); + done(); + }); + }); + + it('map with null value is ok', done => { + const preparedStatement = createPreparedStatement([ + 'map', + pbType({ + mapType: { + keyType: pbType({int64Type: {}}), + valueType: pbType({int64Type: {}}), + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [ + { + arrayValue: { + values: [{intValue: 1}, {}], + }, + }, + ], + }, + }), + ]); + instance.executeQuery(preparedStatement, (err, result) => { + assert.strictEqual(result?.length, 1); + done(); + }); + }); + + it('bigints are correctly converted to longs', done => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + Object.fromEntries( + Array.from({length: 11}, (_, i) => [ + String.fromCharCode(97 + i), + SqlTypes.Int64(), + ]), + ), // parameter types: {a:INT64, b:INT64, ... 
} + ); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, {intValue: 1}), + ]); + instance.executeQuery( + { + preparedStatement, + parameters: { + a: BigInt(1), + b: BigInt(-1), + c: BigInt(0), + d: BigInt(Number.MAX_SAFE_INTEGER), + e: BigInt(Number.MIN_SAFE_INTEGER), + f: BigInt('9007199254740992'), // MAX_SAFE_INTEGER + 1 + g: BigInt('-9007199254740992'), // MIN_SAFE_INTEGER - 1 + h: BigInt('1152921504606846976'), // 2^60 + i: BigInt('-1152921504606846976'), // - 2^60 + j: BigInt('9223372036854775807'), // 2^63 - 1 + k: BigInt('-9223372036854775808'), // - 2^63 + }, + } as any, + err => { + assert.equal(err, null); + assert.strictEqual(requests.length, 1); + const reqOpts = requests[0] + .reqOpts as google.bigtable.v2.IExecuteQueryRequest; + + assert.deepEqual(reqOpts.params!['a'].intValue, Long.fromInt(1)); + assert.deepEqual(reqOpts.params!['b'].intValue, Long.fromInt(-1)); + assert.deepEqual(reqOpts.params!['c'].intValue, Long.fromInt(0)); + assert.deepEqual( + reqOpts.params!['d'].intValue, + Long.fromNumber(Number.MAX_SAFE_INTEGER), + ); + assert.deepEqual( + reqOpts.params!['e'].intValue, + Long.fromNumber(Number.MIN_SAFE_INTEGER), + ); + assert.deepEqual( + reqOpts.params!['f'].intValue, + Long.fromString('9007199254740992'), + ); + assert.deepEqual( + reqOpts.params!['g'].intValue, + Long.fromString('-9007199254740992'), + ); + assert.deepEqual( + reqOpts.params!['h'].intValue, + Long.fromString('1152921504606846976'), + ); + assert.deepEqual( + reqOpts.params!['i'].intValue, + Long.fromString('-1152921504606846976'), + ); + assert.deepEqual( + reqOpts.params!['j'].intValue, + Long.fromString('9223372036854775807'), + ); + assert.deepEqual( + reqOpts.params!['k'].intValue, + Long.fromString('-9223372036854775808'), + ); + done(); + }, + ); + }); + + it('value not matching provided type is rejected', () => { + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Int64()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 'a'}, + } as any, + () => {}, + ); + }, + {message: 'Value a cannot be converted to int64.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Float64()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: BigInt(1)}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to float64.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.String()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to string.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Bytes()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to bytes.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Bool()}, + ); + instance.executeQuery( + { + 
preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to boolean.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Timestamp()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + { + message: + 'Value 1 cannot be converted to timestamp, please use PreciseDate instead.', + }, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Date()}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to date.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Array(SqlTypes.Int64())}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: 1}, + } as any, + () => {}, + ); + }, + {message: 'Value 1 cannot be converted to an array.'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Array(SqlTypes.Int64())}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: {a: [1, 'a']}, + } as any, + () => {}, + ); + }, + { + message: + 'Error while converting element 0 of an array: Value 1 cannot be converted to int64 - argument of type INT64 should by passed as BigInt.', + }, + ); + // TS does not permit passing a Struct or a Map as parameters, + // but we want to check it throws an error + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Map(SqlTypes.Int64(), SqlTypes.Int64())}, + ); + instance.executeQuery( + { + preparedStatement, + parameters: { + a: new Map([ + [BigInt(1), 2], + [BigInt(3), 'a'] as any as [bigint, number], + ]), + }, + } as any, + () => {}, + ); + }, + {message: 'Map is not a supported query param type'}, + ); + assert.throws( + () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + { + a: SqlTypes.Struct({ + name: 'f1', + type: SqlTypes.Int64(), + }), + }, + ); + instance.executeQuery( + { + preparedStatement, + parameters: { + a: SqlTypes.Struct({ + name: 'f1', + type: SqlTypes.Int64(), + }), + }, + } as any, + () => {}, + ); + }, + {message: 'Struct is not a supported query param type'}, + ); + }); + + it('null value is accepted', done => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + { + a: SqlTypes.Int64(), + }, + ); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, {intValue: 1}), + ]); + instance.executeQuery( + { + preparedStatement, + parameters: {a: null}, + } as any, + () => { + assert.strictEqual(requests.length, 1); + const reqOpts = requests[0] + .reqOpts as google.bigtable.v2.IExecuteQueryRequest; + + assert.notStrictEqual(reqOpts.params!['a'].type!.int64Type, null); + done(); + }, + ); + }); + + it('parameter type is used for null', done => { + const 
preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + { + a: SqlTypes.Int64(), + b: SqlTypes.Float64(), + c: SqlTypes.Bool(), + d: SqlTypes.Bytes(), + e: SqlTypes.String(), + f: SqlTypes.Date(), + g: SqlTypes.Timestamp(), + h: SqlTypes.Array(SqlTypes.Int64()), + }, + ); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, {intValue: 1}), + ]); + instance.executeQuery( + { + preparedStatement, + parameters: { + a: null, + b: null, + c: null, + d: null, + e: null, + f: null, + g: null, + h: null, + }, + } as any, + () => { + assert.strictEqual(requests.length, 1); + const reqOpts = requests[0] + .reqOpts as google.bigtable.v2.IExecuteQueryRequest; + + assert.notStrictEqual(reqOpts.params!['a'].type!.int64Type, null); + assert.notStrictEqual(reqOpts.params!['b'].type!.float64Type, null); + assert.notStrictEqual(reqOpts.params!['c'].type!.boolType, null); + assert.notStrictEqual(reqOpts.params!['d'].type!.bytesType, null); + assert.notStrictEqual(reqOpts.params!['e'].type!.stringType, null); + assert.notStrictEqual(reqOpts.params!['f'].type!.dateType, null); + assert.notStrictEqual(reqOpts.params!['g'].type!.timestampType, null); + assert.notStrictEqual(reqOpts.params!['h'].type!.arrayType, null); + assert.notStrictEqual( + reqOpts.params!['h'].type!.arrayType?.elementType, + null, + ); + done(); + }, + ); + }); + + it('large bigints are rejected', () => { + const preparedStatement = new PreparedStatement( + BIGTABLE, + createPrepareQueryResponse(['f', pbType({int64Type: {}})]), + {} as any, + {a: SqlTypes.Int64()}, + ); + assert.throws( + () => { + instance.executeQuery( + { + preparedStatement, + parameters: {a: BigInt('-9223372036854775809')}, + } as any, + () => {}, + ); + }, + { + message: + 'Value -9223372036854775809 cannot be converted to int64 - it is out of range.', + }, + ); + assert.throws( + () => { + instance.executeQuery( + { + preparedStatement, + parameters: {a: BigInt('9223372036854775808')}, + } as any, + () => {}, + ); + }, + { + message: + 'Value 9223372036854775808 cannot be converted to int64 - it is out of range.', + }, + ); + }); + + it('duplicate struct field names are not accessible by name', done => { + const preparedStatement = createPreparedStatement([ + 's', + pbType({ + structType: { + fields: [ + {fieldName: 'f1', type: pbType({int64Type: {}})}, + {fieldName: 'f2', type: pbType({int64Type: {}})}, + {fieldName: 'f1', type: pbType({stringType: {}})}, + ], + }, + }), + ]); + responsesRef.setResponses([ + createProtoRows('token1', 111, undefined, { + arrayValue: { + values: [{intValue: 1}, {intValue: 2}, {stringValue: '3'}], + }, + }), + ]); + instance.executeQuery(preparedStatement, (err, rows) => { + const struct = rows![0].get('s')! 
as Struct; + assert.strictEqual(struct.get(0), BigInt(1)); + assert.strictEqual(struct.get(1), BigInt(2)); + assert.strictEqual(struct.get(2), '3'); + + assert.throws(() => { + struct.get('f1'); + }, Error); + done(); + }); + }); + + it('duplicate row field names are not accessible by name', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ['f1', pbType({int64Type: {}})], + ); + responsesRef.setResponses([ + createProtoRows( + 'token1', + 111, + undefined, + {intValue: 1}, + {intValue: 2}, + {intValue: 3}, + ), + ]); + executeQueryResultWithMetadata( + instance, + preparedStatement, + (err, rows, metadata) => { + const row = rows![0]; + assert.strictEqual(row.get(0), BigInt(1)); + assert.strictEqual(row.get(1), BigInt(2)); + assert.strictEqual(row.get(2), BigInt(3)); + assert.strictEqual(row.get('f2'), BigInt(2)); + + assert.throws(() => { + row.get('f1'); + }, Error); + + assert.strictEqual(metadata!.get(0).type, 'int64'); + assert.strictEqual(metadata!.get(1).type, 'int64'); + assert.strictEqual(metadata!.get(2).type, 'int64'); + assert.strictEqual(metadata!.get('f2').type, 'int64'); + + assert.throws(() => { + metadata!.get('f1'); + }, Error); + + done(); + }, + ); + }); + + it('unfinished batch is detected', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + responsesRef.setResponses([ + createProtoRows(undefined, undefined, undefined, {intValue: 3}), + ]); + instance.executeQuery(preparedStatement, (err, result) => { + assert.notStrictEqual(err, null); + assert.ok(err instanceof Error); + done(); + }); + }); + + it('token without batch ending detected', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + responsesRef.setResponses([ + createProtoRows('token', undefined, undefined, {intValue: 3}), + ]); + instance.executeQuery(preparedStatement, (err, result) => { + assert.notStrictEqual(err, null); + assert.ok(err instanceof Error); + done(); + }); + }); + + it('reset works', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + + const respWithReset1 = createProtoRows(undefined, undefined, undefined, { + intValue: 1, + }); + respWithReset1.results!.reset = true; + + const respWithReset2 = createProtoRows( + undefined, + 111, + undefined, + {intValue: 3}, + {intValue: 4}, + ); + respWithReset2.results!.reset = true; + + responsesRef.setResponses([ + createProtoRows( + undefined, + undefined, + undefined, + {intValue: 1}, + {intValue: 2}, + ), + respWithReset1, + respWithReset2, + createProtoRows('token', 222, undefined, {intValue: 5}, {intValue: 6}), + ]); + instance.executeQuery(preparedStatement, (err, result) => { + assert.equal(err, null); + assert.strictEqual(result![0].get(0), BigInt(3)); + assert.strictEqual(result![0].get(1), BigInt(4)); + assert.strictEqual(result![1].get(0), BigInt(5)); + assert.strictEqual(result![1].get(1), BigInt(6)); + done(); + }); + }); + + it('partial row after token detected', done => { + const preparedStatement = createPreparedStatement( + ['f1', pbType({int64Type: {}})], + ['f2', pbType({int64Type: {}})], + ); + responsesRef.setResponses([ + createProtoRows( + 'token1', + 111, + undefined, + {intValue: 1}, + {intValue: 2}, + {intValue: 3}, + ), + ]); + instance.executeQuery(preparedStatement, (err, result) 
=> {
+      assert.notStrictEqual(err, null);
+      assert.ok(err instanceof Error);
+      done();
+    });
+  });
+
+  it('partial row after batch checksum detected', done => {
+    const preparedStatement = createPreparedStatement(
+      ['f1', pbType({int64Type: {}})],
+      ['f2', pbType({int64Type: {}})],
+    );
+    responsesRef.setResponses([
+      createProtoRows(
+        undefined,
+        111,
+        undefined,
+        {intValue: 1},
+        {intValue: 2},
+        {intValue: 3},
+      ),
+      createProtoRows('token1', 222, undefined, {intValue: 4}),
+    ]);
+    instance.executeQuery(preparedStatement, (err, result) => {
+      assert.notStrictEqual(err, null);
+      assert.ok(err instanceof Error);
+      done();
+    });
+  });
+
+  it('checksum failure detected', done => {
+    checksumIsValid = false;
+    const preparedStatement = createPreparedStatement(
+      ['f1', pbType({int64Type: {}})],
+      ['f2', pbType({int64Type: {}})],
+    );
+    responsesRef.setResponses([
+      createProtoRows(
+        undefined,
+        111,
+        undefined,
+        {intValue: 1},
+        {intValue: 2},
+      ),
+      createProtoRows('token1', 222, undefined, {intValue: 3}, {intValue: 4}),
+    ]);
+    instance.executeQuery(preparedStatement, (err, result) => {
+      assert.notStrictEqual(err, null);
+      assert.ok(err instanceof Error);
+      done();
+    });
+  });
+  });
+});
diff --git a/test/metric-service-client-credentials.ts b/test/metric-service-client-credentials.ts
new file mode 100644
index 000000000..edd8001eb
--- /dev/null
+++ b/test/metric-service-client-credentials.ts
@@ -0,0 +1,85 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
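+
+// These tests stub the CloudMonitoringExporter via proxyquire to check that
+// options passed to the Bigtable constructor (credentials, projectId) flow
+// through the metrics handler to the exporter used for client-side metrics.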
+
+import * as proxyquire from 'proxyquire';
+import {ClientOptions, grpc} from 'google-gax';
+import * as assert from 'assert';
+import {setupBigtable} from '../system-test/client-side-metrics-setup-table';
+import {MetricServiceClient} from '@google-cloud/monitoring';
+
+describe('Bigtable/MetricServiceClientCredentials', () => {
+  it('should pass the credentials to the exporter', done => {
+    const clientOptions = {
+      metricsEnabled: true,
+      sslCreds: grpc.credentials.createInsecure(),
+    };
+    class FakeExporter {
+      constructor(options: ClientOptions) {
+        try {
+          assert.strictEqual(options, clientOptions);
+          done();
+        } catch (e) {
+          done(e);
+        }
+      }
+    }
+    const FakeGCPMetricsHandler = proxyquire(
+      '../src/client-side-metrics/gcp-metrics-handler.js',
+      {
+        './exporter': {
+          CloudMonitoringExporter: FakeExporter,
+        },
+      },
+    ).GCPMetricsHandler;
+    const FakeBigtable = proxyquire('../src/index.js', {
+      './client-side-metrics/gcp-metrics-handler': {
+        GCPMetricsHandler: FakeGCPMetricsHandler,
+      },
+    }).Bigtable;
+    new FakeBigtable(clientOptions);
+  });
+  it('should use second project for the metric service client', async () => {
+    const SECOND_PROJECT_ID = 'second-project-id';
+    const clientOptions = {metricsEnabled: true, projectId: SECOND_PROJECT_ID};
+    let savedOptions: ClientOptions = {};
+    class FakeExporter {
+      constructor(options: ClientOptions) {
+        savedOptions = options;
+      }
+    }
+    const FakeGCPMetricsHandler = proxyquire(
+      '../src/client-side-metrics/gcp-metrics-handler.js',
+      {
+        './exporter': {
+          CloudMonitoringExporter: FakeExporter,
+        },
+      },
+    ).GCPMetricsHandler;
+    const FakeBigtable = proxyquire('../src/index.js', {
+      './client-side-metrics/gcp-metrics-handler': {
+        GCPMetricsHandler: FakeGCPMetricsHandler,
+      },
+    }).Bigtable;
+    new FakeBigtable(clientOptions);
+    // savedOptions are the options passed down to the exporter.
+    // We want to ensure that when the second project id is provided to the
+    // fake client, the options forwarded to the exporter resolve to that
+    // same project id when they are later provided to the
+    // MetricServiceClient, as this is required to save the metrics to the
+    // right project.
+    const client = new MetricServiceClient(savedOptions);
+    const projectIdUsed = await client.getProjectId();
+    assert.strictEqual(projectIdUsed, SECOND_PROJECT_ID);
+  });
+});
diff --git a/test/metrics-collector/gcp-metrics-handler.ts b/test/metrics-collector/gcp-metrics-handler.ts
index 655bdd78d..c3dddb086 100644
--- a/test/metrics-collector/gcp-metrics-handler.ts
+++ b/test/metrics-collector/gcp-metrics-handler.ts
@@ -18,7 +18,6 @@ import {
   ExportResult,
   metricsToRequest,
 } from '../../src/client-side-metrics/exporter';
-import {GCPMetricsHandler} from '../../src/client-side-metrics/gcp-metrics-handler';
 import {MetricExporter} from '@google-cloud/opentelemetry-cloud-monitoring-exporter';
 import {expectedRequestsHandled} from '../../test-common/metrics-handler-fixture';
 import {
@@ -31,6 +30,37 @@ import {
 } from '../../test-common/expected-otel-export-input';
 import * as assert from 'assert';
 import {replaceTimestamps} from '../../test-common/replace-timestamps';
+import * as proxyquire from 'proxyquire';
+
+/**
+ * Cleans a ResourceMetrics object by replacing client UUIDs with a placeholder.
+ *
+ * This function creates a deep copy of the input ResourceMetrics object and
+ * then iterates through its metrics, replacing any existing client_uid attribute
+ * in the data points with the string 'fake-uuid'.
This is primarily used in + * testing to ensure consistent metric output by removing the variability of + * randomly generated client UUIDs. + * + * @param {ResourceMetrics} metrics The ResourceMetrics object to clean. + * @returns {ResourceMetrics} A new ResourceMetrics object with client UUIDs replaced by 'fake-uuid'. + */ +function cleanMetrics(metrics: ResourceMetrics): ResourceMetrics { + const newMetrics = JSON.parse(JSON.stringify(metrics)); // Deep copy to avoid modifying the original object + + newMetrics.scopeMetrics.forEach((scopeMetric: any) => { + scopeMetric.metrics.forEach((metric: any) => { + if (metric.dataPoints) { + metric.dataPoints.forEach((dataPoint: any) => { + if (dataPoint.attributes && dataPoint.attributes.client_uid) { + dataPoint.attributes.client_uid = 'fake-uuid'; + } + }); + } + }); + }); + + return newMetrics; +} describe('Bigtable/GCPMetricsHandler', () => { it('Should export a value ready for sending to the CloudMonitoringExporter', function (done) { @@ -50,6 +80,10 @@ describe('Bigtable/GCPMetricsHandler', () => { let exported = false; class TestExporter extends MetricExporter { + constructor() { + super(); + } + export( metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void, @@ -57,6 +91,7 @@ describe('Bigtable/GCPMetricsHandler', () => { if (!exported) { exported = true; try { + metrics = cleanMetrics(metrics); replaceTimestamps( metrics as unknown as typeof expectedOtelExportInput, [123, 789], @@ -84,7 +119,10 @@ describe('Bigtable/GCPMetricsHandler', () => { JSON.parse(JSON.stringify(metrics)), expectedOtelExportInput, ); - const convertedRequest = metricsToRequest(parsedExportInput); + const convertedRequest = metricsToRequest( + 'my-project', + parsedExportInput, + ); assert.deepStrictEqual( convertedRequest.timeSeries.length, expectedOtelExportConvertedValue.timeSeries.length, @@ -113,10 +151,17 @@ describe('Bigtable/GCPMetricsHandler', () => { } } } + const stubs = { + './exporter': { + CloudMonitoringExporter: TestExporter, + }, + }; + const FakeMetricsHandler = proxyquire( + '../../src/client-side-metrics/gcp-metrics-handler.js', + stubs, + ).GCPMetricsHandler; - const handler = new GCPMetricsHandler( - new TestExporter({projectId: 'some-project'}), - ); + const handler = new FakeMetricsHandler('my-project'); for (const request of expectedRequestsHandled) { if (request.attemptLatency) { diff --git a/test/metrics-collector/metrics-collector.ts b/test/metrics-collector/metrics-collector.ts index c712ecb0b..626c254c0 100644 --- a/test/metrics-collector/metrics-collector.ts +++ b/test/metrics-collector/metrics-collector.ts @@ -22,21 +22,27 @@ import { } from '../../src/client-side-metrics/client-side-metrics-attributes'; import {grpc} from 'google-gax'; import {expectedRequestsHandled} from '../../test-common/metrics-handler-fixture'; +import * as path from 'path'; // Import the 'path' module import * as gax from 'google-gax'; import * as proxyquire from 'proxyquire'; -const root = gax.protobuf.loadSync( - './protos/google/bigtable/v2/response_params.proto', +import {GCPMetricsHandler} from '../../src/client-side-metrics/gcp-metrics-handler'; +import {ResourceMetrics} from '@opentelemetry/sdk-metrics'; +const protoPath = path.join( + __dirname, + '../../protos/google/bigtable/v2/response_params.proto', ); +const root = gax.protobuf.loadSync(protoPath); const ResponseParams = root.lookupType('ResponseParams'); +const projectId = 'my-project'; + /** * A fake implementation of the Bigtable client for testing purposes. 
Provides a * metricsTracerFactory and a stubbed projectId method. */ class FakeBigtable { - clientUid = 'fake-uuid'; appProfileId?: string; - projectId = 'my-project'; + projectId = projectId; } /** @@ -49,6 +55,11 @@ class FakeInstance { id = 'fakeInstanceId'; } +const logger = {value: ''}; +const testHandler = new TestMetricsHandler(); +testHandler.projectId = projectId; +testHandler.messages = logger; + describe('Bigtable/MetricsCollector', () => { class FakeHRTime { startTime = BigInt(0); @@ -63,17 +74,16 @@ describe('Bigtable/MetricsCollector', () => { 'node:process': { hrtime: new FakeHRTime(), }, + './gcp-metrics-handler': { + GCPMetricsHandler: testHandler, + }, }; const FakeOperationsMetricsCollector = proxyquire( '../../src/client-side-metrics/operation-metrics-collector.js', stubs, ).OperationMetricsCollector; - const logger = {value: ''}; - it('should record the right metrics with a typical method call', async () => { - const testHandler = new TestMetricsHandler(logger); - const metricsHandlers = [testHandler]; class FakeTable { id = 'fakeTableId'; instance = new FakeInstance(); @@ -107,9 +117,9 @@ describe('Bigtable/MetricsCollector', () => { }; const metricsCollector = new FakeOperationsMetricsCollector( this, - metricsHandlers, MethodName.READ_ROWS, StreamingState.STREAMING, + [testHandler as unknown as GCPMetricsHandler], ); // In this method we simulate a series of events that might happen // when a user calls one of the Table methods. @@ -133,10 +143,7 @@ describe('Bigtable/MetricsCollector', () => { logger.value += '9. User receives second row.\n'; metricsCollector.onRowReachesUser(); logger.value += '10. A transient error occurs.\n'; - metricsCollector.onAttemptComplete( - this.bigtable.projectId, - grpc.status.DEADLINE_EXCEEDED, - ); + metricsCollector.onAttemptComplete(grpc.status.DEADLINE_EXCEEDED); logger.value += '11. After a timeout, the second attempt is made.\n'; metricsCollector.onAttemptStart(); logger.value += '12. Client receives status information.\n'; @@ -155,14 +162,7 @@ describe('Bigtable/MetricsCollector', () => { metricsCollector.onRowReachesUser(); logger.value += '19. User reads row 1\n'; logger.value += '20. Stream ends, operation completes\n'; - metricsCollector.onAttemptComplete( - this.bigtable.projectId, - grpc.status.OK, - ); - metricsCollector.onOperationComplete( - this.bigtable.projectId, - grpc.status.OK, - ); + metricsCollector.onOperationComplete(grpc.status.OK); } } } diff --git a/test/metrics-collector/metricsToRequest.ts b/test/metrics-collector/metricsToRequest.ts index cd71a36fd..fcba50ab2 100644 --- a/test/metrics-collector/metricsToRequest.ts +++ b/test/metrics-collector/metricsToRequest.ts @@ -23,7 +23,9 @@ import {ResourceMetrics} from '@opentelemetry/sdk-metrics'; describe('Bigtable/metricsToRequest', () => { it('Converts an otel request to a request ready for the metric service client', () => { + const projectId = 'my-project'; const convertedValue = metricsToRequest( + projectId, expectedOtelExportInput as unknown as ResourceMetrics, ); assert.deepStrictEqual( diff --git a/test/metrics-collector/typical-method-call.txt b/test/metrics-collector/typical-method-call.txt index 917dcd0c6..17a134445 100644 --- a/test/metrics-collector/typical-method-call.txt +++ b/test/metrics-collector/typical-method-call.txt @@ -15,7 +15,7 @@ getDate call returns 5000 ms 10. A transient error occurs. 
getDate call returns 6000 ms Recording parameters for onAttemptComplete: -{"attemptLatency":4000,"serverLatency":101,"connectivityErrorCount":0,"streaming":"true","status":"4","client_name":"nodejs-bigtable","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows","client_uid":"fake-uuid"},"projectId":"my-project"} +{"projectId":"my-project","attemptLatency":4000,"serverLatency":101,"connectivityErrorCount":0,"streaming":"true","status":"4","client_name":"nodejs-bigtable","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows"}} 11. After a timeout, the second attempt is made. getDate call returns 7000 ms 12. Client receives status information. @@ -31,7 +31,7 @@ getDate call returns 9000 ms 20. Stream ends, operation completes getDate call returns 10000 ms Recording parameters for onAttemptComplete: -{"attemptLatency":3000,"serverLatency":103,"connectivityErrorCount":0,"streaming":"true","status":"0","client_name":"nodejs-bigtable","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows","client_uid":"fake-uuid"},"projectId":"my-project"} +{"projectId":"my-project","attemptLatency":3000,"serverLatency":103,"connectivityErrorCount":0,"streaming":"true","status":"0","client_name":"nodejs-bigtable","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows"}} getDate call returns 11000 ms Recording parameters for onOperationComplete: -{"status":"0","streaming":"true","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows","client_uid":"fake-uuid"},"client_name":"nodejs-bigtable","projectId":"my-project","operationLatency":10000,"retryCount":1,"firstResponseLatency":2000,"applicationLatencies":[1000,1000]} +{"projectId":"my-project","status":"0","streaming":"true","metricsCollectorData":{"instanceId":"fakeInstanceId","table":"fakeTableId","cluster":"fake-cluster3","zone":"us-west1-c","method":"Bigtable.ReadRows"},"client_name":"nodejs-bigtable","operationLatency":10000,"retryCount":1,"firstResponseLatency":2000,"applicationLatencies":[1000,1000]} diff --git a/test/row.ts b/test/row.ts index 35bd985bc..9e4833239 100644 --- a/test/row.ts +++ b/test/row.ts @@ -19,9 +19,19 @@ import * as proxyquire from 'proxyquire'; import * as sinon from 'sinon'; import {Mutation} from '../src/mutation.js'; import * as rw from '../src/row'; -import {Table, Entry} from '../src/table.js'; +import { + Table, + Entry, + GetRowsOptions, + GetRowsCallback, + GetRowsResponse, +} from '../src/table.js'; import {Chunk} from '../src/chunktransformer.js'; -import {CallOptions} from 'google-gax'; +import {CallOptions, ServiceError} from 'google-gax'; +import {ClientSideMetricsConfigManager} from '../src/client-side-metrics/metrics-config-manager'; +import {Bigtable} from '../src/'; +import {getRowsInternal} from '../src/utils/getRowsInternal'; +import {TabularApiSurface} from '../src/tabular-api-surface'; const sandbox = sinon.createSandbox(); @@ -78,19 +88,36 @@ describe('Bigtable/Row', () => { let RowError: typeof rw.RowError; let row: rw.Row; - before(() => { + function getFakeRow( + getRowsInternal: ( + table: TabularApiSurface, + singleRow: boolean, + 
optionsOrCallback?: GetRowsOptions | GetRowsCallback, + cb?: GetRowsCallback, + ) => void | Promise, + ) { const Fake = proxyquire('../src/row.js', { '@google-cloud/promisify': fakePromisify, './mutation.js': {Mutation: FakeMutation}, './filter.js': {Filter: FakeFilter}, './row-data-utils.js': {RowDataUtils: FakeRowDataUtil}, + './utils/getRowsInternal': { + getRowsInternal, + }, }); - Row = Fake.Row; RowError = Fake.RowError; + return Fake; + } + + before(() => { + const Fake = getFakeRow(() => {}); + Row = Fake.Row; }); beforeEach(() => { row = new Row(TABLE, ROW_ID); + row.table.bigtable._metricsConfigManager = + new ClientSideMetricsConfigManager([]); }); afterEach(() => { @@ -997,15 +1024,48 @@ describe('Bigtable/Row', () => { }); describe('get', () => { + function getRowInstance( + fn: (reqOpts: any) => void | Promise, + ) { + const getRowsInternal = ( + table: TabularApiSurface, + singleRow: boolean, + optionsOrCallback?: GetRowsOptions | GetRowsCallback, + cb?: GetRowsCallback, + ) => { + return fn(optionsOrCallback); + }; + const Fake = getFakeRow(getRowsInternal); + Row = Fake.Row; + row = new Row(TABLE, ROW_ID); + return row; + } + + function getRowInstanceForErrResp(err: ServiceError | null, resp?: any[]) { + const getRowsInternal = ( + table: TabularApiSurface, + singleRow: boolean, + optionsOrCallback?: GetRowsOptions | GetRowsCallback, + cb?: GetRowsCallback, + ) => { + if (cb) { + cb(err, resp); + } + }; + const Fake = getFakeRow(getRowsInternal); + Row = Fake.Row; + row = new Row(TABLE, ROW_ID); + return row; + } it('should provide the proper request options', done => { // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.strictEqual(reqOpts.keys[0], ROW_ID); assert.strictEqual(reqOpts.filter, undefined); assert.strictEqual(FakeMutation.parseColumnName.callCount, 0); done(); }; - + const row = getRowInstance(fn); row.get(assert.ifError); }); @@ -1022,12 +1082,13 @@ describe('Bigtable/Row', () => { ]; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); assert.strictEqual(FakeMutation.parseColumnName.callCount, 1); assert(FakeMutation.parseColumnName.calledWith(keys[0])); done(); }; + const row = getRowInstance(fn); row.get(keys, assert.ifError); }); @@ -1058,7 +1119,7 @@ describe('Bigtable/Row', () => { ]; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); const spy = FakeMutation.parseColumnName; @@ -1068,6 +1129,7 @@ describe('Bigtable/Row', () => { assert.strictEqual(spy.getCall(1).args[0], keys[1]); done(); }; + const row = getRowInstance(fn); row.get(keys, assert.ifError); }); @@ -1082,12 +1144,13 @@ describe('Bigtable/Row', () => { ]; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); assert.strictEqual(FakeMutation.parseColumnName.callCount, 1); assert(FakeMutation.parseColumnName.calledWith(keys[0])); done(); }; + const row = getRowInstance(fn); row.get(keys, assert.ifError); }); @@ -1121,13 +1184,14 @@ describe('Bigtable/Row', () => { ]; // eslint-disable-next-line @typescript-eslint/no-explicit-any 
- (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); assert.strictEqual(FakeMutation.parseColumnName.callCount, 1); assert(FakeMutation.parseColumnName.calledWith(keys[0])); assert.strictEqual(reqOpts.decode, options.decode); done(); }; + const row = getRowInstance(fn); row.get(keys, options, assert.ifError); }); @@ -1175,13 +1239,14 @@ describe('Bigtable/Row', () => { ]; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); assert.strictEqual(FakeMutation.parseColumnName.callCount, 2); assert(FakeMutation.parseColumnName.calledWith(keys[0])); assert.strictEqual(reqOpts.decode, options.decode); done(); }; + const row = getRowInstance(fn); row.get(keys, options, assert.ifError); }); @@ -1196,10 +1261,11 @@ describe('Bigtable/Row', () => { const expectedFilter = options.filter; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.deepStrictEqual(reqOpts.filter, expectedFilter); done(); }; + const row = getRowInstance(fn); row.get(keys, options, assert.ifError); }); @@ -1210,18 +1276,19 @@ describe('Bigtable/Row', () => { }; // eslint-disable-next-line @typescript-eslint/no-explicit-any - (row.table.getRows as Function) = (reqOpts: any) => { + const fn = (reqOpts: any) => { assert.strictEqual(reqOpts.decode, options.decode); assert(!reqOpts.filter); done(); }; + const row = getRowInstance(fn); row.get(options, assert.ifError); }); it('should return an error to the callback', done => { const error = new Error('err'); - sandbox.stub(row.table, 'getRows').callsArgWith(1, error); + const row = getRowInstanceForErrResp(error as ServiceError); row.get((err, row) => { assert.strictEqual(error, err); assert.strictEqual(row, undefined); @@ -1230,7 +1297,7 @@ describe('Bigtable/Row', () => { }); it('should return a custom error if the row is not found', done => { - sandbox.stub(row.table, 'getRows').callsArgWith(1, null, []); + const row = getRowInstanceForErrResp(null, []); row.get((err, row_) => { assert(err instanceof RowError); assert.strictEqual(err!.message, 'Unknown row: ' + row.id + '.'); @@ -1245,7 +1312,7 @@ describe('Bigtable/Row', () => { a: 'a', b: 'b', }; - sandbox.stub(row.table, 'getRows').callsArgWith(1, null, [fakeRow]); + const row = getRowInstanceForErrResp(null, [fakeRow]); row.get((err, row_) => { assert.ifError(err); assert.strictEqual(row_, row); @@ -1263,11 +1330,11 @@ describe('Bigtable/Row', () => { }; const keys = ['a', 'b']; + const row = getRowInstanceForErrResp(null, [fakeRow]); row.data = { c: 'c', }; - sandbox.stub(row.table, 'getRows').callsArgWith(1, null, [fakeRow]); row.get(keys, (err, data) => { assert.ifError(err); assert.deepStrictEqual(Object.keys(data), keys); diff --git a/test/table.ts b/test/table.ts index a1a28282a..913e77c12 100644 --- a/test/table.ts +++ b/test/table.ts @@ -30,6 +30,11 @@ import * as tblTypes from '../src/table'; import {Bigtable, RequestOptions} from '../src'; import {EventEmitter} from 'events'; import {TableUtils} from '../src/utils/table'; +import {ClientSideMetricsConfigManager} from '../src/client-side-metrics/metrics-config-manager'; +import {OperationMetricsCollector} from '../src/client-side-metrics/operation-metrics-collector'; +import {SinonSpy} from 'sinon'; +import 
{TabularApiSurface} from '../src/tabular-api-surface'; +import {GetRowsOptions} from '../src/table'; const sandbox = sinon.createSandbox(); const noop = () => {}; @@ -59,6 +64,24 @@ function createFake(klass: any) { }; } +class FakeMetricsCollector { + onOperationStart() {} + onOperationComplete() {} + onResponse() {} + onAttemptStart() {} + onAttemptComplete() {} + onMetadataReceived() {} + handleStatusAndMetadata() {} + onStatusMetadataReceived() {} + onRowReachesUser() {} +} + +class FakeMetricsConfigManager extends ClientSideMetricsConfigManager { + createOperation() { + return new FakeMetricsCollector() as unknown as OperationMetricsCollector; + } +} + const FakeFamily = createFake(Family); FakeFamily.formatRule_ = sinon.spy(rule => rule); @@ -100,6 +123,43 @@ const FakeFilter = { }, }; +function getTableMock( + createReadStreamInternal: ( + table: TabularApiSurface, + singleRow: boolean, + opts?: GetRowsOptions, + ) => PassThrough, +) { + const FakeGetRows = proxyquire('../src/utils/getRowsInternal.js', { + './createReadStreamInternal': { + createReadStreamInternal: createReadStreamInternal, + }, + }); + const FakeTabularApiSurface = proxyquire('../src/tabular-api-surface.js', { + '@google-cloud/promisify': fakePromisify, + './family.js': {Family: FakeFamily}, + './mutation.js': {Mutation: FakeMutation}, + './filter.js': {Filter: FakeFilter}, + pumpify, + './row.js': {Row: FakeRow}, + './chunktransformer.js': {ChunkTransformer: FakeChunkTransformer}, + './utils/createReadStreamInternal': { + createReadStreamInternal, + }, + './utils/getRowsInternal': { + getRowsInternal: FakeGetRows.getRowsInternal, + }, + }).TabularApiSurface; + const Table = proxyquire('../src/table.js', { + '@google-cloud/promisify': fakePromisify, + './family.js': {Family: FakeFamily}, + './mutation.js': {Mutation: FakeMutation}, + './row.js': {Row: FakeRow}, + './tabular-api-surface': {TabularApiSurface: FakeTabularApiSurface}, + }).Table; + return Table; +} + describe('Bigtable/Table', () => { const TABLE_ID = 'my-table'; let INSTANCE: inst.Instance; @@ -110,27 +170,26 @@ describe('Bigtable/Table', () => { let table: any; before(() => { - const FakeTabularApiSurface = proxyquire('../src/tabular-api-surface.js', { - '@google-cloud/promisify': fakePromisify, - './family.js': {Family: FakeFamily}, - './mutation.js': {Mutation: FakeMutation}, - './filter.js': {Filter: FakeFilter}, - pumpify, - './row.js': {Row: FakeRow}, - './chunktransformer.js': {ChunkTransformer: FakeChunkTransformer}, - }).TabularApiSurface; - Table = proxyquire('../src/table.js', { - '@google-cloud/promisify': fakePromisify, - './family.js': {Family: FakeFamily}, - './mutation.js': {Mutation: FakeMutation}, - './row.js': {Row: FakeRow}, - './tabular-api-surface': {TabularApiSurface: FakeTabularApiSurface}, - }).Table; + const FakeCreateReadStreamInternal = proxyquire( + '../src/utils/createReadStreamInternal.js', + { + '../row.js': {Row: FakeRow}, + '../chunktransformer.js': {ChunkTransformer: FakeChunkTransformer}, + '../filter.js': {Filter: FakeFilter}, + '../mutation.js': {Mutation: FakeMutation}, + pumpify, + }, + ).createReadStreamInternal; + Table = getTableMock(FakeCreateReadStreamInternal); }); beforeEach(() => { INSTANCE = { - bigtable: {} as Bigtable, + bigtable: { + _metricsConfigManager: new FakeMetricsConfigManager( + [], + ) as ClientSideMetricsConfigManager, + } as Bigtable, name: 'a/b/c/d', } as inst.Instance; TABLE_NAME = INSTANCE.name + '/tables/' + TABLE_ID; @@ -2301,13 +2360,14 @@ describe('Bigtable/Table', () => { 
describe('getRows', () => { describe('success', () => { + let createReadStreamInternal: SinonSpy<[], PassThrough>; const fakeRows = [ {key: 'c', data: {}}, {key: 'd', data: {}}, ]; beforeEach(() => { - table.createReadStream = sinon.spy(() => { + createReadStreamInternal = sinon.spy(() => { const stream = new PassThrough({ objectMode: true, }); @@ -2322,6 +2382,17 @@ describe('Bigtable/Table', () => { return stream; }); + Table = getTableMock(createReadStreamInternal); + INSTANCE = { + bigtable: { + _metricsConfigManager: new FakeMetricsConfigManager( + [], + ) as ClientSideMetricsConfigManager, + } as Bigtable, + name: 'a/b/c/d', + } as inst.Instance; + TABLE_NAME = INSTANCE.name + '/tables/' + TABLE_ID; + table = new Table(INSTANCE, TABLE_ID); }); it('should return the rows to the callback', done => { @@ -2332,8 +2403,8 @@ describe('Bigtable/Table', () => { assert.deepStrictEqual(rows, fakeRows); // eslint-disable-next-line @typescript-eslint/no-explicit-any - const spy = (table as any).createReadStream.getCall(0); - assert.strictEqual(spy.args[0], options); + const spy = createReadStreamInternal.getCall(0); + assert.strictEqual((spy.args as any)[2], options); done(); }); }); @@ -2348,10 +2419,11 @@ describe('Bigtable/Table', () => { }); describe('error', () => { + let createReadStreamInternal: SinonSpy<[], PassThrough>; const error = new Error('err'); beforeEach(() => { - table.createReadStream = sinon.spy(() => { + createReadStreamInternal = sinon.spy(() => { const stream = new PassThrough({ objectMode: true, }); @@ -2362,6 +2434,17 @@ describe('Bigtable/Table', () => { return stream; }); + Table = getTableMock(createReadStreamInternal); + INSTANCE = { + bigtable: { + _metricsConfigManager: new FakeMetricsConfigManager( + [], + ) as ClientSideMetricsConfigManager, + } as Bigtable, + name: 'a/b/c/d', + } as inst.Instance; + TABLE_NAME = INSTANCE.name + '/tables/' + TABLE_ID; + table = new Table(INSTANCE, TABLE_ID); }); it('should return the error to the callback', done => { diff --git a/test/timed-stream.ts b/test/timed-stream.ts new file mode 100644 index 000000000..c58cf6552 --- /dev/null +++ b/test/timed-stream.ts @@ -0,0 +1,344 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
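+
+// The tests below pipe rows through a TimedStream and inspect
+// getTotalDurationMs(): time spent in synchronous (busy-wait) processing is
+// expected to be counted, while time spent awaiting asynchronous sleeps is
+// expected to be excluded.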
+
+import {describe, it} from 'mocha';
+import {PassThrough, Readable} from 'stream';
+import {TimedStream} from '../src/timed-stream';
+import * as assert from 'assert';
+
+// set up streams
+function* numberGenerator(n: number) {
+  for (let i = 0; i < n; i++) {
+    yield String(i) + '\n';
+  }
+}
+
+describe('Bigtable/TimedStream', () => {
+  describe('with handlers', () => {
+    describe('with no delay from server', () => {
+      it('should measure the total time accurately for a series of 30 rows with a synchronous call', function (done) {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        timedStream.on('data', async (chunk: any) => {
+          // Simulate 1 second of busy work
+          const startTime = Date.now();
+          while (Date.now() - startTime < 1000) {
+            /* empty */
+          }
+        });
+        timedStream.on('end', () => {
+          const totalMilliseconds = timedStream.getTotalDurationMs();
+          try {
+            assert(totalMilliseconds > 29000);
+            assert(totalMilliseconds < 31000);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        });
+      });
+      it('should measure the total time accurately for a series of 30 rows with an async call', function (done) {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        timedStream.on('data', async (chunk: any) => {
+          // Sleep for 1 second; awaited idle time should not count as busy work
+          const sleep = (ms: number) =>
+            new Promise(resolve => setTimeout(resolve, ms));
+          await sleep(1000);
+        });
+        timedStream.on('end', () => {
+          const totalMilliseconds = timedStream.getTotalDurationMs();
+          try {
+            assert(totalMilliseconds < 500);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        });
+      });
+      it('should measure the total time accurately for a series of 30 rows with a sync then an async call', function (done) {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        timedStream.on('data', async (chunk: any) => {
+          const startTime = Date.now();
+          // Simulate 1 second of busy work
+          while (Date.now() - startTime < 1000) {
+            /* empty */
+          }
+          const sleep = (ms: number) =>
+            new Promise(resolve => setTimeout(resolve, ms));
+          await sleep(1000);
+        });
+        timedStream.on('end', () => {
+          const totalMilliseconds = timedStream.getTotalDurationMs();
+          try {
+            assert(totalMilliseconds < 32000);
+            assert(totalMilliseconds > 28000);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        });
+      });
+      it('should measure the total time accurately for a series of 30 rows with an async call then a sync call', function (done) {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        timedStream.on('data', async (chunk: any) => {
+          const startTime = Date.now();
+          // Sleep for 1 second of idle (not busy) time
+          const sleep = (ms: number) =>
+            new Promise(resolve => setTimeout(resolve, ms));
+          await sleep(1000);
+          // The busy-wait exits almost immediately: a full second has
+          // already elapsed since startTime was captured
+          while (Date.now() - startTime < 1000) {
+            /* empty */
+          }
+        });
+        timedStream.on('end', () => {
+          const totalMilliseconds = timedStream.getTotalDurationMs();
+          try {
+            assert(totalMilliseconds < 500);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        });
+      });
+    });
+    describe('with delay from server', () => {
+      it('should measure the total time accurately for a series of 10 rows', function (done) {
+        this.timeout(200000);
+        const dataEvents = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(i =>
+          i.toString(),
+        );
+        const sourceStream = new PassThrough();
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream);
+
+        setTimeout(async () => {
+          // iterate stream
+          timedStream.on('data', async (chunk: any) => {
+            // Simulate 1 second of busy work
+            const startTime = Date.now();
+            while (Date.now() - startTime < 1000) {
+              /* empty */
+            }
+          });
+          timedStream.on('end', () => {
+            clearInterval(interval);
+            // check results
+            try {
+              const totalMilliseconds = timedStream.getTotalDurationMs();
+              // totalMilliseconds should be around 10 seconds, 1 per row
+              assert(totalMilliseconds > 9000);
+              assert(totalMilliseconds < 11000);
+              done();
+            } catch (e) {
+              done(e);
+            }
+          });
+        }, 500);
+
+        const interval = setInterval(() => {
+          if (dataEvents.length > 0) {
+            const dataEvent = dataEvents.shift();
+            sourceStream.write(dataEvent);
+          } else {
+            sourceStream.emit('end');
+          }
+        }, 5000);
+      });
+      it('should measure the total time accurately for a series of 40 rows with backpressure and a delay', function (done) {
+        this.timeout(200000);
+        const eventNumbers = [];
+        for (let i = 0; i < 40; i++) {
+          eventNumbers.push(i);
+        }
+        const dataEvents = eventNumbers.map(i => i.toString());
+        const sourceStream = new PassThrough();
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        timedStream.on('data', async (chunk: any) => {
+          // Simulate 1 second of busy work
+          const startTime = Date.now();
+          while (Date.now() - startTime < 1000) {
+            /* empty */
+          }
+        });
+        timedStream.on('end', () => {
+          clearInterval(interval);
+          const totalMilliseconds = timedStream.getTotalDurationMs();
+          try {
+            assert(totalMilliseconds > 39000);
+            assert(totalMilliseconds < 41000);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        });
+        // First load the stream with events.
+        for (let i = 0; i < 20; i++) {
+          const dataEvent = dataEvents.shift();
+          sourceStream.write(dataEvent);
+        }
+        // Then rows get sent every 5 seconds.
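+        // With ~1 second of busy work per row and 40 rows in total, the
+        // assertions above expect roughly 40 seconds of measured busy time.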
+        const interval = setInterval(() => {
+          if (dataEvents.length > 0) {
+            const dataEvent = dataEvents.shift();
+            sourceStream.write(dataEvent);
+          } else {
+            sourceStream.emit('end');
+          }
+        }, 5000);
+      });
+    });
+  });
+  describe('while iterating through a stream loop', () => {
+    describe('with no delay from server', () => {
+      it('should measure the total time accurately for a series of 30 rows', async function () {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        for await (const chunk of timedStream as unknown as PassThrough) {
+          // Simulate 1 second of busy work
+          const startTime = Date.now();
+          while (Date.now() - startTime < 1000) {
+            /* empty */
+          }
+        }
+        const totalMilliseconds = timedStream.getTotalDurationMs();
+        assert(totalMilliseconds > 29000);
+        assert(totalMilliseconds < 31000);
+      });
+      it('should measure the total time accurately for a series of 30 rows with an async call', async function () {
+        this.timeout(200000);
+        const sourceStream = Readable.from(numberGenerator(30));
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // iterate stream
+        for await (const chunk of timedStream as unknown as PassThrough) {
+          // Sleep for 1 second; awaited idle time should not count as busy work
+          const sleep = (ms: number) =>
+            new Promise(resolve => setTimeout(resolve, ms));
+          await sleep(1000);
+        }
+        const totalMilliseconds = timedStream.getTotalDurationMs();
+        assert(totalMilliseconds < 500);
+      });
+    });
+    describe('with delay from server', () => {
+      it('should measure the total time accurately for a series of 10 rows', function (done) {
+        this.timeout(200000);
+        const dataEvents = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(i =>
+          i.toString(),
+        );
+        const sourceStream = new PassThrough();
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream);
+
+        setTimeout(async () => {
+          try {
+            // iterate stream
+            for await (const chunk of timedStream as unknown as PassThrough) {
+              // Simulate 1 second of busy work
+              const startTime = Date.now();
+              while (Date.now() - startTime < 1000) {
+                /* empty */
+              }
+            }
+            clearInterval(interval);
+            const totalMilliseconds = timedStream.getTotalDurationMs();
+            // totalMilliseconds should be around 10 seconds, 1 per row
+            assert(totalMilliseconds > 9000);
+            assert(totalMilliseconds < 11000);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        }, 500);
+
+        const interval = setInterval(() => {
+          if (dataEvents.length > 0) {
+            const dataEvent = dataEvents.shift();
+            sourceStream.write(dataEvent);
+          } else {
+            sourceStream.emit('end');
+          }
+        }, 5000);
+      });
+      it('should measure the total time accurately for a series of 40 rows with backpressure and a delay', async function () {
+        this.timeout(200000);
+        const eventNumbers = [];
+        for (let i = 0; i < 40; i++) {
+          eventNumbers.push(i);
+        }
+        const dataEvents = eventNumbers.map(i => i.toString());
+        const sourceStream = new PassThrough();
+        const timedStream = new TimedStream({});
+        // @ts-ignore
+        sourceStream.pipe(timedStream as unknown as WritableStream);
+        // First load the stream with events.
+        for (let i = 0; i < 20; i++) {
+          const dataEvent = dataEvents.shift();
+          sourceStream.write(dataEvent);
+        }
+        // Then rows get sent every 5 seconds.
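+        // As in the handler-based variant above, ~1 second of busy work per
+        // row across 40 rows should yield roughly 40 seconds of measured
+        // busy time (the assertions below allow 37-43 seconds).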
+ const interval = setInterval(() => { + if (dataEvents.length > 0) { + const dataEvent = dataEvents.shift(); + sourceStream.write(dataEvent); + } else { + sourceStream.emit('end'); + } + }, 5000); + // iterate stream + for await (const chunk of timedStream as unknown as PassThrough) { + // Simulate 1 second of busy work + const startTime = Date.now(); + while (Date.now() - startTime < 1000) { + /* empty */ + } + } + clearInterval(interval); + const totalMilliseconds = timedStream.getTotalDurationMs(); + assert(totalMilliseconds > 37000); + assert(totalMilliseconds < 43000); + }); + }); + }); +}); diff --git a/test/utils/proto-bytes.ts b/test/utils/proto-bytes.ts new file mode 100644 index 000000000..031ade177 --- /dev/null +++ b/test/utils/proto-bytes.ts @@ -0,0 +1,144 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import {Readable} from 'stream'; +import {google} from '../../protos/protos'; +import {PreparedStatement} from '../../src/execute-query/preparedstatement'; + +export const createMetadata = ( + ...values: [string | null, google.bigtable.v2.Type][] +): google.bigtable.v2.ExecuteQueryResponse => { + return google.bigtable.v2.ExecuteQueryResponse.create({ + metadata: { + protoSchema: { + columns: values.map(v => + google.bigtable.v2.ColumnMetadata.create({ + name: v[0], + type: v[1], + }), + ), + }, + }, + }); +}; + +export const createPreparedStatement = ( + ...values: [string | null, google.bigtable.v2.Type][] +): PreparedStatement => { + const metadataPB = createMetadata(...values).metadata!; + const prepareQueryResponse = google.bigtable.v2.PrepareQueryResponse.create({ + metadata: metadataPB, + preparedQuery: 'xd', + validUntil: null, + }); + return new PreparedStatement( + undefined as any, + prepareQueryResponse, + {} as any, + {}, + ); +}; + +export const createPrepareQueryResponse = ( + ...values: [string | null, google.bigtable.v2.Type][] +): google.bigtable.v2.PrepareQueryResponse => { + const metadataPB = createMetadata(...values).metadata!; + return google.bigtable.v2.PrepareQueryResponse.create({ + metadata: metadataPB, + preparedQuery: 'xd', + validUntil: null, + }); +}; + +export const pbType = ( + value: google.bigtable.v2.IType, +): google.bigtable.v2.Type => { + return google.bigtable.v2.Type.create(value); +}; + +export const createProtoRows = ( + resumeToken?: string, + batchChecksum?: number, + reset?: boolean, + ...values: google.bigtable.v2.IValue[] +): google.bigtable.v2.ExecuteQueryResponse => { + const bytes = google.bigtable.v2.ProtoRows.encode( + google.bigtable.v2.ProtoRows.create({ + values: values.map(v => google.bigtable.v2.Value.create(v)), + }), + ).finish(); + + return { + response: 'results', + results: { + protoRowsBatch: values.length > 0 ? {batchData: bytes} : undefined, + resumeToken: resumeToken ? 
Buffer.from(resumeToken) : undefined, + batchChecksum: batchChecksum, + reset: reset || false, + }, + } as google.bigtable.v2.ExecuteQueryResponse; +}; + +interface BigtableError { + status: any; + message: any; + code: any; +} + +interface CustomCallback { + callback: any; +} + +type DataObject = + | BigtableError + | google.bigtable.v2.ExecuteQueryResponse + | CustomCallback; + +export class ArrayReadableStream extends Readable { + private data: DataObject[]; + private index: number; + private aborted: boolean; + + constructor(data: DataObject[]) { + super({objectMode: true, highWaterMark: 0}); + this.data = data; + this.index = 0; + this.aborted = false; + } + + _read() { + if (!this.aborted && this.index < this.data.length) { + const item = this.data[this.index]; + this.index++; + if ((item as BigtableError).code !== undefined) { + this.emit('error', item as BigtableError); + } else if ((item as CustomCallback).callback !== undefined) { + (item as CustomCallback).callback(); + this._read(); + } else { + this.push(item as google.bigtable.v2.ExecuteQueryResponse); + } + } else { + this.push(null); // No more data + } + } + + abort() { + this.aborted = true; + super.destroy(); + } + + end() { + super.destroy(); + } +} diff --git a/testproxy/known_failures.txt b/testproxy/known_failures.txt index 64c5f23af..f8210a570 100644 --- a/testproxy/known_failures.txt +++ b/testproxy/known_failures.txt @@ -15,6 +15,4 @@ TestSampleRowKeys_Retry_WithRoutingCookie\| TestSampleRowKeys_Generic_CloseClient\| TestSampleRowKeys_Generic_Headers\| TestSampleRowKeys_NoRetry_NoEmptyKey\| -TestSampleRowKeys_Retry_WithRetryInfo\| -TestExecuteQuery_EmptyResponse\| -TestExecuteQuery_SingleSimpleRow +TestSampleRowKeys_Retry_WithRetryInfo diff --git a/testproxy/services/execute-query.js b/testproxy/services/execute-query.js new file mode 100644 index 000000000..1c6ba26ef --- /dev/null +++ b/testproxy/services/execute-query.js @@ -0,0 +1,65 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
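+
+// Test proxy handler for the ExecuteQuery RPC: it parses the request
+// parameters, prepares and executes the query through the client under test,
+// and converts the resulting rows and metadata back into the protobuf shape
+// the test runner expects.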
+
+'use strict';
+
+const grpc = require('@grpc/grpc-js');
+const {
+  parseMetadata,
+  parseRows,
+  parseParameters,
+} = require('../../build/testproxy/services/utils/request/createExecuteQueryResponse.js');
+const normalizeCallback = require('./utils/normalize-callback.js');
+
+const executeQuery = ({clientMap}) =>
+  normalizeCallback(async rawRequest => {
+    const {request, clientId} = rawRequest.request;
+
+    const {instanceName} = request;
+    const bigtable = clientMap.get(clientId);
+    const instance = bigtable.instance(instanceName);
+
+    try {
+      const [parameters, parameterTypes] = await parseParameters(
+        request.params,
+      );
+      const [preparedStatement] = await instance.prepareStatement({
+        query: request.query,
+        parameterTypes: parameterTypes,
+      });
+      const [rows] = await instance.executeQuery({
+        preparedStatement,
+        parameters: parameters,
+        retryOptions: {},
+      });
+
+      const parsedMetadata = await parseMetadata(preparedStatement);
+      const parsedRows = await parseRows(preparedStatement, rows);
+      return {
+        status: {code: grpc.status.OK, details: []},
+        rows: parsedRows,
+        metadata: {columns: parsedMetadata},
+      };
+    } catch (e) {
+      return {
+        status: {
+          code: e.code || grpc.status.INTERNAL,
+          details: [], // details must be an empty array (rather than e.details) for the test runner to return the status. This is tracked in https://b.corp.google.com/issues/383096533.
+          message: e.message,
+        },
+      };
+    }
+  });
+
+module.exports = executeQuery;
diff --git a/testproxy/services/index.js b/testproxy/services/index.js
index 8e539fec3..09b169109 100644
--- a/testproxy/services/index.js
+++ b/testproxy/services/index.js
@@ -24,6 +24,7 @@ const readRow = require('./read-row.js');
 const readRows = require('./read-rows.js');
 const removeClient = require('./remove-client.js');
 const sampleRowKeys = require('./sample-row-keys.js');
+const executeQuery = require('./execute-query.js');
 
 /*
  * Starts the client pool map and retrieves the object that
@@ -45,6 +46,7 @@ function getServicesImplementation() {
     readRows: readRows({clientMap}),
     removeClient: removeClient({clientMap}),
     sampleRowKeys: sampleRowKeys({clientMap}),
+    executeQuery: executeQuery({clientMap}),
   };
 }
diff --git a/testproxy/services/utils/request/createExecuteQueryResponse.ts b/testproxy/services/utils/request/createExecuteQueryResponse.ts
new file mode 100644
index 000000000..da4f4eeb6
--- /dev/null
+++ b/testproxy/services/utils/request/createExecuteQueryResponse.ts
@@ -0,0 +1,223 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
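+
+// Helpers for the ExecuteQuery test proxy service: parseMetadata and
+// parseRows convert a prepared statement's metadata and the client's
+// QueryResultRow values back into protobuf messages, while parseParameters
+// converts incoming protobuf parameter values into JS values and SqlTypes.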
+
+import * as protos from '../../../../protos/protos';
+import {SqlTypes} from '../../../../src';
+import {MetadataConsumer} from '../../../../src/execute-query/metadataconsumer';
+import {
+  convertJsValueToValue,
+  executeQueryTypeToPBType,
+} from '../../../../src/execute-query/parameterparsing';
+import {PreparedStatement} from '../../../../src/execute-query/preparedstatement';
+import {ExecuteQueryStreamTransformWithMetadata} from '../../../../src/execute-query/queryresultrowtransformer';
+import {
+  QueryResultRow,
+  SqlValue,
+  EncodedKeyMap,
+  Struct,
+  BigtableMap,
+  ExecuteQueryParameterValue,
+} from '../../../../src/execute-query/values';
+import * as is from 'is';
+
+async function getMetadataFromPreparedStatement(
+  preparedStatement: PreparedStatement,
+): Promise<SqlTypes.ResultSetMetadata> {
+  return await new Promise((resolve, reject) => {
+    preparedStatement.getData(
+      (
+        err?: Error,
+        preparedQueryBytes?: Uint8Array | string,
+        metadata?: SqlTypes.ResultSetMetadata,
+      ) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(metadata!);
+        }
+      },
+      100,
+    );
+  });
+}
+
+export async function parseMetadata(preparedStatement: PreparedStatement) {
+  const metadata = await getMetadataFromPreparedStatement(preparedStatement);
+  const values = metadata.columns.map((v, i) => {
+    return [metadata.getFieldNameAtIndex(i), executeQueryTypeToPBType(v)];
+  });
+  return values.map(v =>
+    protos.google.bigtable.v2.ColumnMetadata.create({
+      name: v[0] as any,
+      type: v[1] as any,
+    }),
+  );
+}
+
+function convertToArray(
+  value: SqlValue,
+  type: SqlTypes.ArrayType,
+): protos.google.bigtable.v2.IValue {
+  if (!is.array(value)) {
+    throw new Error(`Value ${value} cannot be converted to an array.`);
+  }
+  const arrayValue = value as Array<SqlValue>;
+  return {
+    arrayValue: {
+      values: arrayValue.map((element, index) => {
+        try {
+          return convertAnyValueToPb(element, type.elementType);
+          // eslint-disable-next-line
+        } catch (conversionError: any) {
+          if (conversionError instanceof Error) {
+            throw new Error(
+              `Error while converting element ${index} of an array: ${conversionError.message}`,
+            );
+          } else {
+            throw conversionError;
+          }
+        }
+      }),
+    },
+  };
+}
+
+function convertToStruct(
+  value: SqlValue,
+  type: SqlTypes.StructType,
+): protos.google.bigtable.v2.IValue {
+  if (!(typeof value === 'object' && value instanceof Struct)) {
+    throw new Error(`Value ${value} cannot be converted to a struct.`);
+  }
+  const structValue = value as Struct;
+  return {
+    arrayValue: {
+      values: structValue.values.map((element, index) => {
+        try {
+          return convertAnyValueToPb(element, type.get(index));
+          // eslint-disable-next-line
+        } catch (conversionError: any) {
+          if (conversionError instanceof Error) {
+            throw new Error(
+              `Error while converting element ${index} of a Struct to field of type ${
+                type.get(index).type
+              }: ${conversionError.message}`,
+            );
+          } else {
+            throw conversionError;
+          }
+        }
+      }),
+    },
+  };
+}
+
+function convertToMap(
+  value: SqlValue,
+  type: SqlTypes.MapType,
+): protos.google.bigtable.v2.IValue {
+  if (!(value instanceof EncodedKeyMap)) {
+    throw new Error(`Value ${value} cannot be converted to a map.`);
+  }
+  const mapValue = value as BigtableMap;
+  return {
+    arrayValue: {
+      values: Array.from(mapValue.entries()).flatMap(([key, value]) => {
+        return {
+          arrayValue: {
+            values: [
+              convertMapEntry(key, type.keyType, key, 'key'),
+              convertMapEntry(value, type.valueType, key, 'value'),
+            ],
+          },
+        };
+      }),
+    },
+  };
+}
+
+function convertMapEntry(
+  value: SqlValue,
+  type: SqlTypes.Type,
+  keyName: string | bigint | Uint8Array | null,
+  keyOrValue: 'key' | 'value',
+): protos.google.bigtable.v2.IValue {
+  try {
+    return convertAnyValueToPb(value, type);
+    // eslint-disable-next-line
+  } catch (conversionError: any) {
+    if (conversionError instanceof Error) {
+      throw new Error(
+        `Error while converting element ${keyName} of a Map to map ${keyOrValue} of type ${type.type}: ${conversionError.message}`,
+      );
+    } else {
+      throw conversionError;
+    }
+  }
+}
+
+function convertAnyValueToPb(
+  value: SqlValue,
+  type: SqlTypes.Type,
+): protos.google.bigtable.v2.IValue {
+  if (value === null || value === undefined) {
+    return protos.google.bigtable.v2.Value.create({});
+  }
+  if (type.type === 'array') {
+    return convertToArray(value, type);
+  } else if (type.type === 'struct') {
+    return convertToStruct(value, type);
+  } else if (type.type === 'map') {
+    return convertToMap(value, type);
+  } else {
+    return convertJsValueToValue(value as ExecuteQueryParameterValue, type);
+  }
+}
+
+export async function parseRows(
+  preparedStatement: PreparedStatement,
+  rows: QueryResultRow[],
+) {
+  const metadata = await getMetadataFromPreparedStatement(preparedStatement);
+  const parsedRows = rows.map(row => {
+    const rowValues = metadata.columns.map((type, i) => {
+      const value = row.get(i);
+      return convertAnyValueToPb(value, type);
+    });
+    return {values: rowValues};
+  });
+  return parsedRows;
+}
+
+export async function parseParameters(params: {
+  [param: string]: protos.google.bigtable.v2.Value;
+}) {
+  const parameters: {[param: string]: SqlValue} = {};
+  const parameterTypes: {[param: string]: SqlTypes.Type} = {};
+  const transformer = new ExecuteQueryStreamTransformWithMetadata(
+    null as any,
+    () => false,
+    'utf-8',
+    {},
+  );
+  for (const [paramName, pbValue] of Object.entries(params)) {
+    const type = MetadataConsumer.parsePBType(
+      pbValue.type as protos.google.bigtable.v2.Type,
+    );
+    const value = transformer.valueToJsType(pbValue, type);
+    parameters[paramName] = value;
+    parameterTypes[paramName] = type;
+  }
+  return [parameters, parameterTypes];
+}
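+
+// Usage sketch (mirrors testproxy/services/execute-query.js): parseParameters
+// runs on the raw request params before the query is prepared and executed,
+// and parseMetadata/parseRows convert the results back to protobufs:
+//
+//   const [parameters, parameterTypes] = await parseParameters(request.params);
+//   const [preparedStatement] = await instance.prepareStatement({
+//     query: request.query,
+//     parameterTypes,
+//   });
+//   const [rows] = await instance.executeQuery({preparedStatement, parameters});
+//   const columns = await parseMetadata(preparedStatement);
+//   const parsedRows = await parseRows(preparedStatement, rows);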