diff --git a/.eslintignore b/.eslintignore index d1d207128..8d5c00cca 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,8 +1,10 @@ # TypeScript: js files are generated using tsc transpiler -/test/unit/typescript/*.js +/test/other/typescript/* # Integer is part of the Closure Library /lib/types/integer.js # TinkerPop-based files -/lib/datastax/graph/type-serializers.js \ No newline at end of file +/lib/datastax/graph/type-serializers.js + +/out/**/*.js \ No newline at end of file diff --git a/.eslintrc.js b/.eslintrc.js index ffce4d35b..fac9659e9 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,4 +1,9 @@ module.exports = { + plugins: [ + "@stylistic", + '@typescript-eslint' + ], + parser: '@typescript-eslint/parser', "env": { "commonjs": true, "es6": true @@ -6,7 +11,7 @@ module.exports = { "parserOptions": { "ecmaVersion": 2017 }, - "extends": "eslint:recommended", + "extends": ["eslint:recommended", "plugin:@typescript-eslint/eslint-recommended", 'plugin:@typescript-eslint/recommended'], "rules": { "indent": [ "error", @@ -32,7 +37,16 @@ module.exports = { "strict": ["error", "global"], "array-callback-return": "error", "curly": "error", - "no-unused-vars": ["error", { "args": "none" }], + "no-prototype-builtins": "off", + "@typescript-eslint/no-namespace": "off", + "no-unused-vars": "off", + "@typescript-eslint/no-duplicate-enum-values": "off", + "@typescript-eslint/no-unused-vars": ["error", { + "argsIgnorePattern": "^_", + "caughtErrorsIgnorePattern": "^_" + }], + "@typescript-eslint/no-unsafe-function-type": "off", + "@typescript-eslint/ban-ts-comment": "off", "global-require": "error", "eqeqeq": ["error", "allow-null"], @@ -67,7 +81,7 @@ module.exports = { "no-new-wrappers": "error", "no-octal-escape": "error", "no-proto": "error", - "no-redeclare": "error", + "no-redeclare": "off", "no-restricted-properties": ["error", { object: "arguments", property: "callee", @@ -88,6 +102,8 @@ module.exports = { allowShortCircuit: false, allowTernary: false, }], + "@typescript-eslint/no-this-alias": "off", + "@typescript-eslint/no-explicit-any": "off", "no-useless-call": "off", "no-useless-concat": "error", "no-useless-escape": "error", @@ -104,7 +120,19 @@ module.exports = { "yoda": "error", "constructor-super": "error", "require-await": "error", - "require-atomic-updates": "off" + "require-atomic-updates": "off", + "prefer-rest-params": "off", + "sort-imports": + [ + "error", + { + "ignoreCase": true, + "ignoreDeclarationSort": true + } + ], + "@typescript-eslint/no-require-imports": "off", + "prefer-spread": "off", + "@typescript-eslint/no-unsafe-function-types": "off", }, "globals": { "Buffer": false, diff --git a/.gitignore b/.gitignore index 1677f5bc7..f4118cf95 100644 --- a/.gitignore +++ b/.gitignore @@ -23,4 +23,6 @@ out/ /docs **/typescript/*.js **/typescript/*.js.map -**/typescript/generated.ts \ No newline at end of file + +tsdoc-metadata.json +temp \ No newline at end of file diff --git a/.madgerc b/.madgerc new file mode 100644 index 000000000..047209a13 --- /dev/null +++ b/.madgerc @@ -0,0 +1,8 @@ +{ + "detectiveOptions": { + "ts": { + "skipTypeImports": true + } + }, + "fileExtensions": ["ts"] +} \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index eee0ef332..a011e372a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,16 +9,15 @@ jobs: include: - stage: "tests" name: "Run eslint" - script: "npm install -g eslint@4; npm run eslint;" + script: "npm run eslint;" node_js: "18" - name: "TypeScript 4.9 generation and compilation tests" node_js: "18" script: - npm 
install -g typescript@4.9; - - pushd test/unit/typescript/ + - pushd test/other/typescript/ + - npm install - tsc -p . - - node -e "require('./api-generation-test').generate()" > generated.ts - - tsc generated.ts - name: "Unit tests - Node.js 20" node_js: "20" script: "npm test" @@ -39,7 +38,6 @@ jobs: node_js: "18" script: - npm install -g typescript; - - pushd test/unit/typescript/ + - pushd test/other/typescript/ + - npm install - tsc -p . - - node -e "require('./api-generation-test').generate()" > generated.ts - - tsc generated.ts diff --git a/Jenkinsfile b/Jenkinsfile index 7d7b74a93..f42edcab4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -78,15 +78,15 @@ def installDriverAndDependencies() { ''' sh label: 'Install driver dependencies', script: '''#!/bin/bash -lex - npm install mocha-jenkins-reporter@0 npm install kerberos@1 - npm install -g eslint@4 ''' } -def executeLinter() { - sh label: 'Perform static analysis of source code', script: '''#!/bin/bash -lex +def executeStaticAnalysisAndBundler() { + sh label: 'Perform static analysis and bundling of source code', script: '''#!/bin/bash -lex npm run eslint + npm run circular-dep + npm run bundle ''' } @@ -219,7 +219,7 @@ def describeAdhocTestingStage() { def describeInstallAndLint(){ describePerCommitStage() installDriverAndDependencies() - executeLinter() + executeStaticAnalysisAndBundler() } // branch pattern for cron @@ -556,7 +556,7 @@ pipeline { } stage('Execute-Linter') { steps { - executeLinter() + executeStaticAnalysisAndBundler() } } stage('Execute-Tests') { diff --git a/api-extractor.json b/api-extractor.json new file mode 100644 index 000000000..d5db268a2 --- /dev/null +++ b/api-extractor.json @@ -0,0 +1,459 @@ +/** + * Config file for API Extractor. For more info, please visit: https://api-extractor.com + */ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + + /** + * Optionally specifies another JSON config file that this file extends from. This provides a way for + * standard settings to be shared across multiple projects. + * + * If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains + * the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be + * resolved using NodeJS require(). + * + * SUPPORTED TOKENS: none + * DEFAULT VALUE: "" + */ + // "extends": "./shared/api-extractor-base.json" + // "extends": "my-package/include/api-extractor-base.json" + + /** + * Determines the "" token that can be used with other config file settings. The project folder + * typically contains the tsconfig.json and package.json config files, but the path is user-defined. + * + * The path is resolved relative to the folder of the config file that contains the setting. + * + * The default value for "projectFolder" is the token "", which means the folder is determined by traversing + * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder + * that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error + * will be reported. + * + * SUPPORTED TOKENS: + * DEFAULT VALUE: "" + */ + "projectFolder": ".", + + /** + * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor + * analyzes the symbols exported by this module. + * + * The file extension must be ".d.ts" and not ".ts". 
+ * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + */ + "mainEntryPointFilePath": "/out/cassandra-rollup.d.ts", + + /** + * A list of NPM package names whose exports should be treated as part of this package. + * + * For example, suppose that Webpack is used to generate a distributed bundle for the project "library1", + * and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part + * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly + * imports library2. To avoid this, we might specify: + * + * "bundledPackages": [ "library2" ], + * + * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been + * local files for library1. + * + * The "bundledPackages" elements may specify glob patterns using minimatch syntax. To ensure deterministic + * output, globs are expanded by matching explicitly declared top-level dependencies only. For example, + * the pattern below will NOT match "@my-company/example" unless it appears in a field such as "dependencies" + * or "devDependencies" of the project's package.json file: + * + * "bundledPackages": [ "@my-company/*" ], + */ + "bundledPackages": [], + + /** + * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files + * will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead. + * To use the OS's default newline kind, specify "os". + * + * DEFAULT VALUE: "crlf" + */ + // "newlineKind": "crlf", + + /** + * Specifies how API Extractor sorts members of an enum when generating the .api.json file. By default, the output + * files will be sorted alphabetically, which is "by-name". To keep the ordering in the source code, specify + * "preserve". + * + * DEFAULT VALUE: "by-name" + */ + // "enumMemberOrder": "by-name", + + /** + * Set to true when invoking API Extractor's test harness. When `testMode` is true, the `toolVersion` field in the + * .api.json file is assigned an empty string to prevent spurious diffs in output files tracked for tests. + * + * DEFAULT VALUE: "false" + */ + // "testMode": false, + + /** + * Determines how the TypeScript compiler engine will be invoked by API Extractor. + */ + "compiler": { + /** + * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * Note: This setting will be ignored if "overrideTsconfig" is used. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/tsconfig.json" + */ + // "tsconfigFilePath": "/tsconfig.json", + /** + * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk. + * The object must conform to the TypeScript tsconfig schema: + * + * http://json.schemastore.org/tsconfig + * + * If omitted, then the tsconfig.json file will be read from the "projectFolder". + * + * DEFAULT VALUE: no overrideTsconfig section + */ + // "overrideTsconfig": { + // . . . + // } + /** + * This option causes the compiler to be invoked with the --skipLibCheck option. 
This option is not recommended + * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when + * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses + * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck. + * + * DEFAULT VALUE: false + */ + // "skipLibCheck": true, + }, + + /** + * Configures how the API report file (*.api.md) will be generated. + */ + "apiReport": { + /** + * (REQUIRED) Whether to generate an API report. + */ + "enabled": true + + /** + * The base filename for the API report files, to be combined with "reportFolder" or "reportTempFolder" + * to produce the full file path. The "reportFileName" should not include any path separators such as + * "\" or "/". The "reportFileName" should not include a file extension, since API Extractor will automatically + * append an appropriate file extension such as ".api.md". If the "reportVariants" setting is used, then the + * file extension includes the variant name, for example "my-report.public.api.md" or "my-report.beta.api.md". + * The "complete" variant always uses the simple extension "my-report.api.md". + * + * Previous versions of API Extractor required "reportFileName" to include the ".api.md" extension explicitly; + * for backwards compatibility, that is still accepted but will be discarded before applying the above rules. + * + * SUPPORTED TOKENS: , + * DEFAULT VALUE: "" + */ + // "reportFileName": "", + + /** + * To support different approval requirements for different API levels, multiple "variants" of the API report can + * be generated. The "reportVariants" setting specifies a list of variants to be generated. If omitted, + * by default only the "complete" variant will be generated, which includes all @internal, @alpha, @beta, + * and @public items. Other possible variants are "alpha" (@alpha + @beta + @public), "beta" (@beta + @public), + * and "public" (@public only). + * + * DEFAULT VALUE: [ "complete" ] + */ + // "reportVariants": ["public", "beta"], + + /** + * Specifies the folder where the API report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy, + * e.g. for an API review. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/etc/" + */ + // "reportFolder": "/etc/", + + /** + * Specifies the folder where the temporary report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * After the temporary file is written to disk, it is compared with the file in the "reportFolder". + * If they are different, a production build will fail. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/" + */ + // "reportTempFolder": "/temp/", + + /** + * Whether "forgotten exports" should be included in the API report file. Forgotten exports are declarations + * flagged with `ae-forgotten-export` warnings. See https://api-extractor.com/pages/messages/ae-forgotten-export/ to + * learn more. 
+ * + * DEFAULT VALUE: "false" + */ + // "includeForgottenExports": false + }, + + /** + * Configures how the doc model file (*.api.json) will be generated. + */ + "docModel": { + /** + * (REQUIRED) Whether to generate a doc model file. + */ + "enabled": true + + /** + * The output path for the doc model file. The file extension should be ".api.json". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/.api.json" + */ + // "apiJsonFilePath": "/temp/.api.json", + + /** + * Whether "forgotten exports" should be included in the doc model file. Forgotten exports are declarations + * flagged with `ae-forgotten-export` warnings. See https://api-extractor.com/pages/messages/ae-forgotten-export/ to + * learn more. + * + * DEFAULT VALUE: "false" + */ + // "includeForgottenExports": false, + + /** + * The base URL where the project's source code can be viewed on a website such as GitHub or + * Azure DevOps. This URL path corresponds to the `` path on disk. + * + * This URL is concatenated with the file paths serialized to the doc model to produce URL file paths to individual API items. + * For example, if the `projectFolderUrl` is "https://github.com/microsoft/rushstack/tree/main/apps/api-extractor" and an API + * item's file path is "api/ExtractorConfig.ts", the full URL file path would be + * "https://github.com/microsoft/rushstack/tree/main/apps/api-extractor/api/ExtractorConfig.js". + * + * This setting can be omitted if you don't need source code links in your API documentation reference. + * + * SUPPORTED TOKENS: none + * DEFAULT VALUE: "" + */ + // "projectFolderUrl": "http://github.com/path/to/your/projectFolder" + }, + + /** + * Configures how the .d.ts rollup file will be generated. + */ + "dtsRollup": { + /** + * (REQUIRED) Whether to generate the .d.ts rollup file. + */ + "enabled": true, + + /** + * Specifies the output path for a .d.ts rollup file to be generated without any trimming. + * This file will include all declarations that are exported by the main entry point. + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/dist/.d.ts" + */ + "untrimmedFilePath": "", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for an "alpha" release. + * This file will include only declarations that are marked as "@public", "@beta", or "@alpha". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "alphaTrimmedFilePath": "/dist/-alpha.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release. + * This file will include only declarations that are marked as "@public" or "@beta". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". 
+ * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "betaTrimmedFilePath": "/dist/-beta.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release. + * This file will include only declarations that are marked as "@public". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + "publicTrimmedFilePath": "/dist/-public.d.ts", + + /** + * When a declaration is trimmed, by default it will be replaced by a code comment such as + * "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the + * declaration completely. + * + * DEFAULT VALUE: false + */ + // "omitTrimmingComments": true + }, + + /** + * Configures how the tsdoc-metadata.json file will be generated. + */ + "tsdocMetadata": { + /** + * Whether to generate the tsdoc-metadata.json file. + * + * DEFAULT VALUE: true + */ + // "enabled": true, + /** + * Specifies where the TSDoc metadata file should be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * The default value is "", which causes the path to be automatically inferred from the "tsdocMetadata", + * "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup + * falls back to "tsdoc-metadata.json" in the package folder. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "tsdocMetadataFilePath": "/dist/tsdoc-metadata.json" + }, + + /** + * Configures how API Extractor reports error and warning messages produced during analysis. + * + * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages. + */ + "messages": { + /** + * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing + * the input .d.ts files. + * + * TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. + */ + "compilerMessageReporting": { + /** + * Configures the default routing for messages that don't match an explicit rule in this table. + */ + "default": { + /** + * Specifies whether the message should be written to the the tool's output log. Note that + * the "addToApiReportFile" property may supersede this option. + * + * Possible values: "error", "warning", "none" + * + * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail + * and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes + * the "--local" option), the warning is displayed but the build will not fail. + * + * DEFAULT VALUE: "warning" + */ + "logLevel": "warning" + + /** + * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md), + * then the message will be written inside that file; otherwise, the message is instead logged according to + * the "logLevel" option. + * + * DEFAULT VALUE: false + */ + // "addToApiReportFile": false + } + + // "TS2551": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . 
+ }, + + /** + * Configures handling of messages reported by API Extractor during its analysis. + * + * API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag" + * + * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings + */ + "extractorMessageReporting": { + "default": { + "logLevel": "warning" + // "addToApiReportFile": false + }, + + "ae-missing-release-tag": { + "logLevel": "none", + "addToApiReportFile": false + }, + + "ae-unresolved-link": { + "logLevel": "none", + "addToApiReportFile": false + } + // + // . . . + }, + + /** + * Configures handling of messages reported by the TSDoc parser when analyzing code comments. + * + * TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. + */ + "tsdocMessageReporting": { + "default": { + "logLevel": "none" + // "addToApiReportFile": false + } + + // "tsdoc-link-tag-unescaped-text": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + } + } +} diff --git a/dist/cassandra-driver-public.d.ts b/dist/cassandra-driver-public.d.ts new file mode 100644 index 000000000..2c78290fb --- /dev/null +++ b/dist/cassandra-driver-public.d.ts @@ -0,0 +1,7975 @@ +import { ConnectionOptions } from 'tls'; +import EventEmitter from 'events'; +import { EventEmitter as EventEmitter_2 } from 'stream'; +import { default as Long } from 'long'; +import * as Long_2 from 'long'; +import Long__default from 'long'; +import { Readable } from 'stream'; +import { Socket } from 'net'; + +export declare const addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; +}; + +/* Excluded from this release type: AddressResolver */ + +/** + * @class + * @classdesc + * Translates IP addresses received from Cassandra nodes into locally queryable + * addresses. + *

+ * The driver auto-detects new Cassandra nodes added to the cluster through server
+ * side pushed notifications and through checking the system tables. For each
+ * node, the address received will correspond to the address set as
+ * rpc_address in the node yaml file. In most cases, this is the correct
+ * address for the driver to use and it is what is used by default. However,
+ * sometimes the addresses received through this mechanism will either not be
+ * reachable directly by the driver or should not be the preferred address to use
+ * to reach the node (for instance, the rpc_address set on Cassandra nodes
+ * might be a private IP, but some clients may have to use a public IP, or
+ * go through a router to reach that node). This interface handles
+ * such cases, by allowing an address sent by a Cassandra node to be translated
+ * into another address for the driver to use when connecting.
+ *
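To make the contract described above concrete, a minimal custom translator only needs to implement translate() and invoke the callback with an endpoint in the ipAddress:port format. This is a sketch only: the NAT-style mapping is illustrative, and passing the translator through the client's policies.addressResolution option is an assumption about the options layout.

```typescript
import { Client, policies } from 'cassandra-driver';

// Illustrative translator: maps private node addresses to a hypothetical public prefix.
class NatAddressTranslator extends policies.addressResolution.AddressTranslator {
  translate(address: string, port: number, callback: Function): void {
    const publicAddress = address.replace(/^10\.0\./, '203.0.113.'); // hypothetical mapping
    callback(`${publicAddress}:${port}`); // endpoint must be in the 'ipAddress:port' format
  }
}

// Assumed wiring through the address resolution policy option.
const client = new Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  policies: { addressResolution: new NatAddressTranslator() }
});
```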

+ * Please note that the contact point addresses provided while creating the
+ * {@link Client} instance are not "translated"; only IP addresses retrieved from or sent
+ * by Cassandra nodes to the driver are.
+ */
+export declare class AddressTranslator {
+ /**
+ * Translates a Cassandra rpc_address to another address if necessary.
+ * @param {String} address the address of a node as returned by Cassandra.
+ *

+ * Note that if the rpc_address of a node has been configured to 0.0.0.0 + * server side, then the provided address will be the node listen_address, + * *not* 0.0.0.0. + *

+ * @param {Number} port The port number, as specified in the [protocolOptions]{@link ClientOptions} at Client instance creation (9042 by default). + * @param {Function} callback Callback to invoke with endpoint as first parameter. + * The endpoint is an string composed of the IP address and the port number in the format ipAddress:port. + */ + translate(address: string, port: number, callback: Function): void; +} + +/** + * Creates a new Aggregate. + * @classdesc Describes a CQL aggregate. + * @alias module:metadata~Aggregate + * @constructor + */ +export declare class Aggregate { + /* Excluded from this release type: name */ + /** + * Name of the keyspace where the aggregate is declared. + */ + keyspaceName: string; + /** + * Signature of the aggregate. + * @type {Array.} + */ + signature: Array; + /** + * List of the CQL aggregate argument types. + * @type {Array.<{code, info}>} + */ + argumentTypes: Array<{ + code: number; + info?: (object | Array | string); + }>; + /** + * State Function. + * @type {String} + */ + stateFunction: string; + /** + * State Type. + * @type {{code, info}} + */ + stateType: { + code: number; + info?: (object | Array | string); + }; + /** + * Final Function. + * @type {String} + */ + finalFunction: string; + /* Excluded from this release type: initConditionRaw */ + /** + * Initial state value of this aggregate. + * @type {String} + */ + initCondition: string; + /** + * Type of the return value. + * @type {{code: number, info: (Object|Array|null)}} + */ + returnType: { + code: number; + info?: (object | Array | string); + }; + /** + * Indicates whether or not this aggregate is deterministic. This means that + * given a particular input, the aggregate will always produce the same output. + * @type {Boolean} + */ + deterministic: boolean; + /* Excluded from this release type: __constructor */ +} + +/** + * @class + * @classdesc + * A load balancing policy wrapper that ensure that only hosts from a provided + * allow list will ever be returned. + *

+ * This policy wraps another load balancing policy and will delegate the choice
+ * of hosts to the wrapped policy with the exception that only hosts contained
+ * in the allow list provided when constructing this policy will ever be
+ * returned. Any host not in the allow list will be considered ignored
+ * and thus will not be connected to.
+ *
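As a usage sketch, the wrapper is constructed with the child policy and the list of allowed endpoints; the addresses and data center name below are placeholders:

```typescript
import { Client, policies } from 'cassandra-driver';

// Only these two endpoints (ipAddress:port) may ever be connected to.
const loadBalancing = new policies.loadBalancing.AllowListPolicy(
  new policies.loadBalancing.DCAwareRoundRobinPolicy(),
  ['10.0.1.101:9042', '10.0.1.102:9042']
);

const client = new Client({
  contactPoints: ['10.0.1.101', '10.0.1.102'],
  localDataCenter: 'datacenter1',
  policies: { loadBalancing }
});
```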

+ * This policy can be useful to ensure that the driver only connects to a
+ * predefined set of hosts. Keep in mind, however, that this policy somewhat
+ * defeats the host auto-detection of the driver. As such, this policy is only
+ * useful in a few special cases or for testing, but is not optimal in general.
+ * If all you want to do is limit connections to hosts of the local
+ * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy
+ * in particular.
+ * @extends LoadBalancingPolicy
+ */
+export declare class AllowListPolicy extends LoadBalancingPolicy {
+ private childPolicy;
+ private allowList;
+ /**
+ * Create a new policy that wraps the provided child policy but only "allows" hosts
+ * from the provided list.
+ * @class
+ * @classdesc
+ * A load balancing policy wrapper that ensures that only hosts from a provided
+ * allow list will ever be returned.
+ *

+ * This policy wraps another load balancing policy and will delegate the choice
+ * of hosts to the wrapped policy with the exception that only hosts contained
+ * in the allow list provided when constructing this policy will ever be
+ * returned. Any host not in the allow list will be considered ignored
+ * and thus will not be connected to.
+ *

+ * This policy can be useful to ensure that the driver only connects to a + * predefined set of hosts. Keep in mind however that this policy defeats + * somewhat the host auto-detection of the driver. As such, this policy is only + * useful in a few special cases or for testing, but is not optimal in general. + * If all you want to do is limiting connections to hosts of the local + * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy + * in particular. + * @param {LoadBalancingPolicy} childPolicy the wrapped policy. + * @param {Array.} allowList The hosts address in the format ipAddress:port. + * Only hosts from this list may get connected + * to (whether they will get connected to or not depends on the child policy). + * @constructor + */ + constructor(childPolicy: LoadBalancingPolicy, allowList: Array); + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + /** + * Uses the child policy to return the distance to the host if included in the allow list. + * Any host not in the while list will be considered ignored. + * @param host + */ + getDistance(host: Host): distance; + /** + * Checks if the host is in the allow list. + * @param {Host} host + * @returns {boolean} + * @private + */ + private _contains; + /* Excluded from this release type: newQueryPlan */ + private _filter; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * Represents an error that is raised when one of the arguments provided to a method is not valid + */ +export declare class ArgumentError extends DriverError { + /** + * Represents an error that is raised when one of the arguments provided to a method is not valid + * @param {String} message + * @constructor + */ + constructor(message: string); +} + +declare type ArrayOrObject = any[] | { + [key: string]: any; +} | null; + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is a double + * @memberOf module:datastax/graph + */ +export declare function asDouble(value: number): object; + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is a double + * @memberOf module:datastax/graph + */ +export declare function asFloat(value: number): object; + +/** + * Wraps a number or null value to hint the client driver that the data type of the value is an int + * @memberOf module:datastax/graph + */ +export declare function asInt(value: number): object; + +/** + * Wraps a Date or null value to hint the client driver that the data type of the value is a timestamp + * @memberOf module:datastax/graph + */ +export declare function asTimestamp(value: Date): object; + +/** + * Wraps an Object or null value to hint the client driver that the data type of the value is a user-defined type. + * @memberOf module:datastax/graph + * @param {object} value The object representing the UDT. + * @param {{name: string, keyspace: string, fields: Array}} udtInfo The UDT metadata as defined by the driver. + */ +export declare function asUdt(value: object, udtInfo: { + name: string; + keyspace: string; + fields: Array; +}): object; + +/** + * DSE Authentication module. + *

+ * Contains the classes used for connecting to a DSE cluster secured with DseAuthenticator. + *
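For reference, a provider from this module is typically passed through the client's authProvider option; the credentials below are placeholders:

```typescript
import { Client, auth } from 'cassandra-driver';

// Plain-text credentials; DseGssapiAuthProvider would be used for Kerberos/GSSAPI instead.
const client = new Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  authProvider: new auth.PlainTextAuthProvider('cassandra', 'cassandra')
});
```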

+ * @module auth + */ +export declare const auth: { + Authenticator: typeof Authenticator; + AuthProvider: typeof AuthProvider; + DseGssapiAuthProvider: typeof DseGssapiAuthProvider; + DsePlainTextAuthProvider: typeof DsePlainTextAuthProvider; + /* Excluded from this release type: NoAuthProvider */ + PlainTextAuthProvider: typeof PlainTextAuthProvider; +}; + +/** + * Represents an error when trying to authenticate with auth-enabled host + */ +export declare class AuthenticationError extends DriverError { + additionalInfo: ResponseError; + /** + * Represents an error when trying to authenticate with auth-enabled host + * @param {String} message + * @constructor + */ + constructor(message: string); +} + +/** + * Handles SASL authentication with Cassandra servers. + * Each time a new connection is created and the server requires authentication, + * a new instance of this class will be created by the corresponding. + * @alias module:auth~Authenticator + */ +export declare class Authenticator { + /** + * Obtain an initial response token for initializing the SASL handshake. + * @param {Function} callback + */ + initialResponse(callback: Function): void; + /** + * Evaluates a challenge received from the Server. Generally, this method should callback with + * no error and no additional params when authentication is complete from the client perspective. + * @param {Buffer} challenge + * @param {Function} callback + */ + evaluateChallenge(challenge: Buffer, callback: Function): void; + /** + * Called when authentication is successful with the last information + * optionally sent by the server. + * @param {Buffer} [token] + */ + onAuthenticationSuccess(token?: Buffer): void; +} + +/** + * Provides [Authenticator]{@link module:auth~Authenticator} instances to be used when connecting to a host. + * @abstract + * @alias module:auth~AuthProvider + */ +export declare class AuthProvider { + /** + * Returns an [Authenticator]{@link module:auth~Authenticator} instance to be used when connecting to a host. + * @param {String} endpoint The ip address and port number in the format ip:port + * @param {String} name Authenticator name + * @abstract + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator; +} + +/** @module types */ +/** + * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} + * unscaled value and a 32-bit integer scale. If zero + * or positive, the scale is the number of digits to the right of the + * decimal point. If negative, the unscaled value of the number is + * multiplied by ten to the power of the negation of the scale. The + * value of the number represented by the BigDecimal is + * therefore (unscaledValue × 10-scale). + * @class + * @classdesc The BigDecimal class provides operations for + * arithmetic, scale manipulation, rounding, comparison and + * format conversion. The {@link #toString} method provides a + * canonical representation of a BigDecimal. + */ +export declare class BigDecimal { + private _intVal; + private _scale; + /** + * Constructs an immutable arbitrary-precision signed decimal number. + * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} + * unscaled value and a 32-bit integer scale. If zero + * or positive, the scale is the number of digits to the right of the + * decimal point. If negative, the unscaled value of the number is + * multiplied by ten to the power of the negation of the scale. 
The + * value of the number represented by the BigDecimal is + * therefore (unscaledValue × 10-scale). + * @param {Integer|Number} unscaledValue The integer part of the decimal. + * @param {Number} scale The scale of the decimal. + * @constructor + */ + constructor(unscaledValue: Integer | number, scale: number); + /** + * Returns the BigDecimal representation of a buffer composed of the scale (int32BE) and the unsigned value (varint BE) + * @param {Buffer} buf + * @returns {BigDecimal} + */ + static fromBuffer(buf: Buffer): BigDecimal; + /** + * Returns a buffer representation composed of the scale as a BE int 32 and the unsigned value as a BE varint + * @param {BigDecimal} value + * @returns {Buffer} + */ + static toBuffer(value: BigDecimal): Buffer; + /** + * Returns a BigDecimal representation of the string + * @param {String} value + * @returns {BigDecimal} + */ + static fromString(value: string): BigDecimal; + /** + * Returns a BigDecimal representation of the Number + * @param {Number} value + * @returns {BigDecimal} + */ + static fromNumber(value: number): BigDecimal; + /** + * Returns true if the value of the BigDecimal instance and other are the same + * @param {BigDecimal} other + * @returns {Boolean} + */ + equals(other: BigDecimal): boolean; + /* Excluded from this release type: inspect */ + /** + * @param {BigDecimal} other + * @returns {boolean} + */ + notEquals(other: BigDecimal): boolean; + /** + * Compares this BigDecimal with the given one. + * @param {BigDecimal} other Integer to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: BigDecimal): number; + /** + * Returns the difference of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to subtract from this. + * @return {!BigDecimal} The BigDecimal result. + */ + subtract(other: BigDecimal): BigDecimal; + /** + * Returns the sum of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to sum to this. + * @return {!BigDecimal} The BigDecimal result. + */ + add(other: BigDecimal): BigDecimal; + /** + * Returns true if the current instance is greater than the other + * @param {BigDecimal} other + * @returns {boolean} + */ + greaterThan(other: BigDecimal): boolean; + /** @return {boolean} Whether this value is negative. */ + isNegative(): boolean; + /** @return {boolean} Whether this value is zero. */ + isZero(): boolean; + /** + * Returns the string representation of this BigDecimal + * @returns {string} + */ + toString(): string; + /** + * Returns a Number representation of this BigDecimal. + * @returns {Number} + */ + toNumber(): number; + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string; +} + +/** + * Represents a client-side error indicating that all connections to a certain host have reached + * the maximum amount of in-flight requests supported. + */ +export declare class BusyConnectionError extends DriverError { + /** + * Represents a client-side error indicating that all connections to a certain host have reached + * the maximum amount of in-flight requests supported. 
+ * @param {String} address + * @param {Number} maxRequestsPerConnection + * @param {Number} connectionLength + * @constructor + */ + constructor(address: string, maxRequestsPerConnection: number, connectionLength: number); +} + +/* Excluded from this release type: ByteOrderedToken */ + +/* Excluded from this release type: ByteOrderedTokenizer */ + +declare const cassandra: { + Client: typeof Client; + ExecutionProfile: typeof ExecutionProfile; + ExecutionOptions: typeof ExecutionOptions; + types: { + /* Excluded from this release type: getDataTypeNameByCode */ + /* Excluded from this release type: FrameHeader */ + /* Excluded from this release type: generateTimestamp */ + opcodes: { + error: number; + startup: number; + ready: number; + authenticate: number; + credentials: number; + options: number; + supported: number; + query: number; + result: number; + prepare: number; + execute: number; + register: number; + event: number; + batch: number; + authChallenge: number; + authResponse: number; + authSuccess: number; + cancel: number; + isInRange: (code: any) => boolean; + }; + consistencies: typeof consistencies; + consistencyToString: {}; + dataTypes: typeof dataTypes; + distance: typeof distance; + frameFlags: { + compression: number; + tracing: number; + customPayload: number; + warning: number; + }; + protocolEvents: { + topologyChange: string; + statusChange: string; + schemaChange: string; + }; + protocolVersion: typeof protocolVersion; + responseErrorCodes: typeof responseErrorCodes; + resultKind: { + voidResult: number; + rows: number; + setKeyspace: number; + prepared: number; + schemaChange: number; + }; + timeuuid: typeof timeuuid; + uuid: typeof uuid; + BigDecimal: typeof BigDecimal; + Duration: typeof Duration; + InetAddress: typeof InetAddress; + Integer: typeof Integer; + LocalDate: typeof LocalDate; + LocalTime: typeof LocalTime; + Long: typeof Long_2.default; + ResultSet: typeof ResultSet; + ResultStream: typeof ResultStream; + Row: typeof Row; + DriverError: typeof DriverError; + TimeoutError: typeof TimeoutError; + TimeUuid: typeof TimeUuid; + Tuple: typeof Tuple; + Uuid: typeof Uuid; + unset: Readonly<{ + readonly unset: true; + }>; + Vector: typeof Vector; + }; + errors: { + ArgumentError: typeof ArgumentError; + AuthenticationError: typeof AuthenticationError; + BusyConnectionError: typeof BusyConnectionError; + DriverError: typeof DriverError; + OperationTimedOutError: typeof OperationTimedOutError; + DriverInternalError: typeof DriverInternalError; + NoHostAvailableError: typeof NoHostAvailableError; + NotSupportedError: typeof NotSupportedError; + ResponseError: typeof ResponseError; + VIntOutOfRangeException: typeof VIntOutOfRangeException; + }; + policies: { + addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; + }; + loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + WhiteListPolicy: typeof WhiteListPolicy; + }; + reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; + }; + retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + 
FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; + }; + speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; + }; + timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; + }; + defaultAddressTranslator: () => AddressTranslator; + defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + defaultRetryPolicy: () => RetryPolicy; + defaultReconnectionPolicy: () => ReconnectionPolicy; + defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + defaultTimestampGenerator: () => TimestampGenerator; + }; + auth: { + /* Excluded from this release type: NoAuthProvider */ + Authenticator: typeof Authenticator; + AuthProvider: typeof AuthProvider; + DseGssapiAuthProvider: typeof DseGssapiAuthProvider; + DsePlainTextAuthProvider: typeof DsePlainTextAuthProvider; + PlainTextAuthProvider: typeof PlainTextAuthProvider; + }; + mapping: { + Mapper: typeof Mapper; + ModelMapper: typeof ModelMapper; + ModelBatchMapper: typeof ModelBatchMapper; + ModelBatchItem: typeof ModelBatchItem; + Result: typeof Result; + TableMappings: typeof TableMappings; + DefaultTableMappings: typeof DefaultTableMappings; + UnderscoreCqlToCamelCaseMappings: typeof UnderscoreCqlToCamelCaseMappings; + q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; + }; + }; + tracker: { + RequestTracker: typeof RequestTracker; + RequestLogger: typeof RequestLogger; + }; + metrics: { + ClientMetrics: typeof ClientMetrics; + DefaultMetrics: typeof DefaultMetrics; + }; + concurrent: { + executeConcurrent: typeof executeConcurrent; + ResultSetGroup: typeof ResultSetGroup; + }; + token: { + Token: typeof Token; + TokenRange: typeof TokenRange; + }; + metadata: { + Metadata: typeof Metadata; + }; + Encoder: typeof Encoder; + geometry: { + Point: typeof Point; + LineString: typeof LineString; + Polygon: typeof Polygon; + Geometry: typeof Geometry; + }; + datastax: { + graph: { + /* Excluded from this release type: getCustomTypeSerializers */ + /* Excluded from this release type: GraphTypeWrapper */ + /* Excluded from this release type: UdtGraphWrapper */ + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: { + typeName: any; + elementName: any; + toString(): any; + }; + in: { + typeName: any; + elementName: any; + toString(): any; + }; + out: { + typeName: any; + elementName: any; + toString(): any; + }; + in_: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + GraphResultSet: typeof GraphResultSet; + t: { + id: { + typeName: any; + elementName: any; + toString(): any; + }; + key: { + typeName: 
any; + elementName: any; + toString(): any; + }; + label: { + typeName: any; + elementName: any; + toString(): any; + }; + value: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + }; + search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; + }; + }; + /** + * Returns a new instance of the default [options]{@link ClientOptions} used by the driver. + */ + defaultOptions: () => ClientOptions; + version: string; +}; +export default cassandra; + +/* Excluded from this release type: checkServerIdentity */ + +/** + * Creates a new instance of {@link Client}. + * @classdesc + * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to + * execute CQL statements. + *

+ * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node
+ * to use for each query execution, when it should retry failed or timed-out executions, and how reconnection to
+ * nodes that are down should be made.
+ *

+ * @extends EventEmitter + * @param {ClientOptions} options The options for this instance. + * @example Creating a new client instance + * const client = new Client({ + * contactPoints: ['10.0.1.101', '10.0.1.102'], + * localDataCenter: 'datacenter1' + * }); + * @example Executing a query + * const result = await client.connect(); + * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); + * @example Executing a query + * const result = await client.execute('SELECT key FROM system.local'); + * const row = result.first(); + * console.log(row['key']); + */ +export declare class Client extends EventEmitter.EventEmitter { + /* Excluded from this release type: options */ + /* Excluded from this release type: profileManager */ + private connected; + private isShuttingDown; + /** + * Gets the name of the active keyspace. + * @type {String} + */ + keyspace: string; + /** + * Gets the schema and cluster metadata information. + * @type {Metadata} + */ + metadata: Metadata; + /* Excluded from this release type: controlConnection */ + /** + * Gets an associative array of cluster hosts. + * @type {HostMap} + */ + hosts: HostMap; + /** + * The [ClientMetrics]{@link module:metrics~ClientMetrics} instance used to expose measurements of its internal + * behavior and of the server as seen from the driver side. + *

By default, a [DefaultMetrics]{@link module:metrics~DefaultMetrics} instance is used.

+ * @type {ClientMetrics} + */ + metrics: ClientMetrics; + private _graphExecutor; + private connecting; + private insightsClient; + /** + * Creates a new instance of {@link Client}. + * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to + * execute CQL statements. + *

+ * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node
+ * to use for each query execution, when it should retry failed or timed-out executions, and how reconnection to
+ * nodes that are down should be made.
+ *

+ * @param {DseClientOptions} options The options for this instance. + * @example Creating a new client instance + * const client = new Client({ + * contactPoints: ['10.0.1.101', '10.0.1.102'], + * localDataCenter: 'datacenter1' + * }); + * @example Executing a query + * const result = await client.connect(); + * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); + * @example Executing a query + * const result = await client.execute('SELECT key FROM system.local'); + * const row = result.first(); + * console.log(row['key']); + * @constructor + */ + constructor(options: DseClientOptions); + /** + * Emitted when a new host is added to the cluster. + *
    + *
  • {@link Host} The host being added.
+ * @event Client#hostAdd + */ + /** + * Emitted when a host is removed from the cluster + *
    + *
  • {@link Host} The host being removed.
+ * @event Client#hostRemove + */ + /** + * Emitted when a host in the cluster changed status from down to up. + *
    + *
  • {@link Host host} The host that changed the status.
+ * @event Client#hostUp + */ + /** + * Emitted when a host in the cluster changed status from up to down. + *
    + *
  • {@link Host host} The host that changed the status.
+ * @event Client#hostDown
+ */
+ /**
+ * Attempts to connect to one of the [contactPoints]{@link ClientOptions} and discovers the rest of the nodes of the
+ * cluster.
+ *

When the {@link Client} is already connected, it resolves immediately.

+ *

It returns a Promise when a callback is not provided.

+ * @param {function} [callback] The optional callback that is invoked when the pool is connected or it failed to + * connect. + * @example Usage example + * await client.connect(); + */ + connect(): Promise; + connect(callback: EmptyCallback): void; + /** + * Async-only version of {@link Client#connect()}. + * @private + */ + private _connect; + /* Excluded from this release type: log */ + /** + * Executes a query on an available connection. + *

The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag.

+ *

+ * Some execution failures can be handled transparently by the driver, according to the + * [RetryPolicy]{@linkcode module:policies/retry~RetryPolicy} or the + * [SpeculativeExecutionPolicy]{@linkcode module:policies/speculativeExecution} used. + *
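A sketch of how those options can be grouped: an execution profile bundling a retry policy, consistency and read timeout, referenced by name at execution time. The profile name and query are arbitrary examples:

```typescript
import { Client, ExecutionProfile, policies, types } from 'cassandra-driver';

// Hypothetical profile bundling options that several executions can share.
const client = new Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  profiles: [
    new ExecutionProfile('read-fast-fail', {
      consistency: types.consistencies.localOne,
      retry: new policies.retry.FallthroughRetryPolicy(), // never retries, rethrows to the caller
      readTimeout: 5000
    })
  ]
});

// The profile is then referenced by name in the query options.
await client.execute('SELECT key FROM system.local', [], { executionProfile: 'read-fast-fail', prepare: true });
```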

+ *

It returns a Promise when a callback is not provided.

+ * @param {String} query The query to execute.
+ * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names
+ * as keys and their values.
+ * @param {QueryOptions} [options] The query options for the execution.
+ * @param {ResultCallback} [callback] Executes callback(err, result) when the execution completed. When not defined, the
+ * method will return a promise.
+ * @example Promise-based API, using async/await
+ * const query = 'SELECT name, email FROM users WHERE id = ?';
+ * const result = await client.execute(query, [ id ], { prepare: true });
+ * const row = result.first();
+ * console.log('%s: %s', row['name'], row['email']);
+ * @example Callback-based API
+ * const query = 'SELECT name, email FROM users WHERE id = ?';
+ * client.execute(query, [ id ], { prepare: true }, function (err, result) {
+ * assert.ifError(err);
+ * const row = result.first();
+ * console.log('%s: %s', row['name'], row['email']);
+ * });
+ * @see {@link ExecutionProfile} to reuse a set of options across different query executions.
+ */
+ execute(query: string, params?: ArrayOrObject, options?: QueryOptions): Promise;
+ execute(query: string, params: ArrayOrObject, options: QueryOptions, callback: ValueCallback): void;
+ execute(query: string, params: ArrayOrObject, callback: ValueCallback): void;
+ execute(query: string, callback: ValueCallback): void;
+ /**
+ * Executes a graph query.
+ *

It returns a Promise when a callback is not provided.

+ * @param {String} query The gremlin query. + * @param {Object|null} [parameters] An associative array containing the key and values of the parameters. + * @param {GraphQueryOptions|null} [options] The graph query options. + * @param {Function} [callback] Function to execute when the response is retrieved, taking two arguments: + * err and result. When not defined, the method will return a promise. + * @example Promise-based API, using async/await + * const result = await client.executeGraph('g.V()'); + * // Get the first item (vertex, edge, scalar value, ...) + * const vertex = result.first(); + * console.log(vertex.label); + * @example Callback-based API + * client.executeGraph('g.V()', (err, result) => { + * const vertex = result.first(); + * console.log(vertex.label); + * }); + * @example Iterating through the results + * const result = await client.executeGraph('g.E()'); + * for (let edge of result) { + * console.log(edge.label); // created + * }); + * @example Using result.forEach() + * const result = await client.executeGraph('g.V().hasLabel("person")'); + * result.forEach(function(vertex) { + * console.log(vertex.type); // vertex + * console.log(vertex.label); // person + * }); + * @see {@link ExecutionProfile} to reuse a set of options across different query executions. + */ + executeGraph(traversal: string, parameters: { + [name: string]: any; + } | undefined, options: GraphQueryOptions, callback: ValueCallback): void; + executeGraph(traversal: string, parameters: { + [name: string]: any; + } | undefined, callback: ValueCallback): void; + executeGraph(traversal: string, callback: ValueCallback): void; + executeGraph(traversal: string, parameters?: { + [name: string]: any; + }, options?: GraphQueryOptions): Promise; + /** + * Executes the query and calls rowCallback for each row as soon as they are received. Calls the final + * callback after all rows have been sent, or when there is an error. + *

+ * The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag. + *

+ * @param {String} query The query to execute
+ * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names
+ * as keys and their values.
+ * @param {QueryOptions} [options] The query options.
+ * @param {function} rowCallback Executes rowCallback(n, row) for each row received, where n is the row
+ * index and row is the current Row.
+ * @param {function} [callback] Executes callback(err, result) after all rows have been received.
+ *

+ * When dealing with paged results, the [ResultSet#nextPage()]{@link module:types~ResultSet#nextPage} method can be used
+ * to retrieve the following page. In that case, rowCallback() will be called again for each row and
+ * the final callback will be invoked when all rows in the following page have been retrieved.
+ *
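A small sketch of that paging flow, assuming an already connected Client instance named client and a placeholder table; nextPage is only set on the result when more pages are available:

```typescript
// Fetch rows in pages of 1000 and pull the next page once the current one is exhausted.
client.eachRow(
  'SELECT time, value FROM sensor_readings WHERE sensor_id = ?', ['sensor-1'],
  { prepare: true, fetchSize: 1000 },
  (n, row) => console.log(n, row['value']),
  (err, result) => {
    if (err) { return console.error(err); }
    if (result.nextPage) {
      result.nextPage(); // rowCallback will be invoked again for the rows of the next page
    }
  }
);
```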

+ * @example Using per-row callback and arrow functions
+ * client.eachRow(query, params, { prepare: true }, (n, row) => console.log(n, row), err => console.error(err));
+ * @example Overloads
+ * client.eachRow(query, rowCallback);
+ * client.eachRow(query, params, rowCallback);
+ * client.eachRow(query, params, options, rowCallback);
+ * client.eachRow(query, params, rowCallback, callback);
+ * client.eachRow(query, params, options, rowCallback, callback);
+ */
+ eachRow(query: string, params: ArrayOrObject, options: QueryOptions, rowCallback: (n: number, row: Row) => void, callback?: ValueCallback): void;
+ eachRow(query: string, params: ArrayOrObject, rowCallback: (n: number, row: Row) => void, callback?: ValueCallback): void;
+ eachRow(query: string, rowCallback: (n: number, row: Row) => void): void;
+ /**
+ * Executes the query and pushes the rows to the result stream as soon as they are received.
+ *

+ * The stream is a [ReadableStream]{@linkcode https://nodejs.org/api/stream.html#stream_class_stream_readable} object + * that emits rows. + * It can be piped downstream and provides automatic pause/resume logic (it buffers when not read). + *

+ *

+ * The query can be prepared (recommended) or not depending on the {@link QueryOptions}.prepare flag. The execution
+ * retries on multiple hosts if needed.
+ *
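An illustrative way to consume the returned stream, again assuming a connected client and a placeholder query; rows are read as they become available:

```typescript
const stream = client.stream('SELECT name, email FROM users WHERE id = ?', ['admin-1'], { prepare: true });

stream
  .on('readable', () => {
    // read() returns null once the internal buffer is drained; more 'readable' events follow.
    let row;
    while ((row = stream.read()) !== null) {
      console.log(row['name'], row['email']);
    }
  })
  .on('error', err => console.error(err))
  .on('end', () => console.log('All rows have been retrieved'));
```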

+ * @param {String} query The query to prepare and execute.
+ * @param {ArrayOrObject} [params] Array of parameter values or an associative array (object) containing parameter names
+ * as keys and their values
+ * @param {QueryOptions} [options] The query options.
+ * @param {function} [callback] Executes callback(err) after all rows have been received or if there is an error
+ * @returns {ResultStream}
+ */
+ stream(query: string, params?: ArrayOrObject, options?: QueryOptions, callback?: EmptyCallback): ResultStream;
+ /**
+ * Executes a batch of queries on an available connection to a host.
+ *

+ * It returns a Promise when a callback is not provided.
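+ * @example Executing a batch (an illustrative sketch; table and column names are placeholders)
+ * const queries = [
+ *   { query: 'UPDATE user_profiles SET email = ? WHERE key = ?', params: [email, key] },
+ *   { query: 'INSERT INTO user_track (key, text, date) VALUES (?, ?, ?)', params: [key, text, new Date()] }
+ * ];
+ * await client.batch(queries, { prepare: true });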

+ * @param {Array.|Array.<{query, params}>} queries The queries to execute as an Array of strings or as an array + * of objects containing the query and params + * @param {QueryOptions} [options] The query options. + * @param {ResultCallback} [callback] Executes callback(err, result) when the batch was executed + */ + batch(queries: Array, options?: QueryOptions): Promise; + batch(queries: Array, options: QueryOptions, callback: ValueCallback): void; + batch(queries: Array, callback: ValueCallback): void; + /** + * Async-only version of {@link Client#batch()}. + * @param {Array.|Array.<{query, params}>} queries + * @param {QueryOptions} options + * @returns {Promise} + * @private + */ + private _batch; + /** + * Gets the hosts that are replicas of a given token. + * @param {String} keyspace + * @param {Buffer} token + * @returns {Array} + */ + getReplicas(keyspace: string, token: Buffer): Array; + /** + * Gets a snapshot containing information on the connections pools held by this Client at the current time. + *

+ * The information provided in the returned object only represents the state at the moment this method was called and + * it's not maintained in sync with the driver metadata. + *
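+ * @example Inspecting the pool state (an illustrative sketch)
+ * const state = client.getState();
+ * for (const host of state.getConnectedHosts()) {
+ *   console.log(host.address, state.getOpenConnections(host), state.getInFlightQueries(host));
+ * }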

+ * @returns {ClientState} A [ClientState]{@linkcode module:metadata~ClientState} instance. + */ + getState(): ClientState; + /** + * Closes all connections to all hosts. + *

+ * It returns a Promise when a callback is not provided.
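+ * @example Shutting down at application exit (an illustrative sketch)
+ * // Invoke it once, after all pending queries have completed
+ * await client.shutdown();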

+ * @param {Function} [callback] Optional callback to be invoked when finished closing all connections. + */ + shutdown(): Promise; + shutdown(callback: EmptyCallback): void; + /** @private */ + private _shutdown; + /* Excluded from this release type: _waitForSchemaAgreement */ + /* Excluded from this release type: handleSchemaAgreementAndRefresh */ + /** + * Connects and handles the execution of prepared and simple statements. + * @param {string} query + * @param {Array} params + * @param {ExecutionOptions} execOptions + * @returns {Promise} + * @private + */ + private _execute; + /** + * Sets the listeners for the nodes. + * @private + */ + private _setHostListeners; + /** + * Sets the distance to each host and when warmup is true, creates all connections to local hosts. + * @returns {Promise} + * @private + */ + private _warmup; + /** + * @returns {Encoder} + * @private + */ + private _getEncoder; + /** + * Returns a BatchRequest instance and fills the routing key information in the provided options. + * @private + */ + private _createBatchRequest; + /** + * Returns an ExecuteRequest instance and fills the routing key information in the provided options. + * @private + */ + private _createExecuteRequest; + /** + * Returns a QueryRequest instance and fills the routing key information in the provided options. + * @private + */ + private _createQueryRequest; + /** + * Sets the routing key based on the parameter values or the provided routing key components. + * @param {ExecutionOptions} execOptions + * @param {Array} params + * @param meta + * @private + */ + private _setRoutingInfo; +} + +/** + * Represents a base class that is used to measure events from the server and the client as seen by the driver. + * @alias module:metrics~ClientMetrics + * @interface + */ +export declare class ClientMetrics { + /** + * Method invoked when an authentication error is obtained from the server. + * @param {AuthenticationError|Error} e The error encountered. + */ + onAuthenticationError(e: AuthenticationError | Error): void; + /** + * Method invoked when an error (different than a server or client timeout, authentication or connection error) is + * encountered when executing a request. + * @param {OperationTimedOutError} e The timeout error. + */ + onClientTimeoutError(e: OperationTimedOutError): void; + /** + * Method invoked when there is a connection error. + * @param {Error} e The error encountered. + */ + onConnectionError(e: Error): void; + /** + * Method invoked when an error (different than a server or client timeout, authentication or connection error) is + * encountered when executing a request. + * @param {Error} e The error encountered. + */ + onOtherError(e: Error): void; + /** + * Method invoked when a read timeout error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onReadTimeoutError(e: ResponseError): void; + /** + * Method invoked when a write timeout error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onWriteTimeoutError(e: ResponseError): void; + /** + * Method invoked when an unavailable error is obtained from the server. + * @param {ResponseError} e The error encountered. + */ + onUnavailableError(e: ResponseError): void; + /** + * Method invoked when an execution is retried as a result of a client-level timeout. + * @param {Error} e The error that caused the retry. + */ + onClientTimeoutRetry(e: Error): void; + /** + * Method invoked when an error (other than a server or client timeout) is retried. 
+ * @param {Error} e The error that caused the retry. + */ + onOtherErrorRetry(e: Error): void; + /** + * Method invoked when an execution is retried as a result of a read timeout from the server (coordinator to replica). + * @param {Error} e The error that caused the retry. + */ + onReadTimeoutRetry(e: Error): void; + /** + * Method invoked when an execution is retried as a result of an unavailable error from the server. + * @param {Error} e The error that caused the retry. + */ + onUnavailableRetry(e: Error): void; + /** + * Method invoked when an execution is retried as a result of a write timeout from the server (coordinator to + * replica). + * @param {Error} e The error that caused the retry. + */ + onWriteTimeoutRetry(e: Error): void; + /** + * Method invoked when an error is marked as ignored by the retry policy. + * @param {Error} e The error that was ignored by the retry policy. + */ + onIgnoreError(e: Error): void; + /** + * Method invoked when a speculative execution is started. + */ + onSpeculativeExecution(): void; + /** + * Method invoked when a response is obtained successfully. + * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple + * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. + */ + onSuccessfulResponse(latency: Array): void; + /** + * Method invoked when any response is obtained, the response can be the result of a successful execution or a + * server-side error. + * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple + * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. + */ + onResponse(latency: Array): void; +} + +/** + * Client options. + *

+ * While the driver provides lots of extensibility points and configurability, only a few client options are required.

+ *

+ * Default values for all settings are designed to be suitable for the majority of use cases; you should avoid + * fine-tuning them when not needed.

+ *

+ * See [Client constructor]{@link Client} documentation for recommended options.

+ * @typedef {Object} ClientOptions + * @property {Array.} contactPoints + * Array of addresses or host names of the nodes to add as contact points. + *

+ * Contact points are addresses of Cassandra nodes that the driver uses to discover the cluster topology. + *

+ *

+ * Only one contact point is required (the driver will retrieve the address of the other nodes automatically), + * but it is usually a good idea to provide more than one contact point, because if that single contact point is + * unavailable, the driver will not be able to initialize correctly. + *

+ * @property {String} [localDataCenter] The local data center to use. + *

+ * If using DCAwareRoundRobinPolicy (default), this option is required and only hosts from this data center are + * connected to and used in query plans. + *

+ * @property {String} [keyspace] The logged keyspace for all the connections created within the {@link Client} instance. + * @property {Object} [credentials] An object containing the username and password for plain-text authentication. + * It configures the authentication provider to be used against Apache Cassandra's PasswordAuthenticator or DSE's + * DseAuthenticator, when default auth scheme is plain-text. + *

+ * Note that you should configure either credentials or authProvider to connect to an + * auth-enabled cluster, but not both. + *

+ * @property {String} [credentials.username] The username to use for plain-text authentication. + * @property {String} [credentials.password] The password to use for plain-text authentication. + * @property {Uuid} [id] A unique identifier assigned to a {@link Client} object, that will be communicated to the + * server (DSE 6.0+) to identify the client instance created with this options. When not defined, the driver will + * generate a random identifier. + * @property {String} [applicationName] An optional setting identifying the name of the application using + * the {@link Client} instance. + *

+ * This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {String} [applicationVersion] An optional setting identifying the version of the application using + * the {@link Client} instance. + *

+ * This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {Object} [monitorReporting] Options for reporting mechanism from the client to the DSE server, for + * versions that support it. + * @property {Boolean} [monitorReporting.enabled=true] Determines whether the reporting mechanism is enabled. + * Defaults to true. + * @property {Object} [cloud] The options to connect to a cloud instance. + * @property {String|URL} cloud.secureConnectBundle Determines the file path for the credentials file bundle. + * @property {Number} [refreshSchemaDelay] The default window size in milliseconds used to debounce node list and schema + * refresh metadata requests. Default: 1000. + * @property {Boolean} [isMetadataSyncEnabled] Determines whether client-side schema metadata retrieval and update is + * enabled. + *

+ * Setting this value to false will cause keyspace information not to be automatically loaded, affecting + * replica calculation per token in the different keyspaces. When disabling metadata synchronization, use + * [Metadata.refreshKeyspaces()]{@link module:metadata~Metadata#refreshKeyspaces} to keep keyspace information up to + * date or token-awareness will not work correctly.

+ * Default: true. + * @property {Boolean} [prepareOnAllHosts] Determines if the driver should prepare queries on all hosts in the cluster. + * Default: true. + * @property {Boolean} [rePrepareOnUp] Determines if the driver should re-prepare all cached prepared queries on a + * host when it marks it back up. + * Default: true. + * @property {Number} [maxPrepared] Determines the maximum amount of different prepared queries before evicting items + * from the internal cache. Reaching a high threshold hints that the queries are not being reused, like when + * hard-coding parameter values inside the queries. + * Default: 500. + * @property {Object} [policies] + * @property {LoadBalancingPolicy} [policies.loadBalancing] The load balancing policy instance to be used to determine + * the coordinator per query. + * @property {RetryPolicy} [policies.retry] The retry policy. + * @property {ReconnectionPolicy} [policies.reconnection] The reconnection policy to be used. + * @property {AddressTranslator} [policies.addressResolution] The address resolution policy. + * @property {SpeculativeExecutionPolicy} [policies.speculativeExecution] The SpeculativeExecutionPolicy + * instance to be used to determine if the client should send speculative queries when the selected host takes more + * time than expected. + *

+ * Default: [NoSpeculativeExecutionPolicy]{@link + * module:policies/speculativeExecution~NoSpeculativeExecutionPolicy} + *

+ * @property {TimestampGenerator} [policies.timestampGeneration] The client-side + * [query timestamp generator]{@link module:policies/timestampGeneration~TimestampGenerator}. + *

+ * Default: [MonotonicTimestampGenerator]{@link module:policies/timestampGeneration~MonotonicTimestampGenerator} + * + *

+ *

+ * Use null to disable client-side timestamp generation.

+ * @property {QueryOptions} [queryOptions] Default options for all queries. + * @property {Object} [pooling] Pooling options. + * @property {Number} [pooling.heartBeatInterval] The amount of idle time in milliseconds that has to pass before the + * driver issues a request on an active connection to avoid idle time disconnections. Default: 30000. + * @property {Object} [pooling.coreConnectionsPerHost] Associative array containing amount of connections per host + * distance. + * @property {Number} [pooling.maxRequestsPerConnection] The maximum number of requests per connection. The default + * value is: + *
+ * • For modern protocol versions (v3 and above): 2048
+ * • For older protocol versions (v1 and v2): 128
+ * @property {Boolean} [pooling.warmup] Determines if all connections to hosts in the local datacenter must be opened on + * connect. Default: true. + * @property {Object} [protocolOptions] + * @property {Number} [protocolOptions.port] The port to use to connect to the Cassandra host. If not set through this + * method, the default port (9042) will be used instead. + * @property {Number} [protocolOptions.maxSchemaAgreementWaitSeconds] The maximum time in seconds to wait for schema + * agreement between nodes before returning from a DDL query. Default: 10. + * @property {Number} [protocolOptions.maxVersion] When set, it limits the maximum protocol version used to connect to + * the nodes. + * Useful for using the driver against a cluster that contains nodes with different major/minor versions of Cassandra. + * @property {Boolean} [protocolOptions.noCompact] When set to true, enables the NO_COMPACT startup option. + *

+ * When this option is supplied SELECT, UPDATE, DELETE, and BATCH + * statements on COMPACT STORAGE tables function in "compatibility" mode which allows seeing these tables + * as if they were "regular" CQL tables. + *

+ *

+ * This option only affects interactions with tables using COMPACT STORAGE and is only + * supported by C* 3.0.16+, 3.11.2+, 4.0+ and DSE 6.0+. + *

+ * @property {Object} [socketOptions] + * @property {Number} [socketOptions.connectTimeout] Connection timeout in milliseconds. Default: 5000. + * @property {Number} [socketOptions.defunctReadTimeoutThreshold] Determines the amount of requests that simultaneously + * have to timeout before closing the connection. Default: 64. + * @property {Boolean} [socketOptions.keepAlive] Whether to enable TCP keep-alive on the socket. Default: true. + * @property {Number} [socketOptions.keepAliveDelay] TCP keep-alive delay in milliseconds. Default: 0. + * @property {Number} [socketOptions.readTimeout] Per-host read timeout in milliseconds. + *

+ * Please note that this is not the maximum time a call to {@link Client#execute} may have to wait; + * this is the maximum time that call will wait for one particular Cassandra host, but other hosts will be tried if + * one of them times out. In other words, a {@link Client#execute} call may theoretically wait up to + * readTimeout * number_of_cassandra_hosts (though the total number of hosts tried for a given query also + * depends on the LoadBalancingPolicy in use). + *

+ * When setting this value, keep in mind the following:

+ * • The timeout settings used on the Cassandra side (*_request_timeout_in_ms in cassandra.yaml) should be taken
+ *   into account when picking a value for this read timeout. You should pick a value a couple of seconds greater than
+ *   the Cassandra timeout settings.
+ * • The read timeout is only approximate and only controls the timeout to one Cassandra host, not the full query.
+ * Setting a value of 0 disables read timeouts. Default: 12000. + * @property {Boolean} [socketOptions.tcpNoDelay] When set to true, it disables the Nagle algorithm. Default: true. + * @property {Number} [socketOptions.coalescingThreshold] Buffer length in bytes use by the write queue before flushing + * the frames. Default: 8000. + * @property {AuthProvider} [authProvider] Provider to be used to authenticate to an auth-enabled cluster. + * @property {RequestTracker} [requestTracker] The instance of RequestTracker used to monitor or log requests executed + * with this instance. + * @property {Object} [sslOptions] Client-to-node ssl options. When set the driver will use the secure layer. + * You can specify cert, ca, ... options named after the Node.js tls.connect() options. + *

+ * It uses the same default values as Node.js tls.connect() except for rejectUnauthorized + * which is set to false by default (for historical reasons). This setting is likely to change + * in upcoming versions to enable validation by default. + *

+ * @property {Object} [encoding] Encoding options. + * @property {Function} [encoding.map] Map constructor to use for Cassandra map type encoding and decoding. + * If not set, it will default to Javascript Object with map keys as property names. + * @property {Function} [encoding.set] Set constructor to use for Cassandra set type encoding and decoding. + * If not set, it will default to Javascript Array. + * @property {Boolean} [encoding.copyBuffer] Determines if the network buffer should be copied for buffer based data + * types (blob, uuid, timeuuid and inet). + *

+ * Setting it to true will cause the network buffer to be copied for each row value of those types, + * causing additional allocations but freeing the network buffer to be reused. + * Setting it to true is a good choice for cases where the Row and ResultSet returned by the queries are long-lived + * objects. + *

+ *

+ * Setting it to false will cause less overhead, but the reference to the network buffer will be maintained until the row + * / result set are de-referenced. + * Default: true. + *

+ * @property {Boolean} [encoding.useUndefinedAsUnset] Valid for Cassandra 2.2 and above. Determines that, if a parameter + * is set to + * undefined it should be encoded as unset. + *

+ * By default, ECMAScript undefined is encoded as null in the driver. Cassandra 2.2 + * introduced the concept of unset. + * At driver level, you can set a parameter to unset using the field types.unset. Setting this flag to + * true allows you to use ECMAScript undefined as Cassandra unset. + *

+ *

+ * Default: true. + *

+ * @property {Boolean} [encoding.useBigIntAsLong] Use [BigInt ECMAScript type](https://tc39.github.io/proposal-bigint/) + * to represent CQL bigint and counter data types. + * @property {Boolean} [encoding.useBigIntAsVarint] Use [BigInt ECMAScript + * type](https://tc39.github.io/proposal-bigint/) to represent CQL varint data type. + * @property {Array.} [profiles] The array of [execution profiles]{@link ExecutionProfile}. + * @property {Function} [promiseFactory] Function to be used to create a Promise from a + * callback-style function. + *

+ * Promise libraries often provide different methods to create a promise. For example, you can use Bluebird's + * Promise.fromCallback() method. + *

+ *

+ * By default, the driver will use the + * [Promise constructor]{@link https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise}. + *
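+ * @example A minimal set of options (an illustrative sketch; addresses, data center, keyspace and credentials are placeholders)
+ * const client = new Client({
+ *   contactPoints: ['10.0.0.1', '10.0.0.2'],
+ *   localDataCenter: 'datacenter1',
+ *   keyspace: 'ks1',
+ *   credentials: { username: 'user', password: 'p@ssword1' },
+ *   pooling: { heartBeatInterval: 30000, maxRequestsPerConnection: 2048 },
+ *   socketOptions: { readTimeout: 12000 },
+ *   encoding: { useUndefinedAsUnset: true, useBigIntAsLong: true }
+ * });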

+ */ +export declare interface ClientOptions { + /* Excluded from this release type: applicationName */ + /* Excluded from this release type: applicationVersion */ + authProvider?: AuthProvider; + contactPoints?: string[]; + localDataCenter?: string; + /* Excluded from this release type: logEmitter */ + keyspace?: string; + credentials?: { + username: string; + password: string; + }; + cloud?: { + secureConnectBundle: string | URL; + }; + encoding?: { + map?: Function; + set?: Function; + copyBuffer?: boolean; + useUndefinedAsUnset?: boolean; + useBigIntAsLong?: boolean; + useBigIntAsVarint?: boolean; + }; + /* Excluded from this release type: id */ + isMetadataSyncEnabled?: boolean; + maxPrepared?: number; + metrics?: ClientMetrics; + /* Excluded from this release type: monitorReporting */ + policies?: { + addressResolution?: AddressTranslator; + loadBalancing?: LoadBalancingPolicy; + reconnection?: ReconnectionPolicy; + retry?: RetryPolicy; + speculativeExecution?: SpeculativeExecutionPolicy; + timestampGeneration?: TimestampGenerator; + }; + pooling?: { + coreConnectionsPerHost?: { + [key: number]: number; + }; + heartBeatInterval?: number; + maxRequestsPerConnection?: number; + warmup?: boolean; + }; + prepareOnAllHosts?: boolean; + profiles?: ExecutionProfile[]; + protocolOptions?: { + maxSchemaAgreementWaitSeconds?: number; + maxVersion?: number; + noCompact?: boolean; + port?: number; + }; + promiseFactory?: (handler: (callback: (err: Error, result?: any) => void) => void) => Promise; + queryOptions?: QueryOptions; + refreshSchemaDelay?: number; + rePrepareOnUp?: boolean; + requestTracker?: RequestTracker; + /* Excluded from this release type: sni */ + socketOptions?: { + coalescingThreshold?: number; + connectTimeout?: number; + defunctReadTimeoutThreshold?: number; + keepAlive?: boolean; + keepAliveDelay?: number; + readTimeout?: number; + tcpNoDelay?: boolean; + }; + sslOptions?: ConnectionOptions; +} + +/** + * Represents the state of a {@link Client}. + *

+ * Exposes information on the connections maintained by a Client at a specific time. + *

+ * @alias module:metadata~ClientState + * @constructor + */ +export declare class ClientState { + private _hosts; + private _openConnections; + private _inFlightQueries; + /* Excluded from this release type: __constructor */ + /** + * Get an array of hosts to which the client is connected to. + * @return {Array} + */ + getConnectedHosts(): Array; + /** + * Gets the amount of open connections to a given host. + * @param {Host} host + * @return {Number} + */ + getOpenConnections(host: Host): number; + /** + * Gets the amount of queries that are currently being executed through a given host. + *

+ * This corresponds to the number of queries that have been sent by the Client to server Host on one of its connections + * but haven't yet obtained a response. + *

+ * @param {Host} host + * @return {Number} + */ + getInFlightQueries(host: Host): number; + /** + * Returns the string representation of the instance. + */ + toString(): string; + /* Excluded from this release type: from */ +} + +export declare interface ColumnInfo { + name: string; + type: DataTypeInfo; +} + +export declare const concurrent: { + executeConcurrent: typeof executeConcurrent; + ResultSetGroup: typeof ResultSetGroup; +}; + +/* Excluded from this release type: Connection */ + +/** + * Consistency levels + * @type {Object} + * @property {Number} any Writing: A write must be written to at least one node. If all replica nodes for the given row key are down, the write can still succeed after a hinted handoff has been written. If all replica nodes are down at write time, an ANY write is not readable until the replica nodes for that row have recovered. + * @property {Number} one Returns a response from the closest replica, as determined by the snitch. + * @property {Number} two Returns the most recent data from two of the closest replicas. + * @property {Number} three Returns the most recent data from three of the closest replicas. + * @property {Number} quorum Reading: Returns the record with the most recent timestamp after a quorum of replicas has responded regardless of data center. Writing: A write must be written to the commit log and memory table on a quorum of replica nodes. + * @property {Number} all Reading: Returns the record with the most recent timestamp after all replicas have responded. The read operation will fail if a replica does not respond. Writing: A write must be written to the commit log and memory table on all replica nodes in the cluster for that row. + * @property {Number} localQuorum Reading: Returns the record with the most recent timestamp once a quorum of replicas in the current data center as the coordinator node has reported. Writing: A write must be written to the commit log and memory table on a quorum of replica nodes in the same data center as the coordinator node. Avoids latency of inter-data center communication. + * @property {Number} eachQuorum Reading: Returns the record once a quorum of replicas in each data center of the cluster has responded. Writing: Strong consistency. A write must be written to the commit log and memtable on a quorum of replica nodes in all data centers. + * @property {Number} serial Achieves linearizable consistency for lightweight transactions by preventing unconditional updates. + * @property {Number} localSerial Same as serial but confined to the data center. A write must be written conditionally to the commit log and memtable on a quorum of replica nodes in the same data center. + * @property {Number} localOne Similar to One but only within the DC the coordinator is in. + */ +export declare enum consistencies { + any = 0, + one = 1, + two = 2, + three = 3, + quorum = 4, + all = 5, + localQuorum = 6, + eachQuorum = 7, + serial = 8, + localSerial = 9, + localOne = 10 +} + +/** + * Mapping of consistency level codes to their string representation. + * @type {Object} + */ +export declare const consistencyToString: {}; + +/** + * A reconnection policy that waits a constant time between each reconnection attempt. + */ +export declare class ConstantReconnectionPolicy extends ReconnectionPolicy { + private delay; + /** + * A reconnection policy that waits a constant time between each reconnection attempt. 
+ * @param {Number} delay Delay in ms + * @constructor + */ + constructor(delay: number); + /** + * A new reconnection schedule that returns the same next delay value + * @returns { Iterator} An infinite iterator + */ + newSchedule(): Iterator; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * @classdesc + * A {@link SpeculativeExecutionPolicy} that schedules a given number of speculative executions, + * separated by a fixed delay. + * @extends {SpeculativeExecutionPolicy} + */ +export declare class ConstantSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + private _delay; + private _maxSpeculativeExecutions; + /** + * Creates a new instance of ConstantSpeculativeExecutionPolicy. + * @constructor + * @param {Number} delay The delay between each speculative execution. + * @param {Number} maxSpeculativeExecutions The amount of speculative executions that should be scheduled after the + * initial execution. Must be strictly positive. + */ + constructor(delay: number, maxSpeculativeExecutions: number); + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/* Excluded from this release type: ControlConnection */ + +declare type CustomSimpleColumnInfo = { + code: (dataTypes.custom); + info: CustomSimpleTypeNames; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +declare type CustomSimpleTypeCodes = ('point' | 'polygon' | 'duration' | 'lineString' | 'dateRange'); + +declare type CustomSimpleTypeNames = (typeof customTypeNames[CustomSimpleTypeCodes]) | CustomSimpleTypeCodes | 'empty'; + +declare const customTypeNames: Readonly<{ + readonly duration: "org.apache.cassandra.db.marshal.DurationType"; + readonly lineString: "org.apache.cassandra.db.marshal.LineStringType"; + readonly point: "org.apache.cassandra.db.marshal.PointType"; + readonly polygon: "org.apache.cassandra.db.marshal.PolygonType"; + readonly dateRange: "org.apache.cassandra.db.marshal.DateRangeType"; + readonly vector: "org.apache.cassandra.db.marshal.VectorType"; +}>; + +/** + * Creates a new instance of DataCollection + * @param {String} name Name of the data object. + * @classdesc Describes a table or a view + * @alias module:metadata~DataCollection + * @constructor + * @abstract + */ +export declare class DataCollection extends EventEmitter.EventEmitter { + /** + * Name of the object + * @type {String} + */ + name: string; + /** + * False-positive probability for SSTable Bloom filters. + * @type {number} + */ + bloomFilterFalsePositiveChance: number; + /** + * Level of caching: all, keys_only, rows_only, none + * @type {String} + */ + caching: string; + /** + * A human readable comment describing the table. + * @type {String} + */ + comment: string; + /** + * Specifies the time to wait before garbage collecting tombstones (deletion markers) + * @type {number} + */ + gcGraceSeconds: number; + /** + * Compaction strategy class used for the table. + * @type {String} + */ + compactionClass: string; + /** + * Associative-array containing the compaction options keys and values. + * @type {Object} + */ + compactionOptions: { + [option: string]: any; + }; + /** + * Associative-array containing the compaction options keys and values. 
+ * @type {Object} + */ + compression: { + class?: string; + [option: string]: any; + }; + /** + * Specifies the probability of read repairs being invoked over all replicas in the current data center. + * @type {number} + */ + localReadRepairChance: number; + /** + * Specifies the probability with which read repairs should be invoked on non-quorum reads. The value must be + * between 0 and 1. + * @type {number} + */ + readRepairChance: number; + /** + * An associative Array containing extra metadata for the table. + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Object} + */ + extensions: { + [option: string]: any; + }; + /** + * When compression is enabled, this option defines the probability + * with which checksums for compressed blocks are checked during reads. + * The default value for this options is 1.0 (always check). + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Number|null} + */ + crcCheckChance?: number; + /** + * Whether the populate I/O cache on flush is set on this table. + * @type {Boolean} + */ + populateCacheOnFlush: boolean; + /** + * Returns the default TTL for this table. + * @type {Number} + */ + defaultTtl: number; + /** + * Returns the speculative retry option for this table. + * @type {String} + */ + speculativeRetry: string; + /** + * Returns the minimum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + minIndexInterval?: number; + /** + * Returns the maximum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + maxIndexInterval?: number; + /** + * Array describing the table columns. + * @type {Array} + */ + columns: ColumnInfo[]; + /** + * An associative Array of columns by name. + * @type {Object} + */ + columnsByName: { + [key: string]: ColumnInfo; + }; + /** + * Array describing the columns that are part of the partition key. + * @type {Array} + */ + partitionKeys: ColumnInfo[]; + /** + * Array describing the columns that form the clustering key. + * @type {Array} + */ + clusteringKeys: ColumnInfo[]; + /** + * Array describing the clustering order of the columns in the same order as the clusteringKeys. + * @type {Array} + */ + clusteringOrder: string[]; + /** + * An associative Array containing nodesync options for this table. + *

+ * For DSE versions prior to 6.0.0, this method always returns {@code null}. If nodesync + * was not explicitly configured for this table this method will also return {@code null}. + *

+ * @type {Object} + */ + nodesync?: object; + /* Excluded from this release type: __constructor */ +} + +/** + * DataStax module. + *

+ * Contains modules and classes to represent functionality that is specific to DataStax products. + *

+ * @module datastax + */ +export declare const datastax: { + graph: { + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: { + typeName: any; + elementName: any; + toString(): any; + }; + in: { + typeName: any; + elementName: any; + toString(): any; + }; + out: { + typeName: any; + elementName: any; + toString(): any; + }; + in_: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + /* Excluded from this release type: getCustomTypeSerializers */ + GraphResultSet: typeof GraphResultSet; + /* Excluded from this release type: GraphTypeWrapper */ + t: { + id: { + typeName: any; + elementName: any; + toString(): any; + }; + key: { + typeName: any; + elementName: any; + toString(): any; + }; + label: { + typeName: any; + elementName: any; + toString(): any; + }; + value: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + /* Excluded from this release type: UdtGraphWrapper */ + }; + search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; + }; +}; + +export declare type DataTypeInfo = SingleColumnInfo | CustomSimpleColumnInfo | MapColumnInfo | TupleColumnInfo | ListSetColumnInfo | VectorColumnInfo | OtherCustomColumnInfo | UdtColumnInfo | TupleListColumnInfoWithoutSubtype; + +/** + * CQL data types + * @type {Object} + * @property {Number} custom A custom type. + * @property {Number} ascii ASCII character string. + * @property {Number} bigint 64-bit signed long. + * @property {Number} blob Arbitrary bytes (no validation). + * @property {Number} boolean true or false. + * @property {Number} counter Counter column (64-bit signed value). + * @property {Number} decimal Variable-precision decimal. + * @property {Number} double 64-bit IEEE-754 floating point. + * @property {Number} float 32-bit IEEE-754 floating point. + * @property {Number} int 32-bit signed integer. + * @property {Number} text UTF8 encoded string. + * @property {Number} timestamp A timestamp. + * @property {Number} uuid Type 1 or type 4 UUID. + * @property {Number} varchar UTF8 encoded string. + * @property {Number} varint Arbitrary-precision integer. + * @property {Number} timeuuid Type 1 UUID. + * @property {Number} inet An IP address. It can be either 4 bytes long (IPv4) or 16 bytes long (IPv6). + * @property {Number} date A date without a time-zone in the ISO-8601 calendar system. + * @property {Number} time A value representing the time portion of the day. + * @property {Number} smallint 16-bit two's complement integer. + * @property {Number} tinyint 8-bit two's complement integer. + * @property {Number} list A collection of elements. + * @property {Number} map Key/value pairs. + * @property {Number} set A collection that contains no duplicate elements. + * @property {Number} udt User-defined type. + * @property {Number} tuple A sequence of values. 
+ */ +export declare enum dataTypes { + custom = 0, + ascii = 1, + bigint = 2, + blob = 3, + boolean = 4, + counter = 5, + decimal = 6, + double = 7, + float = 8, + int = 9, + text = 10, + timestamp = 11, + uuid = 12, + varchar = 13, + varint = 14, + timeuuid = 15, + inet = 16, + date = 17, + time = 18, + smallint = 19, + tinyint = 20, + duration = 21, + list = 32, + map = 33, + set = 34, + udt = 48, + tuple = 49 +} + +export declare namespace dataTypes { + /** + * Returns the typeInfo of a given type name + * @param {string} name + * @returns {DateTypeInfo} + */ + export function getByName(name: string): DataTypeInfo; +} + +/** + * @classdesc + * Represents a range of dates, corresponding to the Apache Solr type + * DateRangeField. + *

+ * A date range can have one or two bounds, namely lower bound and upper bound, to represent an interval of time. + * Date range bounds are both inclusive. For example: + *

+ *
+ * • 2015 TO 2016-10 represents from the first day of 2015 to the last day of October 2016.
+ * • 2015 represents during the course of the year 2015.
+ * • 2017 TO * represents any date greater than or equal to the first day of the year 2017.
+ *

+ * Note that this JavaScript representation of DateRangeField does not support Dates outside of the range + * supported by ECMAScript Date: –100,000,000 days to 100,000,000 days measured relative to midnight at the + * beginning of 01 January, 1970 UTC, with -271821-04-20T00:00:00.000Z being the minimum lower boundary + * and 275760-09-13T00:00:00.000Z the maximum upper boundary. + *
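+ * @example Parsing a range (an illustrative sketch; the bracketed syntax follows the Solr DateRangeField format)
+ * const range = DateRange.fromString('[2015 TO 2016-10]');
+ * console.log(range.lowerBound.date, range.upperBound.date);
+ * console.log(range.toString());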

+ * @memberOf module:datastax/search + */ +export declare class DateRange { + lowerBound: DateRangeBound; + upperBound: DateRangeBound; + private _type; + constructor(lowerBound: DateRangeBound, upperBound?: DateRangeBound); + /** + * Returns the DateRange representation of a given string. + *

+ * String representations of dates are always expressed in Coordinated Universal Time (UTC).

+ * @param {String} dateRangeString + */ + static fromString(dateRangeString: string): DateRange; + /** + * Deserializes the buffer into a DateRange + * @param {Buffer} buffer + * @return {DateRange} + */ + static fromBuffer(buffer: Buffer): DateRange; + /** + * Returns true if the value of this DateRange instance and other are the same. + * @param {DateRange} other + * @returns {Boolean} + */ + equals(other: DateRange): boolean; + /** + * Returns the string representation of the instance. + * @return {String} + */ + toString(): string; + /** + * @intenal + */ + toBuffer(): any; +} + +/** + * @classdesc + * Represents a date range boundary, composed by a Date and a precision. + * @param {Date} date The timestamp portion, representing a single moment in time. Consider using + * Date.UTC() method to build the Date instance. + * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are + * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @constructor + * @memberOf module:datastax/search + */ +export declare class DateRangeBound { + date: Date; + precision: number; + /* Excluded from this release type: unbounded */ + /** + * Represents a date range boundary, composed by a Date and a precision. + * @param {Date} date The timestamp portion, representing a single moment in time. Consider using + * Date.UTC() method to build the Date instance. + * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are + * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @constructor + */ + constructor(date: Date, precision: number); + /** + * Parses a date string and returns a DateRangeBound. + * @param {String} boundaryString + * @return {DateRangeBound} + */ + static fromString(boundaryString: string): DateRangeBound; + /** + * Converts a {DateRangeBound} into a lower-bounded bound by rounding down its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round down. + * @returns {DateRangeBound} with the date rounded down to the given precision. + */ + static toLowerBound(bound: DateRangeBound): DateRangeBound; + /** + * Converts a {DateRangeBound} into a upper-bounded bound by rounding up its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round up. + * @returns {DateRangeBound} with the date rounded up to the given precision. + */ + static toUpperBound(bound: DateRangeBound): DateRangeBound; + /** + * Returns the string representation of the instance. + * @return {String} + */ + toString(): string; + /** + * Returns true if the value of this DateRange instance and other are the same. + * @param {DateRangeBound} other + * @return {boolean} + */ + equals(other: DateRangeBound): boolean; + /* Excluded from this release type: isUnbounded */ +} + +/** + * Defines the possible values of date range precision. + * @type {Object} + * @property {Number} year + * @property {Number} month + * @property {Number} day + * @property {Number} hour + * @property {Number} minute + * @property {Number} second + * @property {Number} millisecond + * @memberof module:search + */ +export declare const dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; +}; + +/** + * A data-center aware Round-robin load balancing policy. 
+ * This policy provides round-robin queries over the nodes of the local + * data center. + */ +export declare class DCAwareRoundRobinPolicy extends LoadBalancingPolicy { + /* Excluded from this release type: localDc */ + private index; + private localHostsArray; + /** + * A data-center aware Round-robin load balancing policy. + * This policy provides round-robin queries over the nodes of the local + * data center. + * @param {String} [localDc] local datacenter name. This value overrides the 'localDataCenter' Client option \ + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @constructor + */ + constructor(localDc?: string); + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + /** + * Returns the distance depending on the datacenter. + * @param {Host} host + */ + getDistance(host: Host): distance; + private _cleanHostCache; + private _resolveLocalHosts; + /** + * It returns an iterator that yields local nodes. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; + getOptions(): Map; +} + +/** + * Decision information + * @typedef {Object} DecisionInfo@typedef {Object} DecisionInfo + * @property {Number} decision The decision as specified in + * [retryDecision]{@link module:policies/retry~RetryPolicy.retryDecision}. + * @property {Number} [consistency] The [consistency level]{@link module:types~consistencies}. + * @property {useCurrentHost} [useCurrentHost] Determines if it should use the same host to retry the request. + *

+ * In the case that the current host is not available anymore, it will be retried on the next host even when + * useCurrentHost is set to true. + *
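+ * @example A DecisionInfo instructing the driver to retry on the next host (an illustrative sketch)
+ * // Assumes the retryDecision constants exposed by [RetryPolicy]{@link module:policies/retry~RetryPolicy.retryDecision}
+ * const info = { decision: RetryPolicy.retryDecision.retry, useCurrentHost: false };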

+ */ +export declare type DecisionInfo = { + decision: number; + consistency?: consistencies; + useCurrentHost?: boolean; +}; + +/** + * Returns a new instance of the default address translator policy used by the driver. + * @returns {AddressTranslator} + */ +export declare const defaultAddressTranslator: () => AddressTranslator; + +/** + * A load-balancing policy implementation that attempts to fairly distribute the load based on the amount of in-flight + * requests per host. The local replicas are initially shuffled and + * between the first two nodes in the + * shuffled list, the one with fewer in-flight requests is selected as coordinator. + * +

+ * Additionally, it detects unresponsive replicas and reorders them at the back of the query plan. + *

+ * + *

+ * For graph analytics queries, it uses the preferred analytics graph server previously obtained by the driver as the first + * host in the query plan. + *

+ */ +export declare class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { + private _client; + private _hosts; + private _filteredHosts; + private _preferredHost; + private _index; + private _filter; + /** + * Creates a new instance of DefaultLoadBalancingPolicy. + * @param {String|Object} [options] The local data center name or the optional policy options object. + *

+ * Note that when providing the local data center name, it overrides the localDataCenter option at + * Client level. + *
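+ * @example Providing the policy options (an illustrative sketch; the data center name and address are placeholders)
+ * const policy = new DefaultLoadBalancingPolicy({
+ *   localDc: 'datacenter1',
+ *   filter: host => host.address !== '10.0.0.9:9042'
+ * });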

+ * @param {String} [options.localDc] local data center name. This value overrides the 'localDataCenter' Client option + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @param {Function} [options.filter] A function to apply to determine if hosts are included in the query plan. + * The function takes a Host parameter and returns a Boolean. + */ + constructor(options?: { + localDc?: string; + filter?: (host: Host) => boolean; + } | string); + /** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {Function} callback + */ + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + /** + * Returns the distance assigned by this policy to the provided host, relatively to the client instance. + * @param {Host} host + */ + getDistance(host: Host): distance; + /** + * Returns a host iterator to be used for a query execution. + * @override + * @param {String} keyspace + * @param {ExecutionOptions} executionOptions + * @param {Function} callback + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; + /** + * Yields the preferred host first, followed by the host in the provided iterable + * @param preferredHost + * @param iterable + * @private + */ + private static _getPreferredHostFirst; + /** + * Yields the local hosts without the replicas already yielded + * @param {Array} [localReplicas] The local replicas that we should avoid to include again + * @private + */ + private _getLocalHosts; + private _getReplicasAndLocalHosts; + /** + * Yields the local replicas followed by the rest of local nodes. + * @param {Array} replicas The local replicas + */ + private yieldReplicasFirst; + private _isHostNewlyUp; + /** + * Returns a boolean determining whether the host health is ok or not. + * A Host is considered unhealthy when there are enough items in the queue (10 items in-flight) but the + * Host is not responding to those requests. + * @param {Host} h + * @return {boolean} + * @private + */ + private _healthCheck; + /** + * Compares to host and returns 1 if it needs to favor the first host otherwise, -1. + * @return {number} + * @private + */ + private _compare; + private _getReplicas; + /** + * Returns an Array of hosts filtered by DC and predicate. + * @returns {Array} + * @private + */ + private _getFilteredLocalHosts; + private _getIndex; + private _sendUnhealthyToTheBack; + private _defaultFilter; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * Returns a new instance of the default load-balancing policy used by the driver. + * @param {string} [localDc] When provided, it sets the data center that is going to be used as local for the + * load-balancing policy instance. + *

+ * When localDc is undefined, the load-balancing policy instance will use the localDataCenter + * provided in the {@link ClientOptions}.
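+ * @example Using a different local data center for an execution profile (an illustrative sketch; names are placeholders)
+ * const profile = new ExecutionProfile('analytics', {
+ *   loadBalancing: defaultLoadBalancingPolicy('dc2')
+ * });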

+ * @returns {LoadBalancingPolicy} + */ +export declare const defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + +/** + * A default implementation of [ClientMetrics]{@link module:metrics~ClientMetrics} that exposes the driver events as + * Node.js events. + *

+ * An instance of [DefaultMetrics]{@link module:metrics~DefaultMetrics} is configured by default in the client; + * you can access this instance using the [Client#metrics]{@link Client#metrics} property. + *

+ * @implements {module:metrics~ClientMetrics} + * @alias module:metrics~DefaultMetrics + * @example Listening to events emitted + * defaultMetrics.errors.on('increment', err => totalErrors++); + * defaultMetrics.errors.clientTimeout.on('increment', () => clientTimeoutErrors++); + * defaultMetrics.speculativeRetries.on('increment', () => specExecsCount++); + * defaultMetrics.responses.on('increment', latency => myHistogram.record(latency)); + */ +export declare class DefaultMetrics extends ClientMetrics { + errors: EventEmitter & { + authentication: EventEmitter; + clientTimeout: EventEmitter; + connection: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + writeTimeout: EventEmitter; + }; + retries: EventEmitter & { + clientTimeout: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + writeTimeout: EventEmitter; + }; + speculativeExecutions: EventEmitter & { + increment: EventEmitter; + }; + ignoredErrors: EventEmitter; + responses: EventEmitter & { + success: EventEmitter; + }; + /** + * Creates a new instance of [DefaultMetrics]{@link module:metrics~DefaultMetrics}. + */ + constructor(); + /** @override */ + onAuthenticationError(e: Error | AuthenticationError): void; + /** @override */ + onConnectionError(e: Error): void; + /** @override */ + onReadTimeoutError(e: ResponseError): void; + /** @override */ + onWriteTimeoutError(e: ResponseError): void; + /** @override */ + onUnavailableError(e: Error): void; + /** @override */ + onClientTimeoutError(e: OperationTimedOutError): void; + /** @override */ + onOtherError(e: Error): void; + /** @override */ + onClientTimeoutRetry(e: Error): void; + /** @override */ + onOtherErrorRetry(e: Error): void; + /** @override */ + onReadTimeoutRetry(e: Error): void; + /** @override */ + onUnavailableRetry(e: Error): void; + /** @override */ + onWriteTimeoutRetry(e: Error): void; + /** @override */ + onIgnoreError(e: Error): void; + /** @override */ + onSpeculativeExecution(): void; + /** @override */ + onSuccessfulResponse(latency: number[]): void; + /** @override */ + onResponse(latency: number[]): void; +} + +export declare const defaultOptions: () => ClientOptions; + +/** + * Returns a new instance of the default reconnection policy used by the driver. + * @returns {ReconnectionPolicy} + */ +export declare const defaultReconnectionPolicy: () => ReconnectionPolicy; + +/** + * Returns a new instance of the default retry policy used by the driver. + * @returns {RetryPolicy} + */ +export declare const defaultRetryPolicy: () => RetryPolicy; + +/** + * Returns a new instance of the default speculative execution policy used by the driver. + * @returns {SpeculativeExecutionPolicy} + */ +export declare const defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + +/** + * Default implementation of [TableMappings]{@link module:mapping~TableMappings} that doesn't perform any conversion. + * @alias module:mapping~DefaultTableMappings + * @implements {module:mapping~TableMappings} + */ +export declare class DefaultTableMappings extends TableMappings { + /** + * Creates a new instance of {@link DefaultTableMappings}. + */ + constructor(); + /** @override */ + getColumnName(propName: string): string; + /** @override */ + getPropertyName(columnName: string): string; + /** + * Creates a new object instance, using object initializer. + */ + newObjectInstance(): object; +} + +/** + * Returns a new instance of the default timestamp generator used by the driver. 
+ * @returns {TimestampGenerator} + */ +export declare const defaultTimestampGenerator: () => TimestampGenerator; + +/** + * Represents the edge direction. + */ +export declare const direction: { + both: EnumValue; + in: EnumValue; + out: EnumValue; + in_: EnumValue; +}; + +/** + * Represents the distance of Cassandra node as assigned by a LoadBalancingPolicy relatively to the driver instance. + * @type {Object} + * @property {Number} local A local node. + * @property {Number} remote A remote node. + * @property {Number} ignored A node that is meant to be ignored. + */ +export declare enum distance { + local = 0, + remote = 1, + ignored = 2 +} + +declare type DocInfo = FindDocInfo | UpdateDocInfo | InsertDocInfo | RemoveDocInfo; + +/** + * Contains the error classes exposed by the driver. + * @module errors + */ +/** + * Base Error + */ +export declare class DriverError extends Error { + info: string; + isSocketError: boolean; + innerError: any; + requestNotWritten?: boolean; + constructor(message: string); +} + +/** + * Represents a bug inside the driver or in a Cassandra host. + */ +export declare class DriverInternalError extends DriverError { + /** + * Represents a bug inside the driver or in a Cassandra host. + * @param {String} message + * @constructor + */ + constructor(message: string); +} + +declare interface DseClientOptions extends ClientOptions { + id?: Uuid; + applicationName?: string; + applicationVersion?: string; + monitorReporting?: { + enabled?: boolean; + }; + graphOptions?: GraphOptions; +} + +/** + * @classdesc + * AuthProvider that provides GSSAPI authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DseGssapiAuthProvider() + * }); + * @alias module:auth~DseGssapiAuthProvider + */ +export declare class DseGssapiAuthProvider extends AuthProvider { + private _kerberos; + private authorizationId; + private service; + private hostNameResolver; + /** + * Creates a new instance of DseGssapiAuthProvider. + * @classdesc + * AuthProvider that provides GSSAPI authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {Object} [gssOptions] GSSAPI authenticator options + * @param {String} [gssOptions.authorizationId] The optional authorization ID. Providing an authorization ID allows the + * currently authenticated user to act as a different user (a.k.a. proxy authentication). + * @param {String} [gssOptions.service] The service to use. Defaults to 'dse'. + * @param {Function} [gssOptions.hostNameResolver] A method to be used to resolve the name of the Cassandra node based + * on the IP Address. Defaults to [lookupServiceResolver]{@link module:auth~DseGssapiAuthProvider.lookupServiceResolver} + * which resolves the FQDN of the provided IP to generate principals in the format of + * dse/example.com@MYREALM.COM. + * Alternatively, you can use [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver} to do a + * reverse DNS lookup or [useIpResolver]{@link module:auth~DseGssapiAuthProvider.useIpResolver} to simply use the IP + * address provided. + * @param {String} [gssOptions.user] DEPRECATED, it will be removed in future versions. For proxy authentication, use + * authorizationId instead. 
+ * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DseGssapiAuthProvider() + * }); + * @alias module:auth~DseGssapiAuthProvider + * @constructor + */ + constructor(gssOptions: { + authorizationId?: string; + service?: string; + hostNameResolver?: Function; + user?: string; + }); + /** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. + * @override + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator; + /** + * Performs a lookupService query that resolves an IPv4 or IPv6 address to a hostname. This ultimately makes a + * getnameinfo() system call which depends on the OS to do hostname resolution. + *

+ * Note: Depends on dns.lookupService which was added in 0.12. For older versions falls back on + * [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver}. + * + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ + private static lookupServiceResolver; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to a hostname. + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ + static reverseDnsResolver(ip: string, callback: Function): void; + /** + * Effectively a no op operation, returns the IP address provided. + * @param {String} ip IP address to use. + * @param {Function} callback The callback function with err and hostname arguments. + */ + static useIpResolver(ip: string, callback: Function): void; +} + +/** + * @classdesc + * AuthProvider that provides plain text authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @extends AuthProvider + * @alias module:auth~DsePlainTextAuthProvider + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); + * }); + */ +export declare class DsePlainTextAuthProvider extends AuthProvider { + private username; + private password; + private authorizationId; + /** + * Creates a new instance of DsePlainTextAuthProvider. + * @classdesc + * AuthProvider that provides plain text authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {String} username The username; cannot be null. + * @param {String} password The password; cannot be null. + * @param {String} [authorizationId] The optional authorization ID. Providing an authorization ID allows the currently + * authenticated user to act as a different user (a.k.a. proxy authentication). + * @extends AuthProvider + * @alias module:auth~DsePlainTextAuthProvider + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); + * }); + * @constructor + */ + constructor(username: string, password: string, authorizationId?: string); + /** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. + * @override + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator; +} + +/** + * Creates a new instance of {@link Duration}. + * @classdesc + * Represents a duration. A duration stores separately months, days, and seconds due to the fact that the number of + * days in a month varies, and a day can have 23 or 25 hours if a daylight saving is involved. + * @param {Number} months The number of months. + * @param {Number} days The number of days. + * @param {Number|Long} nanoseconds The number of nanoseconds. 
+ * @constructor + */ +export declare class Duration { + private months; + private days; + private nanoseconds; + constructor(months: number, days: number, nanoseconds: number | Long__default); + /** + * Returns true if the value of the Duration instance and other are the same + * @param {Duration} other + * @returns {Boolean} + */ + equals(other: Duration): boolean; + /** + * Serializes the duration and returns the representation of the value in bytes. + * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Returns the string representation of the value. + * @return {string} + */ + toString(): string; + /** + * Creates a new {@link Duration} instance from the binary representation of the value. + * @param {Buffer} buffer + * @returns {Duration} + */ + static fromBuffer(buffer: Buffer): Duration; + /** + * Creates a new {@link Duration} instance from the string representation of the value. + *

+ * Accepted formats:
+ * <ul>
+ *   <li>multiple digits followed by a time unit like: 12h30m where the time unit can be:
+ *     <ul>
+ *       <li>{@code y}: years</li>
+ *       <li>{@code mo}: months</li>
+ *       <li>{@code w}: weeks</li>
+ *       <li>{@code d}: days</li>
+ *       <li>{@code h}: hours</li>
+ *       <li>{@code m}: minutes</li>
+ *       <li>{@code s}: seconds</li>
+ *       <li>{@code ms}: milliseconds</li>
+ *       <li>{@code us} or {@code µs}: microseconds</li>
+ *       <li>{@code ns}: nanoseconds</li>
+ *     </ul>
+ *   </li>
+ *   <li>ISO 8601 format: P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W</li>
+ *   <li>ISO 8601 alternative format: P[YYYY]-[MM]-[DD]T[hh]:[mm]:[ss]</li>
+ * </ul>
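// A minimal sketch of parsing the formats listed above with Duration.fromString;
// the input strings are illustrative values only.
import { types } from "cassandra-driver";

const standard = types.Duration.fromString("12h30m");        // standard format
const iso = types.Duration.fromString("P2Y10M15DT12H30M5S"); // ISO 8601 format
const week = types.Duration.fromString("P2W");               // ISO 8601 week format

console.log(standard.toString(), iso.toString(), week.toString());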
+ * @param {String} input + * @returns {Duration} + */ + static fromString(input: string): Duration; + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseStandardFormat; + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601Format; + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601WeekFormat; + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601AlternativeFormat; +} + +/** + * @class + * @classdesc + * {@link AddressTranslator} implementation for multi-region EC2 deployments where clients are also deployed in EC2. + *

+ * Its distinctive feature is that it translates addresses according to the location of the Cassandra host: + *

+ * <ul>
+ *   <li>addresses in different EC2 regions (than the client) are unchanged</li>
+ *   <li>addresses in the same EC2 region are translated to private IPs</li>
+ * </ul>

+ * This optimizes network costs, because Amazon charges more for communication over public IPs. + *
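// A short sketch of plugging the translator into a client for an EC2 deployment;
// the contact point and data center names below are hypothetical.
import { Client, policies } from "cassandra-driver";

const client = new Client({
  contactPoints: ["10.0.1.12"],
  localDataCenter: "us-east",
  policies: {
    addressResolution: new policies.addressResolution.EC2MultiRegionTranslator()
  }
});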

+ */ +export declare class EC2MultiRegionTranslator extends AddressTranslator { + /** + * Addresses in the same EC2 region are translated to private IPs and addresses in + * different EC2 regions (than the client) are unchanged + * @param {string} address The address of a node as returned by Cassandra. + * @param {number} port The port number, as specified in the protocol options. + * @param {Function} callback Callback to invoke with the translated endpoint. + */ + translate(address: string, port: number, callback: Function): void; + /** + * Log method called to log errors that occurred while performing dns resolution. + * You can assign your own method to the class instance to do proper logging. + * @param {String} address + * @param {Error} err + */ + logError(address: string, err: Error): void; +} + +/** + * Represents a graph Edge. + * @extends Element + * @memberOf module:datastax/graph + */ +export declare class Edge extends Element { + outV: Vertex; + outVLabel: string; + inV: Vertex; + inVLabel: string; + properties: { + [s: string]: any; + }; + /** + * @param id + * @param outV + * @param {String} outVLabel + * @param {String} label + * @param inV + * @param {String} inVLabel + * @param {Object} properties + */ + constructor(id: any, outV: Vertex, outVLabel: string, label: string, inV: Vertex, inVLabel?: string, properties?: { + [s: string]: Property; + }); + private adaptProperties; +} + +/** + * Represents a graph Element. + * @abstract + * @memberOf module:datastax/graph + */ +export declare abstract class Element { + id: any; + label: string; + /** + * @param id + * @param label + */ + constructor(id: any, label: string); +} + +declare type EmptyCallback = (err: Error) => void; + +export declare class Encoder { + private encodingOptions; + private handleBuffer; + private decodeCollectionLength; + private getLengthBuffer; + private collectionLengthSize; + /* Excluded from this release type: protocolVersion */ + private readonly customDecoders; + private readonly customEncoders; + /** + * Serializes and deserializes to and from a CQL type and a Javascript Type. 
+ * @param {Number} protocolVersion + * @param {ClientOptions} options + * @constructor + */ + constructor(protocolVersion: number, options: ClientOptions); + /* Excluded from this release type: setProtocolVersion */ + private decodeBlob; + /** + * + * @param {Buffer} bytes + * @param {OtherCustomColumnInfo | VectorColumnInfo} columnInfo + * @returns + */ + private decodeCustom; + private decodeUtf8String; + private decodeAsciiString; + private decodeBoolean; + private decodeDouble; + private decodeFloat; + private decodeInt; + private decodeSmallint; + private decodeTinyint; + private decodeLong; + private _decodeCqlLongAsLong; + private _decodeCqlLongAsBigInt; + private decodeVarint; + private _decodeVarintAsInteger; + private _decodeVarintAsBigInt; + private decodeDecimal; + private decodeTimestamp; + private decodeDate; + private decodeTime; + private decodeList; + private decodeSet; + private decodeMap; + private decodeUuid; + private decodeTimeUuid; + private decodeInet; + /** + * Decodes a user defined type into an object + * @param {Buffer} bytes + * @param {UdtColumnInfo} columnInfo + * @private + */ + private decodeUdt; + private decodeTuple; + private encodeFloat; + private encodeDouble; + /** + * @param {Date|String|Long|Number} value + * @private + */ + private encodeTimestamp; + /** + * @param {Date|String|LocalDate} value + * @returns {Buffer} + * @throws {TypeError} + * @private + */ + private encodeDate; + /** + * @param {String|LocalTime} value + * @returns {Buffer} + * @throws {TypeError} + * @private + */ + private encodeTime; + /** + * @param {Uuid|String|Buffer} value + * @private + */ + private encodeUuid; + /** + * @param {String|InetAddress|Buffer} value + * @returns {Buffer} + * @private + */ + private encodeInet; + /** + * @param {Long|Buffer|String|Number} value + * @private + */ + private _encodeBigIntFromLong; + private encodeLong; + private _encodeBigIntFromBigInt; + /** + * @param {Integer|Buffer|String|Number} value + * @returns {Buffer} + * @private + */ + private _encodeVarintFromInteger; + private encodeVarint; + private _encodeVarintFromBigInt; + /** + * @param {BigDecimal|Buffer|String|Number} value + * @returns {Buffer} + * @private + */ + private encodeDecimal; + private encodeString; + private encodeUtf8String; + private encodeAsciiString; + private encodeBlob; + /** + * + * @param {any} value + * @param {OtherCustomColumnInfo | VectorColumnInfo} columnInfo + * @returns + */ + private encodeCustom; + /** + * @param {Boolean} value + * @returns {Buffer} + * @private + */ + private encodeBoolean; + /** + * @param {Number|String} value + * @private + */ + private encodeInt; + /** + * @param {Number|String} value + * @private + */ + private encodeSmallint; + /** + * @param {Number} value + * @private + */ + private encodeTinyint; + private encodeList; + private encodeSet; + /** + * Serializes a map into a Buffer + * @param value + * @param {MapColumnInfo} columnInfo + * @returns {Buffer} + * @private + */ + private encodeMap; + /** + * + * @param {any} value + * @param {UdtColumnInfo} columnInfo + * @returns + */ + private encodeUdt; + /** + * + * @param {any} value + * @param {TupleColumnInfo} columnInfo + * @returns + */ + private encodeTuple; + /** + * + * @param {Buffer} buffer + * @param {VectorColumnInfo} params + * @returns {Vector} + */ + private decodeVector; + /** + * @param {DataTypeInfo} cqlType + * @returns {Number} + */ + private serializationSizeIfFixed; + /** + * @param {Vector} value + * @param {VectorColumnInfo} params + * @returns 
{Buffer} + */ + private encodeVector; + /* Excluded from this release type: parseVectorTypeArgs */ + /* Excluded from this release type: setRoutingKeyFromUser */ + /* Excluded from this release type: setRoutingKeyFromMeta */ + /** + * @param {Array} parts + * @param {Array} routingIndexes + * @param {Function} encodeParam + * @returns {Number} The total length + * @private + */ + private _encodeRoutingKeyParts; + /* Excluded from this release type: parseTypeName */ + /** + * @param {String} keyspace + * @param {Array} typeNames + * @param {Function} udtResolver + * @returns {Promise} + * @private + */ + private _parseChildTypes; + /* Excluded from this release type: parseFqTypeName */ + /* Excluded from this release type: parseKeyTypes */ + /** + * + * @param {string} typeName + * @param {number} startIndex + * @param {number} length + * @returns {UdtColumnInfo} + */ + private _parseUdtName; + private decoders; + private encoders; + /** + * Decodes Cassandra bytes into Javascript values. + *

+ * This is part of an experimental API; it can be changed in future releases. + *

+ * @param {Buffer} buffer Raw buffer to be decoded. + * @param {DataTypeInfo} type + */ + decode: (buffer: Buffer, type: DataTypeInfo) => any; + /** + * Encodes Javascript types into Buffer according to the Cassandra protocol. + *

+ * This is part of an experimental API; it can be changed in future releases. + *

+ * @param {*} value The value to be converted. + * @param {DataTypeInfo | Number | String} typeInfo The type information. + *

+ * It can be either a:
+ * <ul>
+ *   <li>A String representing the data type.</li>
+ *   <li>A Number with one of the values of {@link module:types~dataTypes dataTypes}.</li>
+ *   <li>An Object containing the type.code as one of the values of
+ *     {@link module:types~dataTypes dataTypes} and type.info.</li>
+ * </ul>
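// A rough sketch of the experimental encode/decode API described above.
// Constructing an Encoder directly and the minimal options object are assumptions
// made for illustration; the driver normally creates encoders internally.
import { Encoder, types } from "cassandra-driver";

const encoder = new Encoder(types.protocolVersion.maxSupported, {} as any);

// Encode using a type name (string) and decode using a { code, info } type descriptor.
const buf = encoder.encode(42, "int");
const value = encoder.decode(buf, { code: types.dataTypes.int, info: null });
console.log(value); // 42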
+ * @returns {Buffer} + * @throws {TypeError} When there is an encoding error + */ + encode: (value: any, typeInfo: DataTypeInfo | number | string) => Buffer; + /* Excluded from this release type: guessDataType */ + private static isTypedArray; +} + +declare class EnumValue { + /* Excluded from this release type: typeName */ + /* Excluded from this release type: elementName */ + /* Excluded from this release type: __constructor */ + toString(): any; +} + +export declare const errors: { + ArgumentError: typeof ArgumentError; + AuthenticationError: typeof AuthenticationError; + BusyConnectionError: typeof BusyConnectionError; + DriverError: typeof DriverError; + OperationTimedOutError: typeof OperationTimedOutError; + DriverInternalError: typeof DriverInternalError; + NoHostAvailableError: typeof NoHostAvailableError; + NotSupportedError: typeof NotSupportedError; + ResponseError: typeof ResponseError; + VIntOutOfRangeException: typeof VIntOutOfRangeException; +}; + +/** + * Utilities for concurrent query execution with the DataStax Node.js Driver. + * @module concurrent + */ +/** + * Executes multiple queries concurrently at the defined concurrency level. + * @static + * @param {Client} client The {@link Client} instance. + * @param {String|Array<{query, params}>} query The query to execute per each parameter item. + * @param {Array|Stream|Object} parameters An {@link Array} or a readable {@link Stream} composed of {@link Array} + * items representing each individual set of parameters. Per each item in the {@link Array} or {@link Stream}, an + * execution is going to be made. + * @param {Object} [options] The execution options. + * @param {String} [options.executionProfile] The execution profile to be used. + * @param {Number} [options.concurrencyLevel=100] The concurrency level to determine the maximum amount of in-flight + * operations at any given time + * @param {Boolean} [options.raiseOnFirstError=true] Determines whether execution should stop after the first failed + * execution and the corresponding exception will be raised. + * @param {Boolean} [options.collectResults=false] Determines whether each individual + * [ResultSet]{@link module:types~ResultSet} instance should be collected in the grouped result. + * @param {Number} [options.maxErrors=100] The maximum amount of errors to be collected before ignoring the rest of + * the error results. + * @returns {Promise} A Promise of {@link ResultSetGroup} that is resolved when all the + * executions completed and it's rejected when raiseOnFirstError is true and there is one + * or more failures. + * @example Using a fixed query and an Array of Arrays as parameters + * const query = 'INSERT INTO table1 (id, value) VALUES (?, ?)'; + * const parameters = [[1, 'a'], [2, 'b'], [3, 'c'], ]; // ... 
+ * const result = await executeConcurrent(client, query, parameters); + * @example Using a fixed query and a readable stream + * const stream = csvStream.pipe(transformLineToArrayStream); + * const result = await executeConcurrent(client, query, stream); + * @example Using a different queries + * const queryAndParameters = [ + * { query: 'INSERT INTO videos (id, name, user_id) VALUES (?, ?, ?)', + * params: [ id, name, userId ] }, + * { query: 'INSERT INTO user_videos (user_id, id, name) VALUES (?, ?, ?)', + * params: [ userId, id, name ] }, + * { query: 'INSERT INTO latest_videos (id, name, user_id) VALUES (?, ?, ?)', + * params: [ id, name, userId ] }, + * ]; + * + * const result = await executeConcurrent(client, queryAndParameters); + */ +export declare function executeConcurrent(client: Client, query: string, parameters: any[][] | Readable, options?: Options): Promise; + +export declare function executeConcurrent(client: Client, queries: Array<{ + query: string; + params: any[]; +}>, options?: Options): Promise; + +/** + * A base class that represents a wrapper around the user provided query options with getter methods and proper + * default values. + *

+ * Note that getter methods might return undefined when not set on the query options or default + * {@link Client} options. + *
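// A hedged sketch of reading ExecutionOptions from within a custom retry policy;
// it assumes OperationInfo exposes the executionOptions of the failing request.
import { policies } from "cassandra-driver";

class IdempotentOnlyRetryPolicy extends policies.retry.RetryPolicy {
  onRequestError(info: any, consistency: any, err: Error) {
    // Retry on the next host only when the statement was marked idempotent.
    return info.executionOptions.isIdempotent()
      ? this.retryResult(undefined, false)
      : this.rethrowResult();
  }
}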

+ */ +export declare class ExecutionOptions { + /** + * Creates a new instance of {@link ExecutionOptions}. + */ + constructor(); + /* Excluded from this release type: empty */ + /** + * Determines if the stack trace before the query execution should be maintained. + * @abstract + * @returns {Boolean} + */ + getCaptureStackTrace(): boolean; + /** + * Gets the [Consistency level]{@link module:types~consistencies} to be used for the execution. + * @abstract + * @returns {Number} + */ + getConsistency(): consistencies; + /** + * Key-value payload to be passed to the server. On the server side, implementations of QueryHandler can use + * this data. + * @abstract + * @returns {{ [key: string]: any }} + */ + getCustomPayload(): { + [key: string]: any; + }; + /** + * Gets the amount of rows to retrieve per page. + * @abstract + * @returns {Number} + */ + getFetchSize(): number; + /** + * When a fixed host is set on the query options and the query plan for the load-balancing policy is not used, it + * gets the host that should handle the query. + * @returns {Host} + */ + getFixedHost(): Host; + /** + * Gets the type hints for parameters given in the query, ordered as for the parameters. + * @abstract + * @returns {string[] | string[][]} + */ + getHints(): string[] | string[][]; + /** + * Determines whether the driver must retrieve the following result pages automatically. + *

+ * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. + *

+ * @abstract + * @returns {Boolean} + */ + isAutoPage(): boolean; + /** + * Determines whether its a counter batch. Only valid for [Client#batch()]{@link Client#batch}, it will be ignored by + * other methods. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isBatchCounter(): boolean; + /** + * Determines whether the batch should be written to the batchlog. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isBatchLogged(): boolean; + /** + * Determines whether the query can be applied multiple times without changing the result beyond the initial + * application. + * @abstract + * @returns {Boolean} + */ + isIdempotent(): boolean; + /** + * Determines whether the query must be prepared beforehand. + * @abstract + * @returns {Boolean} A Boolean value, it can't be undefined. + */ + isPrepared(): boolean; + /** + * Determines whether query tracing is enabled for the execution. + * @abstract + * @returns {Boolean} + */ + isQueryTracing(): boolean; + /** + * Gets the keyspace for the query when set at query options level. + *

+ * Note that this method will return undefined when the keyspace is not set at query options level. + * It will only return the keyspace name when the user provided a different keyspace than the current + * {@link Client} keyspace. + *

+ * @abstract + * @returns {String} + */ + getKeyspace(): string; + /** + * Gets the load balancing policy used for this execution. + * @returns {LoadBalancingPolicy} A LoadBalancingPolicy instance, it can't be undefined. + */ + getLoadBalancingPolicy(): LoadBalancingPolicy; + /** + * Gets the Buffer representing the paging state. + * @abstract + * @returns {Buffer} + */ + getPageState(): Buffer; + /* Excluded from this release type: getPreferredHost */ + /** + * Gets the query options as provided to the execution method without setting the default values. + * @returns {QueryOptions} + */ + getRawQueryOptions(): QueryOptions; + /** + * Gets the timeout in milliseconds to be used for the execution per coordinator. + *

+ * A value of 0 disables client side read timeout for the execution. Default: undefined. + *
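// A small sketch of disabling the client-side read timeout for a single execution,
// as described above (a value of 0 disables it). Endpoint and query are hypothetical.
import { Client } from "cassandra-driver";

const client = new Client({ contactPoints: ["127.0.0.1"], localDataCenter: "dc1" });

client.execute("SELECT key FROM system.local", [], { readTimeout: 0 })
  .then(rs => console.log(rs.rows));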

+ * @abstract + * @returns {Number} + */ + getReadTimeout(): number; + /** + * Gets the [retry policy]{@link module:policies/retry} to be used. + * @abstract + * @returns {RetryPolicy} A RetryPolicy instance, it can't be undefined. + */ + getRetryPolicy(): RetryPolicy; + /* Excluded from this release type: getRowCallback */ + /* Excluded from this release type: getOrGenerateTimestamp */ + /* Excluded from this release type: getRoutingIndexes */ + /** + * Gets the partition key(s) to determine which coordinator should be used for the query. + * @abstract + * @returns {Buffer|Array} + */ + getRoutingKey(): Buffer | Array; + /* Excluded from this release type: getRoutingNames */ + /** + * Gets the the consistency level to be used for the serial phase of conditional updates. + * @abstract + * @returns {consistencies} + */ + getSerialConsistency(): consistencies; + /** + * Gets the provided timestamp for the execution in microseconds from the unix epoch (00:00:00, January 1st, 1970). + *

+ * When a timestamp generator is used, this method returns undefined.

+ * @abstract + * @returns {Number|Long|undefined|null} + */ + getTimestamp(): number | Long__default | undefined | null; + /* Excluded from this release type: setHints */ + /* Excluded from this release type: setKeyspace */ + /* Excluded from this release type: setPageState */ + /* Excluded from this release type: setPreferredHost */ + /* Excluded from this release type: setRoutingIndexes */ + /* Excluded from this release type: setRoutingKey */ +} + +/** + * @classdesc + * Represents a set of configurations to be used in a statement execution for a single {@link Client} instance. + *

+ * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances. + *

+ * @example + * const { Client, ExecutionProfile } = require('cassandra-driver'); + * const client = new Client({ + * contactPoints: ['host1', 'host2'], + * profiles: [ + * new ExecutionProfile('metrics-oltp', { + * consistency: consistency.localQuorum, + * retry: myRetryPolicy + * }) + * ] + * }); + * + * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); + */ +export declare class ExecutionProfile { + /** + * Consistency level. + * @type {Number} + */ + consistency?: consistencies; + /** + * Load-balancing policy + * @type {LoadBalancingPolicy} + */ + loadBalancing?: LoadBalancingPolicy; + /** + * Name of the execution profile. + * @type {String} + */ + name: string; + /** + * Client read timeout. + * @type {Number} + */ + readTimeout?: number; + /** + * Retry policy. + * @type {RetryPolicy} + */ + retry?: RetryPolicy; + /** + * Serial consistency level. + * @type {Number} + */ + serialConsistency?: consistencies; + /** + * The graph options for this profile. + * @type {Object} + * @property {String} language The graph language. + * @property {String} name The graph name. + * @property {String} readConsistency The consistency to use for graph write queries. + * @property {String} source The graph traversal source. + * @property {String} writeConsistency The consistency to use for graph write queries. + */ + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + /* Excluded from this release type: results */ + }; + /** + * Creates a new instance of {@link ExecutionProfile}. + * Represents a set configurations to be used in a statement execution to be used for a single {@link Client} instance. + *

+ * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances. + *

+ * @param {String} name Name of the execution profile. + *

+ * Use 'default' to specify that the new instance should be the default {@link ExecutionProfile} if no + * profile is specified in the execution. + *

+ * @param {Object} [options] Profile options; when any of the options is not specified, the {@link Client} will use + * the ones defined in the default profile. + * @param {Number} [options.consistency] The consistency level to use for this profile. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @param {Object} [options.graphOptions] + * @param {String} [options.graphOptions.language] The graph language to use for graph queries. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.results] The protocol to use for serializing and deserializing graph results. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.name] The graph name to use for graph queries. + * @param {Number} [options.graphOptions.readConsistency] The consistency level to use for graph read queries. + * @param {String} [options.graphOptions.source] The graph traversal source name to use for graph queries. + * @param {Number} [options.graphOptions.writeConsistency] The consistency level to use for graph write queries. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * Profile options, when any of the options is not specified the {@link Client} will the use + * the ones defined in the default profile. + * @param {Number} [options.consistency] The consistency level to use for this profile. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @param {Object} [options.graphOptions] + * @param {String} [options.graphOptions.language] The graph language to use for graph queries. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.results] The protocol to use for serializing and deserializing graph results. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected + * to be defined manually by the user. + *

+ * @param {String} [options.graphOptions.name] The graph name to use for graph queries. + * @param {Number} [options.graphOptions.readConsistency] The consistency level to use for graph read queries. + * @param {String} [options.graphOptions.source] The graph traversal source name to use for graph queries. + * @param {Number} [options.graphOptions.writeConsistency] The consistency level to use for graph write queries. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @example + * const { Client, ExecutionProfile } = require('cassandra-driver'); + * const client = new Client({ + * contactPoints: ['host1', 'host2'], + * profiles: [ + * new ExecutionProfile('metrics-oltp', { + * consistency: consistency.localQuorum, + * retry: myRetryPolicy + * }) + * ] + * }); + * + * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); + * @constructor + */ + constructor(name: string, options?: { + consistency?: consistencies; + loadBalancing?: LoadBalancingPolicy; + readTimeout?: number; + retry?: RetryPolicy; + serialConsistency?: consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + }; + }); +} + +/** + * A reconnection policy that waits exponentially longer between each + * reconnection attempt (but keeps a constant delay once a maximum delay is reached). + *

+ * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations + * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the + * delay to be less than the base delay, or more than the max delay. + *
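// A minimal sketch of configuring the policy described above; the delays shown
// (1 s base, 10 min max, no delay for the first attempt) are arbitrary example values.
import { Client, policies } from "cassandra-driver";

const reconnection = new policies.reconnection.ExponentialReconnectionPolicy(1000, 10 * 60 * 1000, true);

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "dc1",
  policies: { reconnection }
});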

+ */ +export declare class ExponentialReconnectionPolicy extends ReconnectionPolicy { + private baseDelay; + private maxDelay; + private startWithNoDelay; + /** + * A reconnection policy that waits exponentially longer between each + * reconnection attempt (but keeps a constant delay once a maximum delay is reached). + *

+ * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations + * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the + * delay to be less than the base delay, or more than the max delay. + *

+ * @param {Number} baseDelay The base delay in milliseconds to use for the schedules created by this policy. + * @param {Number} maxDelay The maximum delay in milliseconds to wait between two reconnection attempt. + * @param {Boolean} [startWithNoDelay] Determines if the first attempt should be zero delay + * @constructor + */ + constructor(baseDelay: number, maxDelay: number, startWithNoDelay?: boolean); + /** + * A new schedule that uses an exponentially growing delay between reconnection attempts. + * @returns {Iterator} An infinite iterator. + */ + newSchedule(): Iterator; + /** + * Adds a random portion of +-15% to the delay provided. + * Initially, its adds a random value of 15% to avoid reconnection before reaching the base delay. + * When the schedule reaches max delay, only subtracts a random portion of 15%. + */ + private _addJitter; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * @classdesc + * A retry policy that never retries nor ignores. + *

+ * All of the methods of this retry policy unconditionally return + * [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. If this policy is used, retry logic will have to be + * implemented in business code. + *
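// A brief sketch of opting into the fallthrough behaviour described above, so that all
// retry decisions surface to application code; connection details are hypothetical.
import { Client, policies } from "cassandra-driver";

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "dc1",
  policies: { retry: new policies.retry.FallthroughRetryPolicy() }
});

client.execute("SELECT key FROM system.local")
  .catch(err => {
    // With this policy every failure is rethrown, so any retry logic lives here.
    console.error("query failed:", err);
  });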

+ * @alias module:policies/retry~FallthroughRetryPolicy + * @extends RetryPolicy + */ +export declare class FallthroughRetryPolicy extends RetryPolicy { + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; +} + +export declare type FindDocInfo = { + fields?: string[]; + orderBy?: { + [key: string]: string; + }; + limit?: number; +}; + +/* Excluded from this release type: frameFlags */ + +/* Excluded from this release type: FrameHeader */ + +/* Excluded from this release type: generateTimestamp */ + +export declare class Geometry { + static types: { + readonly Point2D: 1; + readonly LineString: 2; + readonly Polygon: 3; + }; + /* Excluded from this release type: getEndianness */ + /* Excluded from this release type: readInt32 */ + /* Excluded from this release type: readDouble */ + /* Excluded from this release type: writeInt32 */ + /* Excluded from this release type: writeDouble */ + /* Excluded from this release type: writeEndianness */ + /* Excluded from this release type: useBESerialization */ +} + +/** + * Geometry module. + *

+ * Contains the classes to represent the set of additional CQL types for geospatial data that come with + * DSE 5.0. + *
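// A short sketch of the geospatial types mentioned above (DSE only); coordinates are made up.
import { geometry } from "cassandra-driver";

const point = new geometry.Point(10.99, 20.02);
const line = new geometry.LineString(new geometry.Point(10.99, 20.02), new geometry.Point(14, 26));

console.log(point.toString()); // WKT, e.g. POINT (10.99 20.02)
console.log(line.toString());  // WKT, e.g. LINESTRING (10.99 20.02, 14 26)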

+ * @module geometry + */ +export declare const geometry: { + Point: typeof Point; + LineString: typeof LineString; + Polygon: typeof Polygon; + Geometry: typeof Geometry; +}; + +/* Excluded from this release type: getCustomTypeSerializers */ + +/* Excluded from this release type: getDataTypeNameByCode */ + +export declare const graph: { + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: EnumValue; + in: EnumValue; + out: EnumValue; + in_: EnumValue; + }; + /* Excluded from this release type: getCustomTypeSerializers */ + GraphResultSet: typeof GraphResultSet; + /* Excluded from this release type: GraphTypeWrapper */ + t: { + id: EnumValue; + key: EnumValue; + label: EnumValue; + value: EnumValue; + }; + /* Excluded from this release type: UdtGraphWrapper */ +}; + +declare type GraphOptions = { + language?: string; + name?: string; + readConsistency?: consistencies; + readTimeout?: number; + source?: string; + writeConsistency?: consistencies; +}; + +declare interface GraphQueryOptions extends QueryOptions { + graphLanguage?: string; + graphName?: string; + graphReadConsistency?: consistencies; + graphSource?: string; + graphWriteConsistency?: consistencies; + graphResults?: string; +} + +/** + * Represents the result set of a [graph query execution]{@link Client#executeGraph} containing vertices, edges, or + * scalar values depending on the query. + *

+ * It allows iteration of the items using for..of statements under ES2015 and exposes + * forEach(), first(), and toArray() to access the underlying items. + *

+ * @example + * for (let vertex of result) { ... } + * @example + * const arr = result.toArray(); + * @example + * const vertex = result.first(); + * @alias module:datastax/graph~GraphResultSet + */ +export declare class GraphResultSet implements Iterable { + info: typeof ResultSet.prototype.info; + length: number; + pageState: string; + private rows; + private rowParser; + /** + * @param {ResultSet} result The result set from the query execution. + * @param {Function} [rowParser] Optional row parser function. + * @constructor + */ + constructor(result: ResultSet, rowParser?: Function); + /** + * Returns the first element of the result or null if the result is empty. + * @returns {Object} + */ + first(): object | null; + /** + * Executes a provided function once per result element. + * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. + * @param {Object} [thisArg] Value to use as this when executing callback. + */ + forEach(callback: Function, thisArg?: object): void; + /** + * Returns an Array of graph result elements (vertex, edge, scalar). + * @returns {Array} + */ + toArray(): Array; + /** + * Returns a new Iterator object that contains the values for each index in the result. + * @returns {Iterator} + */ + values(): Iterator; + /** + * Gets the traversers contained in the result set. + * @returns {IterableIterator} + */ + getTraversers(): IterableIterator; + /** + * Makes the result set iterable using `for..of`. + * @returns {Iterator} + */ + [Symbol.iterator](): Iterator; +} + +/* Excluded from this release type: GraphTypeWrapper */ + +/** + * Represents a unique set of values. + * @constructor + */ +declare class HashSet { + length: number; + items: object; + constructor(); + /** + * Adds a new item to the set. + * @param {Object} key + * @returns {boolean} Returns true if it was added to the set; false if the key is already present. + */ + add(key: any): boolean; + /** + * @returns {boolean} Returns true if the key is present in the set. + */ + contains(key: any): boolean; + /** + * Removes the item from set. + * @param key + * @return {boolean} Returns true if the key existed and was removed, otherwise it returns false. + */ + remove(key: any): boolean; + /** + * Returns an array containing the set items. + * @returns {Array} + */ + toArray(): Array; +} + +/** + * Represents a Cassandra node. + * @extends EventEmitter + */ +export declare class Host extends EventEmitter.EventEmitter { + address: string; + private setDownAt; + private log; + /* Excluded from this release type: isUpSince */ + /* Excluded from this release type: pool */ + cassandraVersion: string; + datacenter: string; + rack: string; + tokens: string[]; + hostId: Uuid; + /* Excluded from this release type: dseVersion */ + /* Excluded from this release type: workloads */ + private _distance; + private _healthResponseCounter; + /* Excluded from this release type: reconnectionSchedule */ + /* Excluded from this release type: options */ + private reconnectionDelay; + private _healthResponseCountTimer; + private _metadata; + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: setDown */ + /* Excluded from this release type: setUp */ + /* Excluded from this release type: checkIsUp */ + /* Excluded from this release type: shutdown */ + /** + * Determines if the node is UP now (seen as UP by the driver). + * @returns {boolean} + */ + isUp(): boolean; + /** + * Determines if the host can be considered as UP. 
+ * Deprecated: Use {@link Host#isUp()} instead. + * @returns {boolean} + */ + canBeConsideredAsUp(): boolean; + /* Excluded from this release type: setDistance */ + /* Excluded from this release type: setProtocolVersion */ + /* Excluded from this release type: borrowConnection */ + /* Excluded from this release type: warmupPool */ + /* Excluded from this release type: initializePool */ + /* Excluded from this release type: getActiveConnection */ + /* Excluded from this release type: getResponseCount */ + /* Excluded from this release type: checkHealth */ + /* Excluded from this release type: removeFromPool */ + /* Excluded from this release type: getInFlight */ + /** + * Validates that the internal state of the connection pool. + * If the pool size is smaller than expected, schedule a new connection attempt. + * If the amount of connections is 0 for not ignored hosts, the host must be down. + * @private + */ + private _checkPoolState; + /** + * Executed after an scheduled new connection attempt finished + * @private + */ + private _onNewConnectionOpen; + /** + * Returns an array containing the Cassandra Version as an Array of Numbers having the major version in the first + * position. + * @returns {Array.} + */ + getCassandraVersion(): Array; + /** + * Gets the DSE version of the host as an Array, containing the major version in the first position. + * In case the cluster is not a DSE cluster, it returns an empty Array. + * @returns {Array.} + */ + getDseVersion(): Array; +} + +/** + * Represents an associative-array of {@link Host hosts} that can be iterated. + * It creates an internal copy when adding or removing, making it safe to iterate using the values() + * method within async operations. + * @extends events.EventEmitter + * @constructor + */ +export declare class HostMap extends EventEmitter.EventEmitter { + private _items; + private _values; + length: number; + /* Excluded from this release type: __constructor */ + /** + * Executes a provided function once per map element. + * @param {Function} callback + */ + forEach(callback: (value: Host, key: string) => void): void; + /** + * Gets a {@link Host host} by key or undefined if not found. + * @param {String} key + * @returns {Host} + */ + get(key: string): Host; + /** + * Returns an array of host addresses. + * @returns {Array.} + */ + keys(): Array; + /* Excluded from this release type: remove */ + /* Excluded from this release type: removeMultiple */ + /* Excluded from this release type: set */ + /* Excluded from this release type: slice */ + /* Excluded from this release type: push */ + /** + * Returns a shallow copy of the values of the map. + * @returns {Array.} + */ + values(): Array; + /* Excluded from this release type: clear */ + /* Excluded from this release type: inspect */ + /* Excluded from this release type: toJSON */ +} + +/** + * @classdesc + * A retry policy that avoids retrying non-idempotent statements. + *

+ * In case of write timeouts or unexpected errors, this policy will always return + * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent + * (see [QueryOptions.isIdempotent]{@link QueryOptions}). + *

+ * For all other cases, this policy delegates the decision to the child policy. + * @extends module:policies/retry~RetryPolicy + * @deprecated Since version 4.0 non-idempotent operations are never tried for write timeout or request error, use the + * default retry policy instead. + */ +export declare class IdempotenceAwareRetryPolicy extends RetryPolicy { + private _childPolicy; + /** + * Creates a new instance of IdempotenceAwareRetryPolicy. + * This is a retry policy that avoids retrying non-idempotent statements. + *

+ * In case of write timeouts or unexpected errors, this policy will always return + * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent + * (see [QueryOptions.isIdempotent]{@link QueryOptions}). + *

+ * For all other cases, this policy delegates the decision to the child policy. + * @param {RetryPolicy} [childPolicy] The child retry policy to wrap. When not defined, it will use an instance of + * [RetryPolicy]{@link module:policies/retry~RetryPolicy} as child policy. + * @constructor + * @deprecated Since version 4.0 non-idempotent operations are never tried for write timeout or request error, use the + * default retry policy instead. + */ + constructor(childPolicy?: RetryPolicy); + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + /** + * If the query is not idempotent, it return a rethrow decision. Otherwise, it relies on the child policy to decide. + */ + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; +} + +/** + * @classdesc Describes a CQL index. + * @alias module:metadata~Index + */ +export declare class Index { + /** + * Name of the index. + * @type {String} + */ + name: string; + /** + * Target of the index. + * @type {String} + */ + target: string; + /** + * A numeric value representing index kind (0: custom, 1: keys, 2: composite); + * @type {IndexKind} + */ + kind: IndexKind; + /** + * An associative array containing the index options + * @type {Object} + */ + options: object; + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: fromRows */ + /* Excluded from this release type: fromColumnRows */ + /** + * Determines if the index is of composites kind + * @returns {Boolean} + */ + isCompositesKind(): boolean; + /** + * Determines if the index is of keys kind + * @returns {Boolean} + */ + isKeysKind(): boolean; + /** + * Determines if the index is of custom kind + * @returns {Boolean} + */ + isCustomKind(): boolean; +} + +export declare enum IndexKind { + custom = 0, + keys = 1, + composites = 2 +} + +/** @module types */ +/** + * @class + * @classdesc Represents an v4 or v6 Internet Protocol (IP) address. + */ +export declare class InetAddress { + private buffer; + length: number; + version: number; + /** + * Creates a new instance of InetAddress + * @param {Buffer} buffer + * @constructor + */ + constructor(buffer: Buffer); + /** + * Parses the string representation and returns an Ip address + * @param {String} value + */ + static fromString(value: string): InetAddress; + /** + * Compares 2 addresses and returns true if the underlying bytes are the same + * @param {InetAddress} other + * @returns {Boolean} + */ + equals(other: InetAddress): boolean; + /** + * Returns the underlying buffer + * @returns {Buffer} + */ + getBuffer(): Buffer; + /* Excluded from this release type: inspect */ + /** + * Returns the string representation of the IP address. + *

+ * For v4 IP addresses, a string in the form of d.d.d.d is returned.
+ *
+ * For v6 IP addresses, a string in the form of x:x:x:x:x:x:x:x is returned, where the 'x's are the hexadecimal
+ * values of the eight 16-bit pieces of the address, according to rfc5952.
+ * In cases where there is more than one field of only zeros, it can be shortened. For example, 2001:0db8:0:0:0:1:0:1
+ * will be expressed as 2001:0db8::1:0:1.
+ *
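// A tiny sketch of the string forms described above; the addresses are examples only.
import { types } from "cassandra-driver";

const v4 = types.InetAddress.fromString("10.0.0.1");
const v6 = types.InetAddress.fromString("2001:0db8:0:0:0:1:0:1");

console.log(v4.toString());                 // "10.0.0.1"
console.log(v6.toString());                 // shortened form, e.g. "2001:0db8::1:0:1"
console.log(v4.equals(types.InetAddress.fromString("10.0.0.1"))); // true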

+ * @param {String} [encoding] If set to 'hex', the hex representation of the buffer is returned. + * @returns {String} + */ + toString(encoding?: string): string; + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string; + /** + * Validates for a IPv4-Mapped IPv6 according to https://tools.ietf.org/html/rfc4291#section-2.5.5 + * @private + * @param {Buffer} buffer + */ + private static isValidIPv4Mapped; +} + +/* Excluded from this release type: init */ + +export declare type InsertDocInfo = { + fields?: string[]; + ttl?: number; + ifNotExists?: boolean; +}; + +declare const inspectMethod: unique symbol; + +/** + * A two's-complement integer an array containing bits of the + * integer in 32-bit (signed) pieces, given in little-endian order (i.e., + * lowest-order bits in the first piece), and the sign of -1 or 0. + * + * See the from* functions below for other convenient ways of constructing + * Integers. + * + * The internal representation of an integer is an array of 32-bit signed + * pieces, along with a sign (0 or -1) that indicates the contents of all the + * other 32-bit pieces out to infinity. We use 32-bit pieces because these are + * the size of integers on which Javascript performs bit-operations. For + * operations like addition and multiplication, we split each number into 16-bit + * pieces, which can easily be multiplied within Javascript's floating-point + * representation without overflow or change in sign. + * @final + */ +export declare class Integer { + private bits_; + private sign_; + /** + * Constructs a two's-complement integer an array containing bits of the + * integer in 32-bit (signed) pieces, given in little-endian order (i.e., + * lowest-order bits in the first piece), and the sign of -1 or 0. + * + * See the from* functions below for other convenient ways of constructing + * Integers. + * + * The internal representation of an integer is an array of 32-bit signed + * pieces, along with a sign (0 or -1) that indicates the contents of all the + * other 32-bit pieces out to infinity. We use 32-bit pieces because these are + * the size of integers on which Javascript performs bit-operations. For + * operations like addition and multiplication, we split each number into 16-bit + * pieces, which can easily be multiplied within Javascript's floating-point + * representation without overflow or change in sign. + * + * @constructor + * @param {Array.} bits Array containing the bits of the number. + * @param {number} sign The sign of the number: -1 for negative and 0 positive. + * @final + */ + constructor(bits: number[], sign: number); + /** + * A cache of the Integer representations of small integer values. + * @type {!Object} + * @private + */ + private static IntCache_; + /** + * Returns an Integer representing the given (32-bit) integer value. + * @param {number} value A 32-bit integer value. + * @return {!Integer} The corresponding Integer value. + */ + static fromInt(value: number): Integer; + /** + * Returns an Integer representing the given value, provided that it is a finite + * number. Otherwise, zero is returned. + * @param {number} value The value in question. + * @return {!Integer} The corresponding Integer value. 
+ */ + static fromNumber(value: number): Integer; + /** + * Returns a Integer representing the value that comes by concatenating the + * given entries, each is assumed to be 32 signed bits, given in little-endian + * order (lowest order bits in the lowest index), and sign-extending the highest + * order 32-bit value. + * @param {Array.} bits The bits of the number, in 32-bit signed pieces, + * in little-endian order. + * @return {!Integer} The corresponding Integer value. + */ + static fromBits(bits: number[]): Integer; + /** + * Returns an Integer representation of the given string, written using the + * given radix. + * @param {string} str The textual representation of the Integer. + * @param {number=} opt_radix The radix in which the text is written. + * @return {!Integer} The corresponding Integer value. + */ + static fromString(str: string, opt_radix?: number): Integer; + /** + * Returns an Integer representation of a given big endian Buffer. + * The internal representation of bits contains bytes in groups of 4 + * @param {Buffer} buf + * @returns {Integer} + */ + static fromBuffer(buf: Buffer): Integer; + /** + * Returns a big endian buffer representation of an Integer. + * Internally the bits are represented using 4 bytes groups (numbers), + * in the Buffer representation there might be the case where we need less than the 4 bytes. + * For example: 0x00000001 -> '01', 0xFFFFFFFF -> 'FF', 0xFFFFFF01 -> 'FF01' + * @param {Integer} value + * @returns {Buffer} + */ + static toBuffer(value: Integer): Buffer; + /** + * A number used repeatedly in calculations. This must appear before the first + * call to the from* functions below. + * @type {number} + * @private + */ + private static TWO_PWR_32_DBL_; + /** @type {!Integer} */ + static ZERO: Integer; + /** @type {!Integer} */ + static ONE: Integer; + /** + * @type {!Integer} + * @private + */ + private static TWO_PWR_24_; + /** + * Returns the value, assuming it is a 32-bit integer. + * @return {number} The corresponding int value. + */ + toInt(): number; + /** @return {number} The closest floating-point representation to this value. */ + toNumber(): number; + /** + * @param {number=} opt_radix The radix in which the text should be written. + * @return {string} The textual representation of this value. + * @override + */ + toString(opt_radix?: number): string; + /** + * Returns the index-th 32-bit (signed) piece of the Integer according to + * little-endian order (i.e., index 0 contains the smallest bits). + * @param {number} index The index in question. + * @return {number} The requested 32-bits as a signed number. + */ + getBits(index: number): number; + /** + * Returns the index-th 32-bit piece as an unsigned number. + * @param {number} index The index in question. + * @return {number} The requested 32-bits as an unsigned number. + */ + getBitsUnsigned(index: number): number; + /** @return {number} The sign bit of this number, -1 or 0. */ + getSign(): number; + /** @return {boolean} Whether this value is zero. */ + isZero(): boolean; + /** @return {boolean} Whether this value is negative. */ + isNegative(): boolean; + /** @return {boolean} Whether this value is odd. */ + isOdd(): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer equals the other. + */ + equals(other: Integer): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer does not equal the other. 
+ */ + notEquals(other: Integer): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than the other. + */ + greaterThan(other: Integer): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than or equal to the other. + */ + greaterThanOrEqual(other: Integer): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than the other. + */ + lessThan(other: Integer): boolean; + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than or equal to the other. + */ + lessThanOrEqual(other: Integer): boolean; + /** + * Compares this Integer with the given one. + * @param {Integer} other Integer to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: Integer): number; + /** + * Returns an integer with only the first numBits bits of this value, sign + * extended from the final bit. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} The shorted integer value. + */ + shorten(numBits: number): Integer; + /** @return {!Integer} The negation of this value. */ + negate(): Integer; + /** + * Returns the sum of this and the given Integer. + * @param {Integer} other The Integer to add to this. + * @return {!Integer} The Integer result. + */ + add(other: Integer): Integer; + /** + * Returns the difference of this and the given Integer. + * @param {Integer} other The Integer to subtract from this. + * @return {!Integer} The Integer result. + */ + subtract(other: Integer): Integer; + /** + * Returns the product of this and the given Integer. + * @param {Integer} other The Integer to multiply against this. + * @return {!Integer} The product of this and the other. + */ + multiply(other: Integer): Integer; + /** + * Carries any overflow from the given index into later entries. + * @param {Array.} bits Array of 16-bit values in little-endian order. + * @param {number} index The index in question. + * @private + */ + private static carry16_; + /** + * Returns this Integer divided by the given one. + * @param {Integer} other Th Integer to divide this by. + * @return {!Integer} This value divided by the given one. + */ + divide(other: Integer): Integer; + /** + * Returns this Integer modulo the given one. + * @param {Integer} other The Integer by which to mod. + * @return {!Integer} This value modulo the given one. + */ + modulo(other: Integer): Integer; + /** @return {!Integer} The bitwise-NOT of this value. */ + not(): Integer; + /** + * Returns the bitwise-AND of this Integer and the given one. + * @param {Integer} other The Integer to AND with this. + * @return {!Integer} The bitwise-AND of this and the other. + */ + and(other: Integer): Integer; + /** + * Returns the bitwise-OR of this Integer and the given one. + * @param {Integer} other The Integer to OR with this. + * @return {!Integer} The bitwise-OR of this and the other. + */ + or(other: Integer): Integer; + /** + * Returns the bitwise-XOR of this Integer and the given one. + * @param {Integer} other The Integer to XOR with this. + * @return {!Integer} The bitwise-XOR of this and the other. + */ + xor(other: Integer): Integer; + /** + * Returns this value with bits shifted to the left by the given amount. 
+ * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the left by the given amount. + */ + shiftLeft(numBits: number): Integer; + /** + * Returns this value with bits shifted to the right by the given amount. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the right by the given amount. + */ + shiftRight(numBits: number): Integer; + /* Excluded from this release type: inspect */ + /** + * Returns a Integer whose value is the absolute value of this + * @returns {Integer} + */ + abs(): Integer; + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string; +} + +declare interface Keyspace { + name: any; + durableWrites: any; + strategy: any; + strategyOptions: any; + tokenToReplica: any; + udts: any; + tables: any; + functions: any; + aggregates: any; + virtual: any; + views: any; + graphEngine: any; +} + +/** + * @classdesc + * A LineString is a one-dimensional object representing a sequence of points and the line segments connecting them. + * @example + * new LineString(new Point(10.99, 20.02), new Point(14, 26), new Point(34, 1.2)); + * @alias module:geometry~LineString + * @extends {Geometry} + */ +export declare class LineString extends Geometry { + /* Excluded from this release type: points */ + /** + * Creates a new {@link LineString} instance. + * @param {...Point} points A sequence of {@link Point} items as arguments. + */ + constructor(...points: Point[] | Point[][]); + /** + * Creates a {@link LineString} instance from + * a Well-known Text (WKT) + * representation of a line. + * @param {Buffer} buffer + * @returns {LineString} + */ + static fromBuffer(buffer: Buffer): LineString; + /** + * Creates a {@link LineString} instance from + * a Well-known Text (WKT) + * representation of a line. + * @param {String} textValue + * @returns {LineString} + */ + static fromString(textValue: string): LineString; + /* Excluded from this release type: parseSegments */ + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Returns true if the values of the linestrings are the same, otherwise it returns false. + * @param {LineString} other + * @returns {Boolean} + */ + equals(other: LineString): boolean; + /** + * Returns Well-known Text (WKT) representation of the geometry object. + * @returns {String} + */ + toString(): string; + /* Excluded from this release type: useBESerialization */ + /** + * Returns a JSON representation of this geo-spatial type. + */ + toJSON(): object; +} + +declare type ListSetColumnInfo = { + code: (dataTypes.list | dataTypes.set); + info: DataTypeInfo; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +export declare const loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + WhiteListPolicy: typeof WhiteListPolicy; +}; + +/** + * Base class for Load Balancing Policies. 
+ */ +export declare class LoadBalancingPolicy { + protected client: Client; + protected hosts: HostMap; + /* Excluded from this release type: localDc */ + /** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {EmptyCallback} callback + */ + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + /** + * Returns the distance assigned by this policy to the provided host. + * @param {Host} host + */ + getDistance(host: Host): distance; + /** + * Returns an iterator with the hosts for a new query. + * Each new query will call this method. The first host in the result will + * then be used to perform the query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * @class + * @classdesc A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. + *

+ * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate. + *

+ *

+ * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone. + *

+ *

+ * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13]. + * In the event that the year, month and day parameters do not fall within the ES5 date range, an Error will be thrown. If you wish to represent a date outside of this range, pass a single + * parameter indicating the days since epoch. For example, -1 represents 1969-12-31. + *
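+ *
+ * A minimal usage sketch (assuming LocalDate is exposed through the driver's types module):
+ * @example Creating and formatting a LocalDate
+ * const { LocalDate } = require('cassandra-driver').types;
+ * const birthday = new LocalDate(2014, 10, 1);
+ * birthday.toString(); // '2014-10-01'
+ * new LocalDate(-1).toString(); // '1969-12-31', a single argument is interpreted as days since epoch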

+ */ +export declare class LocalDate { + /** + * The Date representation of the value, if it falls within the range of the ES5 Date type; otherwise, an invalid Date. + */ + date: Date; + private _value; + year: number; + month: number; + day: number; + /** + * Creates a new instance of LocalDate. + * A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. + *

+ * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate. + *

+ *

+ * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone. + *

+ *

+ * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13]. + * In the event that the year, month and day parameters do not fall within the ES5 date range, an Error will be thrown. If you wish to represent a date outside of this range, pass a single + * parameter indicating the days since epoch. For example, -1 represents 1969-12-31. + *

+ * @param {Number} year The year or days since epoch. If days since epoch, month and day should not be provided. + * @param {Number} [month] Between 1 and 12 inclusive. + * @param {Number} [day] Between 1 and the number of days in the given month of the given year. + * + * @constructor + */ + constructor(year: number, month?: number, day?: number); + /** + * Creates a new instance of LocalDate using the current year, month and day from the system clock in the default time-zone. + */ + static now(): LocalDate; + /** + * Creates a new instance of LocalDate using the current date from the system clock at UTC. + */ + static utcNow(): LocalDate; + /** + * Creates a new instance of LocalDate using the year, month and day from the provided local date time. + * @param {Date} date + */ + static fromDate(date: Date): LocalDate; + /** + * Creates a new instance of LocalDate using the year, month and day provided in the form: yyyy-mm-dd or + * days since epoch (i.e. -1 for Dec 31, 1969). + * @param {String} value + */ + static fromString(value: string): LocalDate; + /** + * Creates a new instance of LocalDate using the bytes representation. + * @param {Buffer} buffer + */ + static fromBuffer(buffer: Buffer): LocalDate; + /** + * Compares this LocalDate with the given one. + * @param {LocalDate} other date to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: LocalDate): number; + /** + * Returns true if the value of the LocalDate instance and other are the same + * @param {LocalDate} other + * @returns {Boolean} + */ + equals(other: LocalDate): boolean; + inspect(): string; + /** + * Gets the bytes representation of the instance. + * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Gets the string representation of the instance in the form: yyyy-mm-dd if + * the value can be parsed as a Date, otherwise days since epoch. + * @returns {String} + */ + toString(): string; + /** + * Gets the string representation of the instance in the form: yyyy-mm-dd, valid for JSON. + * @returns {String} + */ + toJSON(): string; +} + +/** + * @class + * @classdesc A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. + *

+ * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45:30.123456789" can be stored in a LocalTime. + *
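+ *
+ * A minimal usage sketch (assuming LocalTime is exposed through the driver's types module):
+ * @example Parsing and reading a LocalTime
+ * const { LocalTime } = require('cassandra-driver').types;
+ * const time = LocalTime.fromString('13:45:30.123456789');
+ * time.hour; // 13
+ * time.toString(); // '13:45:30.123456789'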

+ */ +export declare class LocalTime { + private value; + /** + * Gets the hour component of the time represented by the current instance, a number from 0 to 23. + * @type Number + */ + hour: number; + /** + * Gets the minute component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + minute: number; + /** + * Gets the second component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + second: number; + /** + * Gets the nanoseconds component of the time represented by the current instance, a number from 0 to 999999999. + * @type Number + */ + nanosecond: number; + private _partsCache?; + /** + * Creates a new instance of LocalTime. + * A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. + *

+ * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45:30.123456789" can be stored in a LocalTime. + *

+ * @param {Long} totalNanoseconds Total nanoseconds since midnight. + * @constructor + */ + constructor(totalNanoseconds: Long__default); + /** + * Parses a string representation and returns a new LocalTime. + * @param {String} value + * @returns {LocalTime} + */ + static fromString(value: string): LocalTime; + /** + * Uses the current local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. + * @returns {LocalTime} + */ + static now(nanoseconds?: number): LocalTime; + /** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Date} date Local date portion to extract the time passed since midnight. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the nanosecond time portion. + * @returns {LocalTime} + */ + static fromDate(date: Date, nanoseconds?: number): LocalTime; + /** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} milliseconds A Number from 0 to 86,399,999. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. + * @returns {LocalTime} + */ + static fromMilliseconds(milliseconds: number, nanoseconds?: number): LocalTime; + /** + * Creates a new instance of LocalTime from the bytes representation. + * @param {Buffer} value + * @returns {LocalTime} + */ + static fromBuffer(value: Buffer): LocalTime; + /** + * Compares this LocalTime with the given one. + * @param {LocalTime} other time to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: LocalTime): number; + /** + * Returns true if the value of the LocalTime instance and other are the same + * @param {LocalTime} other + * @returns {Boolean} + */ + equals(other: LocalTime): boolean; + /** + * Gets the total amount of nanoseconds since midnight for this instance. + * @returns {Long} + */ + getTotalNanoseconds(): Long__default; + inspect(): string; + /** + * Returns a big-endian bytes representation of the instance + * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Returns the string representation of the instance in the form of hh:MM:ss.ns + * @returns {String} + */ + toString(): string; + /** + * Gets the string representation of the instance in the form: hh:MM:ss.ns + * @returns {String} + */ + toJSON(): string; + /* Excluded from this release type: _getParts */ +} + +export { Long } + +declare type MapColumnInfo = { + code: (dataTypes.map); + info: [DataTypeInfo, DataTypeInfo]; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +/** + * Represents an object mapper for Apache Cassandra and DataStax Enterprise. 
+ * @alias module:mapping~Mapper + * @example Creating a Mapper instance with some options for the model 'User' + * const mappingOptions = { + * models: { + * 'User': { + * tables: ['users'], + * mappings: new UnderscoreCqlToCamelCaseMappings(), + * columnNames: { + * 'userid': 'id' + * } + * } + * } + * }; + * const mapper = new Mapper(client, mappingOptions); + * @example Creating a Mapper instance with other possible options for a model + * const mappingOptions = { + * models: { + * 'Video': { + * tables: ['videos', 'user_videos', 'latest_videos', { name: 'my_videos_view', isView: true }], + * mappings: new UnderscoreCqlToCamelCaseMappings(), + * columnNames: { + * 'videoid': 'id' + * }, + * keyspace: 'ks1' + * } + * } + * }; + * const mapper = new Mapper(client, mappingOptions); + */ +export declare class Mapper { + private client; + private _modelMappingInfos; + private _modelMappers; + /** + * Creates a new instance of Mapper. + * @param {Client} client The Client instance to use to execute the queries and fetch the metadata. + * @param {MappingOptions} [options] The [MappingOptions]{@link module:mapping~MappingOptions} containing the + * information of the models and table mappings. + */ + constructor(client: Client, options?: MappingOptions); + /** + * Gets a [ModelMapper]{@link module:mapping~ModelMapper} that is able to map documents of a certain model into + * CQL rows. + * @param {String} name The name to identify the model. Note that the name is case-sensitive. + * @returns {ModelMapper} A [ModelMapper]{@link module:mapping~ModelMapper} instance. + */ + forModel(name: string): ModelMapper; + /** + * Executes a batch of queries represented in the items. + * @param {Array} items + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * The mapper uses the generated queries to determine the default value. When an UPDATE is generated with a + * counter column or appending/prepending to a list column, the execution is marked as not idempotent. + *

+ *

+ * Additionally, the mapper uses the safest approach for queries with lightweight transactions (Compare and + * Set) by considering them as non-idempotent. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Boolean} [executionOptions.logged=true] Determines whether the batch should be written to the batchlog. + * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + * @returns {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result}. + */ + batch(items: Array, executionOptions: string | MappingExecutionOptions): Promise; +} + +export declare const mapping: { + Mapper: typeof Mapper; + ModelMapper: typeof ModelMapper; + ModelBatchMapper: typeof ModelBatchMapper; + ModelBatchItem: typeof ModelBatchItem; + Result: typeof Result; + TableMappings: typeof TableMappings; + DefaultTableMappings: typeof DefaultTableMappings; + UnderscoreCqlToCamelCaseMappings: typeof UnderscoreCqlToCamelCaseMappings; + q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; + }; +}; + +export declare type MappingExecutionOptions = { + executionProfile?: string; + isIdempotent?: boolean; + logged?: boolean; + timestamp?: number | Long__default; + fetchSize?: number; + pageState?: number; +}; + +/* Excluded from this release type: MappingHandler */ + +export declare type MappingOptions = { + models: { + [key: string]: ModelOptions; + }; +}; + +/** + * @classdesc Describes a CQL materialized view. + * @alias module:metadata~MaterializedView + * @augments {module:metadata~DataCollection} + * @constructor + */ +export declare class MaterializedView extends DataCollection { + /** + * Name of the table. + * @type {String} + */ + tableName: string; + /** + * View where clause. + * @type {String} + */ + whereClause: string; + /** + * Determines if all the table columns where are included in the view. + * @type {boolean} + */ + includeAllColumns: boolean; + /* Excluded from this release type: __constructor */ +} + +/** + * Represents cluster and schema information. + * The metadata class acts as a internal state of the driver. + */ +export declare class Metadata { + keyspaces: { + [name: string]: Keyspace; + }; + /* Excluded from this release type: initialized */ + private _isDbaas; + private _schemaParser; + /* Excluded from this release type: log */ + private _preparedQueries; + /* Excluded from this release type: tokenizer */ + /* Excluded from this release type: primaryReplicas */ + /* Excluded from this release type: ring */ + /* Excluded from this release type: tokenRanges */ + /* Excluded from this release type: ringTokensAsStrings */ + /* Excluded from this release type: datacenters */ + private options; + private controlConnection; + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: setCassandraVersion */ + /** + * Determines whether the cluster is provided as a service. + * @returns {boolean} true when the cluster is provided as a service (DataStax Astra), false when it's a + * different deployment (on-prem). 
+ */ + isDbaas(): boolean; + /* Excluded from this release type: setProductTypeAsDbaas */ + /* Excluded from this release type: setPartitioner */ + /* Excluded from this release type: buildTokens */ + /** + * Gets the keyspace metadata information and updates the internal state of the driver. + *

+ * If a callback is provided, the callback is invoked when the keyspace metadata refresh completes. + * Otherwise, it returns a Promise. + *
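+ * A usage sketch (assuming this Metadata instance is obtained from client.metadata):
+ * @example Refreshing the metadata of a single keyspace
+ * await client.metadata.refreshKeyspace('ks1');
+ * // Callback-based alternative:
+ * client.metadata.refreshKeyspace('ks1', err => { if (err) console.error(err); });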

+ * @param {String} name Name of the keyspace. + * @param {Function} [callback] Optional callback. + */ + refreshKeyspace(name: string, callback: EmptyCallback): void; + refreshKeyspace(name: string): Promise; + /** + * @param {String} name + * @private + */ + private _refreshKeyspace; + /** + * Gets the metadata information of all the keyspaces and updates the internal state of the driver. + *

+ * If a callback is provided, the callback is invoked when the keyspaces metadata refresh completes. + * Otherwise, it returns a Promise. + *

+ * @param {Boolean|Function} [waitReconnect] Determines if it should wait for reconnection in case the control connection is not + * connected at the moment. Default: true. + * @param {Function} [callback] Optional callback. + */ + refreshKeyspaces(waitReconnect: boolean, callback: EmptyCallback): void; + refreshKeyspaces(waitReconnect?: boolean): Promise; + refreshKeyspaces(callback: EmptyCallback): void; + /* Excluded from this release type: refreshKeyspacesInternal */ + private _getKeyspaceReplicas; + /** + * Gets the host list representing the replicas that contain the given partition key, token or token range. + *

+ * It uses the pre-loaded keyspace metadata to retrieve the replicas for a token for a given keyspace. + * When the keyspace metadata has not been loaded, it returns null. + *
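+ * A usage sketch (assuming this Metadata instance is obtained from client.metadata and routingKey is a placeholder Buffer holding the serialized partition key):
+ * @example Getting the replicas for a partition key
+ * const replicas = client.metadata.getReplicas('ks1', routingKey);
+ * if (replicas) {
+ *   replicas.forEach(host => console.log(host.address));
+ * }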

+ * @param {String} keyspaceName + * @param {Buffer|Token|TokenRange} token Can be Buffer (serialized partition key), Token or TokenRange + * @returns {Array} + */ + getReplicas(keyspaceName: string, token: Buffer | Token | TokenRange): Array; + /** + * Gets the token ranges that define data distribution in the ring. + * + * @returns {Set} The ranges of the ring or empty set if schema metadata is not enabled. + */ + getTokenRanges(): Set; + /** + * Gets the token ranges that are replicated on the given host, for + * the given keyspace. + * + * @param {String} keyspaceName The name of the keyspace to get ranges for. + * @param {Host} host The host. + * @returns {Set|null} Ranges for the keyspace on this host or null if keyspace isn't found or hasn't been loaded. + */ + getTokenRangesForHost(keyspaceName: string, host: Host): Set | null; + /** + * Constructs a Token from the input buffer(s) or string input. If a string is passed in + * it is assumed this matches the token representation reported by cassandra. + * @param {Array|Buffer|String} components + * @returns {Token} constructed token from the input buffer. + */ + newToken(components: Array | Buffer | string): Token; + /** + * Constructs a TokenRange from the given start and end tokens. + * @param {Token} start + * @param {Token} end + * @returns {TokenRange} build range spanning from start (exclusive) to end (inclusive). + */ + newTokenRange(start: Token, end: Token): TokenRange; + /* Excluded from this release type: getPreparedInfo */ + /** + * Clears the internal state related to the prepared statements. + * Following calls to the Client using the prepare flag will re-prepare the statements. + */ + clearPrepared(): void; + /* Excluded from this release type: getPreparedById */ + /* Excluded from this release type: setPreparedById */ + /* Excluded from this release type: getAllPrepared */ + /* Excluded from this release type: _uninitializedError */ + /** + * Gets the definition of an user-defined type. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same UDT definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *
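+ * A usage sketch (assuming this Metadata instance is obtained from client.metadata):
+ * @example Retrieving a user-defined type definition
+ * const udtInfo = await client.metadata.getUdt('ks1', 'address');
+ * console.log(udtInfo); // the UDT definition, or null when it is not found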

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the UDT. + * @param {Function} [callback] The callback to invoke when retrieval completes. + */ + getUdt(keyspaceName: string, name: string, callback: ValueCallback): void; + getUdt(keyspaceName: string, name: string): Promise; + /** + * @param {String} keyspaceName + * @param {String} name + * @returns {Promise} + * @private + */ + private _getUdt; + /** + * Gets the definition of a table. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same table definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *
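+ * A usage sketch (assuming this Metadata instance is obtained from client.metadata and that the returned TableMetadata exposes a columns array):
+ * @example Retrieving the metadata of a table
+ * const table = await client.metadata.getTable('ks1', 'users');
+ * console.log(table.columns.map(c => c.name));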

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the Table. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link TableMetadata} as + * second parameter. + */ + getTable(keyspaceName: string, name: string, callback: ValueCallback): void; + getTable(keyspaceName: string, name: string): Promise; + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + private _getTable; + /** + * Gets the definition of CQL functions for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same function definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace. + * @param {String} name Name of the Function. + * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link SchemaFunction} + * as second parameter. + */ + getFunctions(keyspaceName: string, name: string, callback: ValueCallback): void; + getFunctions(keyspaceName: string, name: string): Promise; + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + private _getFunctionsWrapper; + /** + * Gets a definition of CQL function for a given name and signature. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same function definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the Function + * @param {Array.|Array.<{code, info}>} signature Array of types of the parameters. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link SchemaFunction} as second + * parameter. + */ + getFunction(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + getFunction(keyspaceName: string, name: string, signature: string[] | Array): Promise; + /** + * Gets the definition of CQL aggregate for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same aggregates definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the Function + * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link Aggregate} as + * second parameter. + */ + getAggregates(keyspaceName: string, name: string, callback: ValueCallback): void; + getAggregates(keyspaceName: string, name: string): Promise; + /** + * @param {String} keyspaceName + * @param {String} name + * @private + */ + private _getAggregates; + /** + * Gets a definition of CQL aggregate for a given name and signature. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * When trying to retrieve the same aggregate definition concurrently, it will query once and invoke all callbacks + * with the retrieved information. + *

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the aggregate + * @param {Array.|Array.<{code, info}>} signature Array of types of the parameters. + * @param {Function} [callback] The callback with the err as a first parameter and the {@link Aggregate} as second parameter. + */ + getAggregate(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + getAggregate(keyspaceName: string, name: string, signature: string[] | Array): Promise; + /** + * Gets the definition of a CQL materialized view for a given name. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *

+ *

+ * Note that, unlike the rest of the {@link Metadata} methods, this method does not cache the result for following + * calls, as the current version of the Cassandra native protocol does not support schema change events for + * materialized views. Each call to this method will produce one or more queries to the cluster. + *
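+ * A usage sketch (assuming this Metadata instance is obtained from client.metadata):
+ * @example Retrieving a materialized view definition
+ * const view = await client.metadata.getMaterializedView('ks1', 'users_by_email');
+ * console.log(view.tableName, view.whereClause);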

+ * @param {String} keyspaceName Name of the keyspace + * @param {String} name Name of the materialized view + * @param {Function} [callback] The callback with the err as a first parameter and the {@link MaterializedView} as + * second parameter. + */ + getMaterializedView(keyspaceName: string, name: string, callback: ValueCallback): void; + getMaterializedView(keyspaceName: string, name: string): Promise; + /** + * @param {String} keyspaceName + * @param {String} name + * @returns {Promise} + * @private + */ + private _getMaterializedView; + /** + * Gets a map of cql function definitions or aggregates based on signature. + * @param {String} keyspaceName + * @param {String} name Name of the function or aggregate + * @param {Boolean} aggregate + * @returns {Promise} + * @private + */ + private _getFunctions; + /** + * Gets a single cql function or aggregate definition + * @param {String} keyspaceName + * @param {String} name + * @param {Array} signature + * @param {Boolean} aggregate + * @returns {Promise} + * @private + */ + private _getSingleFunction; + /** + * Gets the trace session generated by Cassandra when query tracing is enabled for the + * query. The trace itself is stored in Cassandra in the sessions and + * events table in the system_traces keyspace and can be + * retrieve manually using the trace identifier. + *

+ * If a callback is provided, the callback is invoked when the metadata retrieval completes. + * Otherwise, it returns a Promise. + *
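+ * A usage sketch (assuming the execution is run with the traceQuery option and the trace identifier is available at result.info.traceId):
+ * @example Retrieving the trace of a traced query
+ * const result = await client.execute(query, params, { traceQuery: true });
+ * const trace = await client.metadata.getTrace(result.info.traceId);
+ * console.log(trace.duration, trace.events.length);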

+ * @param {Uuid} traceId Identifier of the trace session. + * @param {Number} [consistency] The consistency level to obtain the trace. + * @param {Function} [callback] The callback with the err as first parameter and the query trace as second parameter. + */ + getTrace(traceId: Uuid, consistency: consistencies, callback: ValueCallback): void; + getTrace(traceId: Uuid, consistency: consistencies): Promise; + getTrace(traceId: Uuid, callback: ValueCallback): void; + getTrace(traceId: Uuid): Promise; + /** + * @param {Uuid} traceId + * @param {Number} consistency + * @returns {Promise} + * @private + */ + private _getTrace; + /* Excluded from this release type: checkSchemaAgreement */ + /** + * Async-only version of check schema agreement. + * @private + */ + private _checkSchemaAgreement; + /* Excluded from this release type: adaptUserHints */ + /** + * @param {Array} udts + * @param {{code, info}} type + * @param {string} keyspace + * @private + */ + private _checkUdtTypes; + /* Excluded from this release type: compareSchemaVersions */ +} + +export declare const metadata: { + Metadata: typeof Metadata; +}; + +export declare const metrics: { + ClientMetrics: typeof ClientMetrics; + DefaultMetrics: typeof DefaultMetrics; +}; + +/** + * Represents a query or a set of queries used to perform a mutation in a batch. + * @alias module:mapping~ModelBatchItem + */ +export declare class ModelBatchItem { + /* Excluded from this release type: doc */ + /* Excluded from this release type: docInfo */ + /* Excluded from this release type: handler */ + /* Excluded from this release type: cache */ + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: getQueries */ + /* Excluded from this release type: getCacheKey */ + /* Excluded from this release type: createQueries */ + /* Excluded from this release type: pushQueries */ + /* Excluded from this release type: getMappingInfo */ +} + +/** + * Provides utility methods to group multiple mutations on a single batch. + * @alias module:mapping~ModelBatchMapper + */ +export declare class ModelBatchMapper { + private _handler; + private _cache; + /* Excluded from this release type: __constructor */ + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the INSERT mutation to be + * used in a batch execution. + * @param {Object} doc An object containing the properties to insert. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * INSERT cql statements generated. If specified, it must include the columns to insert and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifNotExists] When set, it only inserts if the row does not exist prior to the insertion. + *

Please note that using IF NOT EXISTS will incur a non-negligible performance cost, so this should be used + * sparingly.

+ * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + insert(doc: object, docInfo: InsertDocInfo): ModelBatchItem; + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the UPDATE mutation to be + * used in a batch execution. + * @param {Object} doc An object containing the properties to update. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * UPDATE cql statements generated. If specified, it must include the columns to update and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifExists] When set, it only updates if the row already exists on the server. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Object} [docInfo.when] A document that acts as the condition that has to be met for the UPDATE to occur. + * Use this property only when you want to specify a conditional clause for lightweight transactions (CAS). + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + update(doc: object, docInfo: UpdateDocInfo): ModelBatchItem; + /** + * Gets a [ModelBatchItem]{@link module:mapping~ModelBatchItem} containing the queries for the DELETE mutation to be + * used in a batch execution. + * @param {Object} doc A document containing the primary keys values of the document to delete. + * @param {Object} [docInfo] An object containing the additional doc information. + * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the DELETE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + * When the CQL query is generated, this would be used to generate the `IF` clause. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Boolean} [docInfo.ifExists] When set, it only issues the DELETE command if the row already exists on the + * server. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * DELETE cql statement generated. If specified, it must include the columns to delete and the primary keys. + * @param {Boolean} [docInfo.deleteOnlyColumns] Determines that, when more document properties are specified + * besides the primary keys, the generated DELETE statement should be used to delete some column values but leave + * the row. When this is enabled and more properties are specified, a DELETE statement will have the following form: + * "DELETE col1, col2 FROM table1 WHERE pk1 = ? AND pk2 = ?" + * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query + * or a set of queries to be included in a batch. + */ + remove(doc: object, docInfo: RemoveDocInfo): ModelBatchItem; +} + +declare class ModelColumnInfo { + columnName: any; + toModel: any; + fromModel: any; + propertyName: any; + constructor(columnName: any, propertyName: any, toModel?: any, fromModel?: any); + static parse(columnName: any, value: any): ModelColumnInfo; +} + +export declare type ModelColumnOptions = { + name: string; + toModel?: (columnValue: any) => any; + fromModel?: (modelValue: any) => any; +}; + +/** + * Represents an object mapper for a specific model. + * @alias module:mapping~ModelMapper + */ +export declare class ModelMapper { + /** + * Gets the name identifier of the model. + * @type {String} + */ + name: string; + private _handler; + /** + * Gets a [ModelBatchMapper]{@link module:mapping~ModelBatchMapper} instance containing utility methods to group + * multiple doc mutations in a single batch. + * @type {ModelBatchMapper} + */ + batching: ModelBatchMapper; + /* Excluded from this release type: __constructor */ + /** + * Gets the first document matching the provided filter or null when not found. + *

+ * Note that all partition and clustering keys must be defined in order to use this method. + *

+ * @param {Object} doc The object containing the properties that map to the primary keys. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @return {Promise} + * @example Get a video by id + * videoMapper.get({ id }) + * @example Get a video by id, selecting specific columns + * videoMapper.get({ id }, fields: ['name', 'description']) + */ + get(doc: { + [key: string]: any; + }, docInfo?: { + fields?: string[]; + }, executionOptions?: string | MappingExecutionOptions): Promise; + /** + * Executes a SELECT query based on the filter and returns the result as an iterable of documents. + * @param {Object} doc An object containing the properties that map to the primary keys to filter. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object} [docInfo.orderBy] An associative array containing the column names as key and + * the order string (asc or desc) as value used to set the order of the results server-side. + * @param {Number} [docInfo.limit] Restricts the result of the query to a maximum number of rows on the + * server. + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] The amount of rows to retrieve per page. + * @param {Number} [executionOptions.pageState] A Buffer instance or a string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Get user's videos + * const result = await videoMapper.find({ userId }); + * for (let video of result) { + * console.log(video.name); + * } + * @example Get user's videos from a certain date + * videoMapper.find({ userId, addedDate: q.gte(date)}); + * @example Get user's videos in reverse order + * videoMapper.find({ userId }, { orderBy: { addedDate: 'desc' }}); + */ + find(doc: { + [key: string]: any; + }, docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + /** + * Executes a SELECT query without a filter and returns the result as an iterable of documents. + *

+ * This is only recommended for tables with a limited number of results. Otherwise, breaking up the + * token ranges on the client side should be preferred. + *
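+ * A usage sketch following the examples above:
+ * @example Getting all videos (small tables only)
+ * const result = await videoMapper.findAll({ fields: ['id', 'name'] }, { fetchSize: 100 });
+ * for (const video of result) {
+ *   console.log(video.name);
+ * }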

+ * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * SELECT cql statement generated, in order to restrict the amount of columns retrieved. + * @param {Object} [docInfo.orderBy] An associative array containing the column names as key and + * the order string (asc or desc) as value used to set the order of the results server-side. + * @param {Number} [docInfo.limit] Restricts the result of the query to a maximum number of rows on the + * server. + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] The mount of rows to retrieve per page. + * @param {Number} [executionOptions.pageState] A Buffer instance or a string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + */ + findAll(docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + /** + * Inserts a document. + *

+ * When the model is mapped to multiple tables, it will insert a row in each table when all the primary keys + * are specified. + *

+ * @param {Object} doc An object containing the properties to insert. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * INSERT cql statements generated. If specified, it must include the columns to insert and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifNotExists] When set, it only inserts if the row does not exist prior to the insertion. + *

Please note that using IF NOT EXISTS will incur a non-negligible performance cost, so this should be used + * sparingly.

+ * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * By default all generated INSERT statements are considered idempotent, except in the case of lightweight + * transactions. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client-generated and the server-side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Insert a video + * videoMapper.insert({ id, name }); + */ + insert(doc: { + [key: string]: any; + }, docInfo?: InsertDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + /** + * Updates a document. + *

+ * When the model is mapped to multiple tables, it will update a row in each table when all the primary keys + * are specified. + *

+ * @param {Object} doc An object containing the properties to update. + * @param {Object} [docInfo] An object containing the additional document information. + * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * UPDATE cql statements generated. If specified, it must include the columns to update and the primary keys. + * @param {Number} [docInfo.ttl] Specifies an optional Time To Live (in seconds) for the inserted values. + * @param {Boolean} [docInfo.ifExists] When set, it only updates if the row already exists on the server. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Object} [docInfo.when] A document that acts as the condition that has to be met for the UPDATE to occur. + * Use this property only when you want to specify a conditional clause for lightweight transactions (CAS). + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * The mapper uses the generated queries to determine the default value. When an UPDATE is generated with a + * counter column or appending/prepending to a list column, the execution is marked as not idempotent. + *

+ *

+ * Additionally, the mapper uses the safest approach for queries with lightweight transactions (Compare and + * Set) by considering them as non-idempotent. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client-generated and the server-side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Update the name of a video + * videoMapper.update({ id, name }); + */ + update(doc: { + [key: string]: any; + }, docInfo?: UpdateDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + /** + * Deletes a document. + * @param {Object} doc A document containing the primary keys values of the document to delete. + * @param {Object} [docInfo] An object containing the additional doc information. + * @param {Object} [docInfo.when] A document that act as the condition that has to be met for the DELETE to occur. + * Use this property only in the case you want to specify a conditional clause for lightweight transactions (CAS). + * When the CQL query is generated, this would be used to generate the `IF` clause. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Boolean} [docInfo.ifExists] When set, it only issues the DELETE command if the row already exists on the + * server. + *

+ * Please note that using IF conditions will incur a non-negligible performance cost on the server-side, so this + * should be used sparingly. + *

+ * @param {Array} [docInfo.fields] An Array containing the name of the properties that will be used in the + * DELETE cql statement generated. If specified, it must include the columns to delete and the primary keys. + * @param {Boolean} [docInfo.deleteOnlyColumns] Determines that, when more document properties are specified + * besides the primary keys, the generated DELETE statement should be used to delete some column values but leave + * the row. When this is enabled and more properties are specified, a DELETE statement will have the following form: + * "DELETE col1, col2 FROM table1 WHERE pk1 = ? AND pk2 = ?" + * @param {Object|String} [executionOptions] An object containing the options to be used for the requests + * execution or a string representing the name of the execution profile. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times without + * changing the result beyond the initial application. + *

+ * By default all generated DELETE statements are considered idempotent, except in the case of lightweight + * transactions. Lightweight transactions at client level with transparent retries can + * break linearizability. If that is not an issue for your application, you can manually set this field to true. + *

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client-generated and the server-side assigned timestamp.

+ * @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. + * @example Delete a video + * videoMapper.remove({ id }); + */ + remove(doc: { + [key: string]: any; + }, docInfo?: RemoveDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + /** + * Uses the provided query and param getter function to execute a query and map the results. + * Gets a function that takes the document, executes the query and returns the mapped results. + * @param {String} query The query to execute. + * @param {Function} paramsHandler The function to execute to extract the parameters of a document. + * @param {Object|String} [executionOptions] When provided, the options for all executions generated with this + * method will use the provided options and it will not consider the executionOptions per call. + * @param {String} [executionOptions.executionProfile] The name of the execution profile. + * @param {Number} [executionOptions.fetchSize] Amount of rows to retrieve per page. + * @param {Boolean} [executionOptions.isIdempotent] Defines whether the query can be applied multiple times + * without changing the result beyond the initial application. + * @param {Number} [executionOptions.pageState] Buffer or string token representing the paging state. + *

When provided, the query will be executed starting from a given paging state.

+ * @param {Number|Long} [executionOptions.timestamp] The default timestamp for the query in microseconds from the + * unix epoch (00:00:00, January 1st, 1970). + *

When provided, this will replace the client-generated and the server-side assigned timestamp.

+ * @return {Function} Returns a function that takes the document and execution options as parameters and returns a + * Promise the resolves to a [Result]{@link module:mapping~Result} instance. + */ + mapWithQuery(query: string, paramsHandler: (doc: any) => any[], executionOptions?: string | MappingExecutionOptions): (doc: any, executionOptions?: string | MappingExecutionOptions) => Promise>; +} + +/* Excluded from this release type: ModelMappingInfo */ + +export declare type ModelOptions = { + tables?: string[] | ModelTables[]; + mappings?: TableMappings; + columns?: { + [key: string]: string | ModelColumnOptions; + }; + keyspace?: string; +}; + +export declare interface ModelTables { + name: string; + isView: boolean; +} + +/** + * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps + * drift in the future. + *

+ * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator + * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, + * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. + *
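+ * A configuration sketch (assuming the generator is assigned through the client's policies.timestampGeneration option):
+ * @example Using a monotonic timestamp generator with custom thresholds
+ * const client = new Client({
+ *   contactPoints: ['127.0.0.1'],
+ *   localDataCenter: 'dc1',
+ *   policies: { timestampGeneration: new MonotonicTimestampGenerator(2000, 1000) }
+ * });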

+ * @extends {TimestampGenerator} + */ +export declare class MonotonicTimestampGenerator extends TimestampGenerator { + private _warningThreshold; + private _minLogInterval; + private _micros; + private _lastDate; + private _lastLogDate; + /** + * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps + * drift in the future. + *

+ * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator + * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, + * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. + *

+ * @param {Number} [warningThreshold] Determines how far in the future timestamps are allowed to drift before a + * warning is logged, expressed in milliseconds. Default: 1000. + * @param {Number} [minLogInterval] In case of multiple log events, it determines the time separation between log + * events, expressed in milliseconds. Use 0 to disable. Default: 1000. + * @constructor + */ + constructor(warningThreshold?: number, minLogInterval?: number); + /** + * Returns the current time in milliseconds since UNIX epoch + * @returns {Number} + */ + getDate(): number; + next(client: Client): Long__default | number | null; + /** + * @private + * @returns {Number|Long} + */ + private _generateMicroseconds; +} + +/* Excluded from this release type: Murmur3Token */ + +/* Excluded from this release type: Murmur3Tokenizer */ + +/* Excluded from this release type: MutableLong */ + +/* Excluded from this release type: NoAuthAuthenticator */ + +/* Excluded from this release type: NoAuthProvider */ + +/* Excluded from this release type: Node */ + +/** + * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. + */ +export declare class NoHostAvailableError extends DriverError { + innerErrors: object; + /** + * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. + * @param {Object} innerErrors An object map containing the error per host tried + * @param {String} [message] + * @constructor + */ + constructor(innerErrors: object, message?: string); +} + +/** + * Creates a new instance of NoSpeculativeExecutionPolicy. + * @classdesc + * A {@link SpeculativeExecutionPolicy} that never schedules speculative executions. + * @extends {SpeculativeExecutionPolicy} + */ +export declare class NoSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + private _plan; + /** + * Creates a new instance of NoSpeculativeExecutionPolicy. + */ + constructor(); + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; +} + +/** + * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. + */ +export declare class NotSupportedError extends DriverError { + /** + * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. + * @param message + * @constructor + */ + constructor(message: string); +} + +/* Excluded from this release type: opcodes */ + +/** + * Information of the execution to be used to determine whether the operation should be retried. + * @typedef {Object} OperationInfo@typedef {Object} OperationInfo + * @property {String} query The query that was executed. + * @param {ExecutionOptions} executionOptions The options related to the execution of the request. + * @property {Number} nbRetry The number of retries already performed for this operation. + */ +declare type OperationInfo = { + query: string; + executionOptions: ExecutionOptions; + nbRetry: number; +}; + +/* Excluded from this release type: OperationState */ + +/** + * Represents a client-side error that is raised when the client didn't hear back from the server within + * {@link ClientOptions.socketOptions.readTimeout}. + */ +export declare class OperationTimedOutError extends DriverError { + host?: string; + /** + * Represents a client-side error that is raised when the client didn't hear back from the server within + * {@link ClientOptions.socketOptions.readTimeout}. 
+ * @param {String} message The error message. + * @param {String} [host] Address of the server host that caused the operation to time out. + * @constructor + */ + constructor(message: string, host?: string); +} + +export declare type Options = { + collectResults?: boolean; + concurrencyLevel?: number; + executionProfile?: string; + maxErrors?: number; + raiseOnFirstError?: boolean; +}; + +declare type OtherCustomColumnInfo = { + code: (dataTypes.custom); + info: string; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +/** + * Represents a walk through a graph as defined by a traversal. + * @memberOf module:datastax/graph + */ +export declare class Path { + labels: any[]; + objects: any[]; + /** + * @param {any[]} labels + * @param {any[]} objects + */ + constructor(labels: any[], objects: any[]); +} + +/* Excluded from this release type: PlainTextAuthenticator */ + +/** + * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when + * connecting to a host. + * @extends module:auth~AuthProvider + * @example + * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); + * //Set the auth provider in the clientOptions when creating the Client instance + * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); + * @alias module:auth~PlainTextAuthProvider + */ +export declare class PlainTextAuthProvider extends AuthProvider { + private username; + private password; + /** + * Creates a new instance of the Authenticator provider + * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when + * connecting to a host. + * @example + * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); + * //Set the auth provider in the clientOptions when creating the Client instance + * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); + * @param {String} username User name in plain text + * @param {String} password Password in plain text + * @alias module:auth~PlainTextAuthProvider + * @constructor + */ + constructor(username: string, password: string); + /** + * Returns a new [Authenticator]{@link module:auth~Authenticator} instance to be used for plain text authentication. + * @override + * @returns {Authenticator} + */ + newAuthenticator(): Authenticator; +} + +/** + * @classdesc + * A Point is a zero-dimensional object that represents a specific (X,Y) + * location in a two-dimensional XY-Plane. In case of Geographic Coordinate + * Systems, the X coordinate is the longitude and the Y is the latitude. + * @extends {Geometry} + * @alias module:geometry~Point + */ +export declare class Point extends Geometry { + /* Excluded from this release type: x */ + /* Excluded from this release type: y */ + /** + * Creates a new {@link Point} instance. + * @param {Number} x The X coordinate. + * @param {Number} y The Y coordinate. + */ + constructor(x: number, y: number); + /** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. + * @param {Buffer} buffer + * @returns {Point} + */ + static fromBuffer(buffer: Buffer): Point; + /** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. + * @param {String} textValue + * @returns {Point} + */ + static fromString(textValue: string): Point; + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. 
+ * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Returns true if the values of the point are the same, otherwise it returns false. + * @param {Point} other + * @returns {Boolean} + */ + equals(other: Point): boolean; + /** + * Returns Well-known Text (WKT) representation of the geometry object. + * @returns {String} + */ + toString(): string; + /* Excluded from this release type: useBESerialization */ + /** + * Returns a JSON representation of this geo-spatial type. + * @returns {Object} + */ + toJSON(): object; +} + +export declare const policies: { + addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; + }; + loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + WhiteListPolicy: typeof WhiteListPolicy; + }; + reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; + }; + retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; + }; + speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; + }; + timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; + }; + defaultAddressTranslator: () => AddressTranslator; + defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + defaultRetryPolicy: () => RetryPolicy; + defaultReconnectionPolicy: () => ReconnectionPolicy; + defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + defaultTimestampGenerator: () => TimestampGenerator; +}; + +/** + * @classdesc + * Represents is a plane geometry figure that is bounded by a finite chain of straight line segments closing in a loop + * to form a closed chain or circuit. + * @example + * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); + * @example + * //polygon with a hole + * new Polygon( + * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], + * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] + * ); + * @alias module:geometry~Polygon + */ +export declare class Polygon extends Geometry { + /* Excluded from this release type: rings */ + /** + * Creates a new {@link Polygon} instance. + * @param {...Array.}[ringPoints] A sequence of Array of [Point]{@link module:geometry~Point} items as arguments + * representing the rings of the polygon. 
+ * @example + * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); + * @example + * //polygon with a hole + * new Polygon( + * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], + * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] + * ); + * @constructor + */ + constructor(...ringPoints: Point[][]); + /** + * Creates a {@link Polygon} instance from + * a Well-known Text (WKT) + * representation of a polygon. + * @param {Buffer} buffer + * @returns {Polygon} + */ + static fromBuffer(buffer: Buffer): Polygon; + /** + * Creates a {@link Polygon} instance from a Well-known Text (WKT) representation. + * @param {String} textValue + * @returns {Polygon} + */ + static fromString(textValue: string): Polygon; + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ + toBuffer(): Buffer; + /** + * Returns true if the values of the polygons are the same, otherwise it returns false. + * @param {Polygon} other + * @returns {Boolean} + */ + equals(other: Polygon): boolean; + /* Excluded from this release type: useBESerialization */ + /** + * Returns Well-known Text (WKT) representation of the geometry object. + * @returns {String} + */ + toString(): string; + /** + * Returns a JSON representation of this geo-spatial type. + */ + toJSON(): object; +} + +export declare type PreparedQueryInfo = { + queryId?: Buffer; + preparing?: boolean; + query: string; + keyspace: string; + meta?: DataCollection; +} & EventEmitter_2; + +/* Excluded from this release type: ProfileManager */ + +/** + * Represents a property. + * @memberOf module:datastax/graph + */ +export declare class Property { + key: string; + value: any; + /** + * @param key + * @param value + */ + constructor(key: string, value: any); +} + +/* Excluded from this release type: protocolEvents */ + +/** + * Contains information for the different protocol versions supported by the driver. + * @type {Object} + * @property {Number} v1 Cassandra protocol v1, supported in Apache Cassandra 1.2-->2.2. + * @property {Number} v2 Cassandra protocol v2, supported in Apache Cassandra 2.0-->2.2. + * @property {Number} v3 Cassandra protocol v3, supported in Apache Cassandra 2.1-->3.x. + * @property {Number} v4 Cassandra protocol v4, supported in Apache Cassandra 2.2-->3.x. + * @property {Number} v5 Cassandra protocol v5, in beta from Apache Cassandra 3.x+. Currently not supported by the + * driver. + * @property {Number} dseV1 DataStax Enterprise protocol v1, DSE 5.1+ + * @property {Number} dseV2 DataStax Enterprise protocol v2, DSE 6.0+ + * @property {Number} maxSupported Returns the higher protocol version that is supported by this driver. + * @property {Number} minSupported Returns the lower protocol version that is supported by this driver. + * @property {Function} isSupported A function that returns a boolean determining whether a given protocol version + * is supported. + * @alias module:types~protocolVersion + */ +export declare enum protocolVersion { + v1 = 1, + v2 = 2, + v3 = 3, + v4 = 4, + v5 = 5, + v6 = 6, + dseV1 = 65, + dseV2 = 66, + maxSupported = 66, + minSupported = 1 +} + +export declare namespace protocolVersion { + /* Excluded from this release type: isDse */ + /* Excluded from this release type: isSupportedCassandra */ + /** + * Determines whether the protocol version is supported by this driver. 
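+ * @example Checking support for a protocol version (illustrative sketch; uses only the enum values and the isSupported signature declared here)
+ * const ok = protocolVersion.isSupported(protocolVersion.v4); // true, v4 is within the supported range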
+ * @param {Number} version + * @returns {Boolean} + */ + export function isSupported(version: number): boolean; + /* Excluded from this release type: supportsPrepareFlags */ + /* Excluded from this release type: supportsKeyspaceInRequest */ + /* Excluded from this release type: supportsResultMetadataId */ + /* Excluded from this release type: supportsPreparedPartitionKey */ + /* Excluded from this release type: supportsSchemaChangeFullMetadata */ + /* Excluded from this release type: supportsContinuousPaging */ + /* Excluded from this release type: supportsPaging */ + /* Excluded from this release type: supportsTimestamp */ + /* Excluded from this release type: supportsNamedParameters */ + /* Excluded from this release type: supportsUnset */ + /* Excluded from this release type: supportsFailureReasonMap */ + /* Excluded from this release type: uses2BytesStreamIds */ + /* Excluded from this release type: uses4BytesCollectionLength */ + /* Excluded from this release type: uses4BytesQueryFlags */ + /* Excluded from this release type: canStartupResponseErrorBeWrapped */ + /* Excluded from this release type: getLowerSupported */ + /* Excluded from this release type: getHighestCommon */ + /* Excluded from this release type: isBeta */ +} + +/** + * Contains functions that represents operators in a query. + * @alias module:mapping~q + * @type {Object} + * @property {function} in_ Represents the CQL operator "IN". + * @property {function} gt Represents the CQL operator greater than ">". + * @property {function} gte Represents the CQL operator greater than or equals to ">=" . + * @property {function} lt Represents the CQL operator less than "<" . + * @property {function} lte Represents the CQL operator less than or equals to "<=" . + * @property {function} notEq Represents the CQL operator not equals to "!=" . + * @property {function} and When applied to a property, it represents two CQL conditions on the same column separated + * by the logical AND operator, e.g: "col1 >= x col < y" + * @property {function} incr Represents the CQL increment assignment used for counters, e.g: "col = col + x" + * @property {function} decr Represents the CQL decrement assignment used for counters, e.g: "col = col - x" + * @property {function} append Represents the CQL append assignment used for collections, e.g: "col = col + x" + * @property {function} prepend Represents the CQL prepend assignment used for lists, e.g: "col = x + col" + * @property {function} remove Represents the CQL remove assignment used for collections, e.g: "col = col - x" + */ +export declare const q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; +}; + +/** + * Represents a CQL assignment operation, like col = col + x. + * @ignore + */ +declare class QueryAssignment { + /* Excluded from this release type: sign */ + /* Excluded from this release type: value */ + /* Excluded from this release type: inverted */ + /* Excluded from this release type: __constructor */ +} + +/** + * Represents a CQL query operator, like >=, IN, <, ... 
+ * @ignore
+ */
+declare class QueryOperator {
+    /* Excluded from this release type: key */
+    /* Excluded from this release type: value */
+    /* Excluded from this release type: hasChildValues */
+    /* Excluded from this release type: isInOperator */
+    /* Excluded from this release type: __constructor */
+}
+
+/**
+ * Query options
+ * @typedef {Object} QueryOptions
+ * @property {Boolean} [autoPage] Determines if the driver must retrieve the following result pages automatically.
+ *
+ * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. For more information,
+ * check the
+ * [paging results documentation]{@link https://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}.
+ * @property {Boolean} [captureStackTrace] Determines if the stack trace before the query execution should be
+ * maintained.
+ * Useful for debugging purposes, it should be set to false in production environments as it adds
+ * unnecessary overhead to each execution.
+ * Default: false.
+ * @property {Number} [consistency] [Consistency level]{@link module:types~consistencies}.
+ * Defaults to localOne for Apache Cassandra and DSE deployments.
+ * For DataStax Astra, it defaults to localQuorum.
+ * @property {Object} [customPayload] Key-value payload to be passed to the server. On the Cassandra side,
+ * implementations of QueryHandler can use this data.
+ * @property {String} [executeAs] The user or role name to act as when executing this statement.
+ * When set, it executes as a different user/role than the one currently authenticated (a.k.a. proxy execution).
+ * This feature is only available in DSE 5.1+.
+ * @property {String|ExecutionProfile} [executionProfile] Name or instance of the [profile]{@link ExecutionProfile} to
+ * be used for this execution. If not set, it will use the "default" execution profile.
+ * @property {Number} [fetchSize] Amount of rows to retrieve per page.
+ * @property {Array|Array<Array>} [hints] Type hints for parameters given in the query, ordered as for the parameters.
+ * For batch queries, an array of such arrays, ordered as with the queries in the batch.
+ * @property {Host} [host] The host that should handle the query.
+ * Use of this option is heavily discouraged and should only be used in the following cases:
+ * 1. Querying node-local tables, such as tables in the system and system_views
+ * keyspaces.
+ * 2. Applying a series of schema changes, where it may be advantageous to execute schema changes in sequence on the
+ * same node.
+ * Configuring a specific host causes the configured + * [LoadBalancingPolicy]{@link module:policies/loadBalancing~LoadBalancingPolicy} to be completely bypassed. + * However, if the load balancing policy dictates that the host is at a + * [distance of ignored]{@link module:types~distance} or there is no active connectivity to the host, the request will + * fail with a [NoHostAvailableError]{@link module:errors~NoHostAvailableError}. + *

+ * @property {Boolean} [isIdempotent] Defines whether the query can be applied multiple times without changing the result
+ * beyond the initial application.
+ * The query execution idempotence can be used at [RetryPolicy]{@link module:policies/retry~RetryPolicy} level to
+ * determine if a statement can be retried in case of request error or write timeout.
+ * Default: false.
+ * @property {String} [keyspace] Specifies the keyspace for the query. It is used for the following:
+ * 1. To indicate what keyspace the statement is applicable to (protocol V5+ only). This is useful when the
+ * query does not provide an explicit keyspace and you want to override the current {@link Client#keyspace}.
+ * 2. For query routing when the query operates on a different keyspace than the current {@link Client#keyspace}.
+ * @property {Boolean} [logged] Determines if the batch should be written to the batchlog. Only valid for
+ * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: true.
+ * @property {Boolean} [counter] Determines if it's a counter batch. Only valid for
+ * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: false.
+ * @property {Buffer|String} [pageState] Buffer or string token representing the paging state.
+ * Useful for manual paging; if provided, the query will be executed starting from a given paging state.
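+ * @example Manual paging with pageState (illustrative sketch; the `client` instance, keyspace and table names are assumed)
+ * const options = { prepare: true, fetchSize: 100 };
+ * const first = await client.execute('SELECT * FROM ks.table1', [], options);
+ * // Resume where the first page ended by passing the page state of the previous result
+ * const second = await client.execute('SELECT * FROM ks.table1', [], { ...options, pageState: first.pageState });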
+ * @property {Boolean} [prepare] Determines if the query must be executed as a prepared statement. + * @property {Number} [readTimeout] When defined, it overrides the default read timeout + * (socketOptions.readTimeout) in milliseconds for this execution per coordinator. + *

+ * Suitable for statements for which the coordinator may allow a longer server-side timeout, for example aggregation
+ * queries.
+ * A value of 0 disables client side read timeout for the execution. Default: undefined.
+ * @property {RetryPolicy} [retry] Retry policy for the query.
+ * This property can be used to specify a different [retry policy]{@link module:policies/retry} to the one specified
+ * in the {@link ClientOptions}.policies.
+ * @property {Array} [routingIndexes] Index of the parameters that are part of the partition key to determine + * the routing. + * @property {Buffer|Array} [routingKey] Partition key(s) to determine which coordinator should be used for the query. + * @property {Array} [routingNames] Array of the parameters names that are part of the partition key to determine the + * routing. Only valid for non-prepared requests, it's recommended that you use the prepare flag instead. + * @property {Number} [serialConsistency] Serial consistency is the consistency level for the serial phase of + * conditional updates. + * This option will be ignored for anything else that a conditional update/insert. + * @property {Number|Long} [timestamp] The default timestamp for the query in microseconds from the unix epoch + * (00:00:00, January 1st, 1970). + *

+ * If provided, this will replace the server side assigned timestamp as default timestamp.
+ * Use the [generateTimestamp()]{@link module:types~generateTimestamp} utility method to generate a valid timestamp
+ * based on a Date and microseconds parts.
+ * @property {Boolean} [traceQuery] Enable query tracing for the execution. Use query tracing to diagnose performance
+ * problems related to query executions. Default: false.
+ * To retrieve the trace, you can call the [Metadata.getTrace()]{@link module:metadata~Metadata#getTrace} method.
+ * @property {Object} [graphOptions] Default options for graph query executions. + *

+ * These options are meant to provide defaults for all graph query executions. Consider using
+ * [execution profiles]{@link ExecutionProfile} if you plan to reuse different sets of options across different
+ * query executions.
+ * @property {String} [graphOptions.language] The graph language to use in graph queries. Default:
+ * 'gremlin-groovy'.
+ * @property {String} [graphOptions.name] The graph name to be used in all graph queries.
+ * This property is required but there is no default value for it. This value can be overridden at query level.
+ * @property {Number} [graphOptions.readConsistency] Overrides the
+ * [consistency level]{@link module:types~consistencies}
+ * defined in the query options for graph read queries.
+ * @property {Number} [graphOptions.readTimeout] Overrides the default per-host read timeout (in milliseconds) for all
+ * graph queries. Default: 0.
+ * Use null to reset the value and use the default on socketOptions.readTimeout.
+ * @property {String} [graphOptions.source] The graph traversal source name to use in graph queries. Default: + * 'g'. + * @property {Number} [graphOptions.writeConsistency] Overrides the [consistency + * level]{@link module:types~consistencies} defined in the query options for graph write queries. + */ +export declare interface QueryOptions { + autoPage?: boolean; + captureStackTrace?: boolean; + consistency?: consistencies; + customPayload?: object; + executeAs?: string; + executionProfile?: string | ExecutionProfile; + fetchSize?: number; + hints?: Array | Array>; + host?: Host; + isIdempotent?: boolean; + keyspace?: string; + logged?: boolean; + counter?: boolean; + pageState?: Buffer | string; + prepare?: boolean; + readTimeout?: number; + retry?: RetryPolicy; + routingIndexes?: number[]; + routingKey?: Buffer | Buffer[]; + routingNames?: string[]; + serialConsistency?: number; + timestamp?: number | Long__default; + traceQuery?: boolean; + graphOptions?: { + language?: string; + name?: string; + readConsistency?: number; + readTimeout?: number; + source?: string; + writeConsistency?: number; + }; +} + +export declare interface QueryTrace { + requestType: string; + coordinator: InetAddress; + parameters: { + [key: string]: any; + }; + startedAt: number | Long__default; + duration: number; + clientAddress: string; + events: Array<{ + id: Uuid; + activity: any; + source: any; + elapsed: any; + thread: any; + }>; +} + +/* Excluded from this release type: RandomToken */ + +/* Excluded from this release type: RandomTokenizer */ + +export declare const reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; +}; + +/** @module policies/reconnection */ +/** + * Base class for Reconnection Policies + */ +export declare class ReconnectionPolicy { + constructor(); + /** + * A new reconnection schedule. + * @returns {Iterator} An infinite iterator + */ + newSchedule(): Iterator; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +export declare type RemoveDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { + [key: string]: any; + }; + deleteOnlyColumns?: boolean; +}; + +/** + * Abstract class Request + */ +declare class Request_2 { + length: number; + constructor(); + /** + * @abstract + * @param {Encoder} encoder + * @param {Number} streamId + * @throws {TypeError} + * @returns {Buffer} + */ + write(encoder: Encoder, streamId: number): Buffer; + /** + * Creates a new instance using the same constructor as the current instance, copying the properties. + * @return {Request} + */ + clone(): Request_2; +} + +/** + * A request tracker that logs the requests executed through the session, according to a set of + * configurable options. 
+ * @implements {module:tracker~RequestTracker} + * @alias module:tracker~RequestLogger + * @example Logging slow queries + * const requestLogger = new RequestLogger({ slowThreshold: 1000 }); + * requestLogger.emitter.on('show', message => console.log(message)); + * // Add the requestLogger to the client options + * const client = new Client({ contactPoints, requestTracker: requestLogger }); + */ +export declare class RequestLogger extends RequestTracker { + private _options; + /** + * Determines whether it should emit 'normal' events for every EXECUTE, QUERY and BATCH request executed + * successfully, useful only for debugging + * @type {Boolean} + */ + private logNormalRequests; + /** + * Determines whether it should emit 'failure' events for every EXECUTE, QUERY and BATCH request execution that + * resulted in an error + * @type {Boolean} + */ + private logErroredRequests; + /** + * The object instance that emits 'slow', 'large', 'normal' and + * 'failure' events. + * @type {EventEmitter} + */ + private emitter; + /** + * Creates a new instance of {@link RequestLogger}. + * @param {Object} options + * @param {Number} [options.slowThreshold] The threshold in milliseconds beyond which queries are considered 'slow' + * and logged as such by the driver. + * @param {Number} [options.requestSizeThreshold] The threshold in bytes beyond which requests are considered 'large' + * and logged as such by the driver. + * @param {Boolean} [options.logNormalRequests] Determines whether it should emit 'normal' events for every + * EXECUTE, QUERY and BATCH request executed successfully, useful only for debugging. This option can be modified + * after the client is connected using the property {@link RequestLogger#logNormalRequests}. + * @param {Boolean} [options.logErroredRequests] Determines whether it should emit 'failure' events for every + * EXECUTE, QUERY and BATCH request execution that resulted in an error. This option can be modified + * after the client is connected using the property {@link RequestLogger#logErroredRequests}. + * @param {Number} [options.messageMaxQueryLength] The maximum amount of characters that are logged from the query + * portion of the message. Defaults to 500. + * @param {Number} [options.messageMaxParameterValueLength] The maximum amount of characters of each query parameter + * value that will be included in the message. Defaults to 50. + * @param {Number} [options.messageMaxErrorStackTraceLength] The maximum amount of characters of the stack trace + * that will be included in the message. Defaults to 200. + */ + constructor(options: { + slowThreshold?: number; + requestSizeThreshold?: number; + logNormalRequests?: boolean; + logErroredRequests?: boolean; + messageMaxQueryLength?: number; + messageMaxParameterValueLength?: number; + messageMaxErrorStackTraceLength?: number; + }); + /** + * Logs message if request execution was deemed too slow, large or if normal requests are logged. + * @override + */ + onSuccess(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [p: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; + /** + * Logs message if request execution was too large and/or encountered an error. 
+ * @override + */ + onError(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [p: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; + private _logSlow; + private _logLargeRequest; + private _logNormalRequest; + private _logLargeErrorRequest; + private _logErrorRequest; +} + +/** + * Tracks request execution for a {@link Client}. + *

+ * A {@link RequestTracker} can be configured in the client options. The Client will execute + * {@link RequestTracker#onSuccess} or {@link RequestTracker#onError} for every query or batch + * executed (QUERY, EXECUTE and BATCH requests). + *

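+ * @example A minimal custom tracker (illustrative sketch; `host.address` and the logging behavior are assumptions)
+ * class ConsoleTracker extends RequestTracker {
+ *   onSuccess(host, query, parameters, executionOptions, requestLength, responseLength, latency) {
+ *     console.log(`OK ${host.address} in ${latency[0]}s ${latency[1]}ns`);
+ *   }
+ *   onError(host, query, parameters, executionOptions, requestLength, err, latency) {
+ *     console.error(`FAILED ${host.address}: ${err.message}`);
+ *   }
+ * }
+ * // Wire it in the client options, in the same way as the RequestLogger above
+ * const client = new Client({ contactPoints, requestTracker: new ConsoleTracker() });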
+ * @interface + * @alias module:tracker~RequestTracker + */ +export declare class RequestTracker { + /** + * Invoked each time a query or batch request succeeds. + * @param {Host} host The node that acted as coordinator of the request. + * @param {String|Array} query In the case of prepared or unprepared query executions, the provided + * query string. For batch requests, an Array containing the queries and parameters provided. + * @param {Array|Object|null} parameters In the case of prepared or unprepared query executions, the provided + * parameters. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Number} requestLength Length of the body of the request. + * @param {Number} responseLength Length of the body of the response. + * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the + * remaining part of the real time that can't be represented in second precision (see process.hrtime()). + */ + onSuccess?(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [key: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; + /** + * Invoked each time a query or batch request fails. + * @param {Host} host The node that acted as coordinator of the request. + * @param {String|Array} query In the case of prepared or unprepared query executions, the provided + * query string. For batch requests, an Array containing the queries and parameters provided. + * @param {Array|Object|null} parameters In the case of prepared or unprepared query executions, the provided + * parameters. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Number} requestLength Length of the body of the request. When the failure occurred before the request was + * written to the wire, the length will be 0. + * @param {Error} err The error that caused that caused the request to fail. + * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the + * remaining part of the real time that can't be represented in second precision (see process.hrtime()). + */ + onError?(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [key: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; + /** + * Invoked when the Client is being shutdown. + */ + shutdown?(): void; +} + +/** + * Represents an error message from the server + */ +export declare class ResponseError extends DriverError { + code: number; + consistencies?: consistencies; + required?: number; + alive?: number; + received?: number; + blockFor?: number; + failures?: number; + reasons?: object; + isDataPresent?: any; + writeType?: any; + queryId?: any; + keyspace?: string; + functionName?: string; + argTypes?: string[]; + table?: string; + /** + * Represents an error message from the server + * @param {Number} code Cassandra exception code + * @param {String} message + * @constructor + */ + constructor(code: number, message: string); +} + +/** + * Server error codes returned by Cassandra + * @type {Object} + * @property {Number} serverError Something unexpected happened. + * @property {Number} protocolError Some client message triggered a protocol violation. + * @property {Number} badCredentials Authentication was required and failed. 
+ * @property {Number} unavailableException Raised when coordinator knows there is not enough replicas alive to perform a query with the requested consistency level. + * @property {Number} overloaded The request cannot be processed because the coordinator is overloaded. + * @property {Number} isBootstrapping The request was a read request but the coordinator node is bootstrapping. + * @property {Number} truncateError Error encountered during a truncate request. + * @property {Number} writeTimeout Timeout encountered on write query on coordinator waiting for response(s) from replicas. + * @property {Number} readTimeout Timeout encountered on read query on coordinator waitign for response(s) from replicas. + * @property {Number} readFailure A non-timeout error encountered during a read request. + * @property {Number} functionFailure A (user defined) function encountered during execution. + * @property {Number} writeFailure A non-timeout error encountered during a write request. + * @property {Number} syntaxError The submitted query has a syntax error. + * @property {Number} unauthorized The logged user doesn't have the right to perform the query. + * @property {Number} invalid The query is syntactically correct but invalid. + * @property {Number} configError The query is invalid because of some configuration issue. + * @property {Number} alreadyExists The query attempted to create a schema element (i.e. keyspace, table) that already exists. + * @property {Number} unprepared Can be thrown while a prepared statement tries to be executed if the provided statement is not known by the coordinator. + */ +export declare enum responseErrorCodes { + serverError = 0, + protocolError = 10, + badCredentials = 256, + unavailableException = 4096, + overloaded = 4097, + isBootstrapping = 4098, + truncateError = 4099, + writeTimeout = 4352, + readTimeout = 4608, + readFailure = 4864, + functionFailure = 5120, + writeFailure = 5376, + syntaxError = 8192, + unauthorized = 8448, + invalid = 8704, + configError = 8960, + alreadyExists = 9216, + unprepared = 9472, + clientWriteFailure = 32768 +} + +/** + * Represents the result of an execution as an iterable of objects in the Mapper. + * @alias module:mapping~Result + */ +export declare class Result implements IterableIterator { + private _rs; + private _info; + private _rowAdapter; + private _isEmptyLwt; + private _iteratorIndex; + /* Excluded from this release type: length */ + /* Excluded from this release type: pageState */ + /* Excluded from this release type: __constructor */ + /** + * When this instance is the result of a conditional update query, it returns whether it was successful. + * Otherwise, it returns true. + *

+ * For consistency, this method always returns true for non-conditional queries (although there is + * no reason to call the method in that case). This is also the case for conditional DDL statements + * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return + * information whether it was applied or not. + *

+ */ + wasApplied(): boolean; + /** + * Gets the first document in this result or null when the result is empty. + */ + first(): T | null; + /** + * Returns a new Iterator object that contains the document values. + */ + [Symbol.iterator](): IterableIterator; + /** + * Converts the current instance to an Array of documents. + * @return {Array} + */ + toArray(): T[]; + /** + * Executes a provided function once per result element. + * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. + * @param {Object} [thisArg] Value to use as this when executing callback. + */ + forEach(callback: (currentValue: T, index: number) => void, thisArg: any): void; + [inspectMethod](): T[]; + next(): { + done: boolean; + value: T; + }; +} + +/* Excluded from this release type: resultKind */ + +/** @module types */ +/** + * @class + * @classdesc Represents the result of a query. + */ +export declare class ResultSet implements Iterable, AsyncIterable { + info: { + queriedHost: string; + triedHosts: { + [key: string]: any; + }; + speculativeExecutions: number; + achievedConsistency: consistencies; + traceId: Uuid; + warnings: string[]; + customPayload: any; + isSchemaInAgreement: boolean; + }; + columns: Array<{ + name: string; + type: DataTypeInfo; + }>; + nextPage: (() => void) | null; + pageState: string; + rowLength: number; + rows: Row[]; + /* Excluded from this release type: nextPageAsync */ + /* Excluded from this release type: rawPageState */ + /* Excluded from this release type: __constructor */ + /** + * Returns the first row or null if the result rows are empty. + */ + first(): Row; + getPageState(): string; + getColumns(): Array<{ + name: string; + type: DataTypeInfo; + }>; + /** + * When this instance is the result of a conditional update query, it returns whether it was successful. + * Otherwise, it returns true. + *

+ * For consistency, this method always returns true for non-conditional queries (although there is + * no reason to call the method in that case). This is also the case for conditional DDL statements + * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return + * information whether it was applied or not. + *

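+ * @example Checking the outcome of a lightweight transaction (illustrative sketch; table and column names are assumed)
+ * const rs = await client.execute(
+ *   'INSERT INTO ks.users (id, name) VALUES (?, ?) IF NOT EXISTS', [id, name], { prepare: true });
+ * console.log(rs.wasApplied());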
+ */ + wasApplied(): boolean; + /** + * Gets the iterator function. + *

+ * Retrieves the iterator of the underlying fetched rows, without causing the driver to fetch the following + * result pages. For more information on result paging, + * [visit the documentation]{@link http://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. + *

+ * @alias module:types~ResultSet#@@iterator + * @see {@link module:types~ResultSet#@@asyncIterator} + * @example Using for...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for (const row of result) { + * console.log(row['email']); + * } + * @returns {Iterator.} + */ + [Symbol.iterator](): Iterator; + /** + * Gets the async iterator function. + *

+ * Retrieves the async iterator representing the entire query result, the driver will fetch the following result
+ * pages.
+ * Use the async iterator when the query result might contain more rows than the fetchSize.
+ * Note that using the async iterator will not affect the internal state of the ResultSet instance.
+ * You should avoid using both the rows property, that contains the row instances of the first page of
+ * results, and the async iterator, that will yield all the rows in the result regardless of the number of pages.
+ * Multiple concurrent async iterations are not supported.
+ * @alias module:types~ResultSet#@@asyncIterator + * @example Using for await...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for await (const row of result) { + * console.log(row['email']); + * } + * @returns {AsyncIterator} + */ + [Symbol.asyncIterator](): AsyncIterator; + /** + * Determines whether there are more pages of results. + * If so, the driver will initially retrieve and contain only the first page of results. + * To obtain all the rows, use the [AsyncIterator]{@linkcode module:types~ResultSet#@@asyncIterator}. + * @returns {boolean} + */ + isPaged(): boolean; +} + +/** + * Represents results from different related executions. + */ +export declare class ResultSetGroup { + private _collectResults; + private _maxErrors; + totalExecuted: number; + errors: Error[]; + resultItems: any[]; + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: setResultItem */ + /* Excluded from this release type: setError */ +} + +/** @module types */ +/** + * Readable stream using to yield data from a result or a field + */ +export declare class ResultStream extends Readable { + buffer: any[]; + paused: boolean; + private _cancelAllowed; + private _handlersObject; + private _highWaterMarkRows; + private _readableState; + private _readNext; + /* Excluded from this release type: __constructor */ + /* Excluded from this release type: _read */ + /* Excluded from this release type: _valve */ + add(chunk: any): number; + private _checkAboveHighWaterMark; + private _checkBelowHighWaterMark; + /* Excluded from this release type: cancel */ + /* Excluded from this release type: setHandlers */ +} + +export declare const retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; +}; + +/** + * Base and default RetryPolicy. + * Determines what to do when the driver encounters specific Cassandra exceptions. + */ +export declare class RetryPolicy { + /** + * Determines what to do when the driver gets an UnavailableException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} required The number of replicas whose response is required to achieve the + * required [consistency]{@link module:types~consistencies}. + * @param {Number} alive The number of replicas that were known to be alive when the request had been processed + * (since an unavailable exception has been triggered, there will be alive < required) + * @returns {DecisionInfo} + */ + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + /** + * Determines what to do when the driver gets a ReadTimeoutException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} received The number of nodes having answered the request. + * @param {Number} blockFor The number of replicas whose response is required to achieve the + * required [consistency]{@link module:types~consistencies}. + * @param {Boolean} isDataPresent When false, it means the replica that was asked for data has not responded. 
+ * @returns {DecisionInfo} + */ + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + /** + * Determines what to do when the driver gets a WriteTimeoutException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} received The number of nodes having acknowledged the request. + * @param {Number} blockFor The number of replicas whose acknowledgement is required to achieve the required + * [consistency]{@link module:types~consistencies}. + * @param {String} writeType A string that describes the type of the write that timed out ("SIMPLE" + * / "BATCH" / "BATCH_LOG" / "UNLOGGED_BATCH" / "COUNTER"). + * @returns {DecisionInfo} + */ + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; + /** + * Defines whether to retry and at which consistency level on an unexpected error. + *

+ * This method might be invoked in the following situations:
+ * 1. On a client timeout, while waiting for the server response
+ * (see [socketOptions.readTimeout]{@link ClientOptions}), being the error an instance of
+ * [OperationTimedOutError]{@link module:errors~OperationTimedOutError}.
+ * 2. On a connection error (socket closed, etc.).
+ * 3. When the contacted host replies with an error, such as overloaded, isBootstrapping,
+ * serverError, etc. In this case, the error is an instance of [ResponseError]{@link module:errors~ResponseError}.
+ * Note that when this method is invoked, the driver cannot guarantee that the mutation has been effectively
+ * applied server-side; a retry should only be attempted if the request is known to be idempotent.
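+ * @example A custom policy that retries an unexpected error once (illustrative sketch, not the driver's default behavior)
+ * class RetryOncePolicy extends RetryPolicy {
+ *   onRequestError(info, consistency, err) {
+ *     // info.nbRetry is described in OperationInfo; retry once on the next host, then rethrow
+ *     return info.nbRetry === 0 ? this.retryResult(consistency, false) : this.rethrowResult();
+ *   }
+ * }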
+ * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Error} err The error that caused this request to fail. + * @returns {DecisionInfo} + */ + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + /** + * Returns a {@link DecisionInfo} to retry the request with the given [consistency]{@link module:types~consistencies}. + * @param {consistencies} [consistency] When specified, it retries the request with the given consistency. + * @param {Boolean} [useCurrentHost] When specified, determines if the retry should be made using the same coordinator. + * Default: true. + * @returns {DecisionInfo} + */ + retryResult(consistency?: consistencies, useCurrentHost?: boolean): DecisionInfo; + /** + * Returns a {@link DecisionInfo} to callback in error when a err is obtained for a given request. + * @returns {DecisionInfo} + */ + rethrowResult(): DecisionInfo; +} + +/** + * namespace RetryDecision { + enum retryDecision { + ignore, + rethrow, + retry + } + } + */ +/** + * Determines the retry decision for the retry policies. + * @type {Object} + * @property {Number} rethrow + * @property {Number} retry + * @property {Number} ignore + * @static + */ +export declare namespace RetryPolicy { + export enum retryDecision { + rethrow = 0, + retry = 1, + ignore = 2 + } +} + +/** + * This policy yield nodes in a round-robin fashion. + */ +export declare class RoundRobinPolicy extends LoadBalancingPolicy { + private index; + constructor(); + /** + * Returns an iterator with the hosts to be used as coordinator for a query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +/** @module types */ +/** + * Represents a result row + * @param {Array} columns + * @constructor + */ +export declare class Row { + private readonly __columns; + [key: string]: any; + /* Excluded from this release type: __constructor */ + /** + * Returns the cell value. + * @param {String|Number} columnName Name or index of the column + */ + get(columnName: string | number): any; + /** + * Returns an array of the values of the row + * @returns {Array} + */ + values(): Array; + /** + * Returns an array of the column names of the row + * @returns {Array} + */ + keys(): string[]; + /** + * Executes the callback for each field in the row, containing the value as first parameter followed by the columnName + * @param {Function} callback + */ + forEach(callback: (val: any, key: string) => void): void; +} + +/** + * @classdesc Describes a CQL function. + * @alias module:metadata~SchemaFunction + */ +export declare class SchemaFunction { + /** + * Name of the cql function. + * @type {String} + */ + name: string; + /** + * Name of the keyspace where the cql function is declared. + */ + keyspaceName: string; + /** + * Signature of the function. + * @type {Array.} + */ + signature: Array; + /** + * List of the function argument names. + * @type {Array.} + */ + argumentNames: Array; + /** + * List of the function argument types. 
+ * @type {Array.<{code, info}>} + */ + argumentTypes: Array; + /** + * Body of the function. + * @type {String} + */ + body: string; + /** + * Determines if the function is called when the input is null. + * @type {Boolean} + */ + calledOnNullInput: boolean; + /** + * Name of the programming language, for example: java, javascript, ... + * @type {String} + */ + language: string; + /** + * Type of the return value. + * @type {DataTypeInfo} + */ + returnType: DataTypeInfo; + /** + * Indicates whether or not this function is deterministic. This means that + * given a particular input, the function will always produce the same output. + * @type {Boolean} + */ + deterministic: boolean; + /** + * Indicates whether or not this function is monotonic on all of its + * arguments. This means that it is either entirely non-increasing or + * non-decreasing. Even if the function is not monotonic on all of its + * arguments, it's possible to specify that it is monotonic on one of + * its arguments, meaning that partial applications of the function over + * that argument will be monotonic. + * + * Monotonicity is required to use the function in a GROUP BY clause. + * @type {Boolean} + */ + monotonic: boolean; + /** + * The argument names that the function is monotonic on. + * + * If {@link monotonic} is true, this will return all argument names. + * Otherwise, this will return either one argument or an empty array. + * @type {Array.} + */ + monotonicOn: Array; + /* Excluded from this release type: __constructor */ +} + +/** + * Search module. + *

+ * Contains the classes to represent the set of types for search data that come with DSE 5.1+ + *

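+ * @example Accessing the search types exposed by this module (sketch; only members declared below are used)
+ * const { DateRange, DateRangeBound, dateRangePrecision } = search;
+ * const precision = dateRangePrecision.day; // 2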
+ * @module datastax/search + */ +export declare const search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; +}; + +declare type SingleColumnInfo = { + code: SingleTypeCodes; + info?: null; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +declare type SingleTypeCodes = (typeof singleTypeNames[keyof typeof singleTypeNames] | dataTypes.duration | dataTypes.text); + +declare const singleTypeNames: Readonly<{ + readonly 'org.apache.cassandra.db.marshal.UTF8Type': dataTypes.varchar; + readonly 'org.apache.cassandra.db.marshal.AsciiType': dataTypes.ascii; + readonly 'org.apache.cassandra.db.marshal.UUIDType': dataTypes.uuid; + readonly 'org.apache.cassandra.db.marshal.TimeUUIDType': dataTypes.timeuuid; + readonly 'org.apache.cassandra.db.marshal.Int32Type': dataTypes.int; + readonly 'org.apache.cassandra.db.marshal.BytesType': dataTypes.blob; + readonly 'org.apache.cassandra.db.marshal.FloatType': dataTypes.float; + readonly 'org.apache.cassandra.db.marshal.DoubleType': dataTypes.double; + readonly 'org.apache.cassandra.db.marshal.BooleanType': dataTypes.boolean; + readonly 'org.apache.cassandra.db.marshal.InetAddressType': dataTypes.inet; + readonly 'org.apache.cassandra.db.marshal.SimpleDateType': dataTypes.date; + readonly 'org.apache.cassandra.db.marshal.TimeType': dataTypes.time; + readonly 'org.apache.cassandra.db.marshal.ShortType': dataTypes.smallint; + readonly 'org.apache.cassandra.db.marshal.ByteType': dataTypes.tinyint; + readonly 'org.apache.cassandra.db.marshal.DateType': dataTypes.timestamp; + readonly 'org.apache.cassandra.db.marshal.TimestampType': dataTypes.timestamp; + readonly 'org.apache.cassandra.db.marshal.LongType': dataTypes.bigint; + readonly 'org.apache.cassandra.db.marshal.DecimalType': dataTypes.decimal; + readonly 'org.apache.cassandra.db.marshal.IntegerType': dataTypes.varint; + readonly 'org.apache.cassandra.db.marshal.CounterColumnType': dataTypes.counter; +}>; + +export declare const speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; +}; + +/** @module policies/speculativeExecution */ +/** + * @classdesc + * The policy that decides if the driver will send speculative queries to the next hosts when the current host takes too + * long to respond. + *

+ * Note that only idempotent statements will be speculatively retried.
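+ * @example Enabling constant speculative executions on the client (sketch; the constructor arguments, a delay in
+ * milliseconds and a maximum number of speculative executions, as well as the client option names are assumptions)
+ * const client = new Client({
+ *   contactPoints,
+ *   localDataCenter: 'dc1',
+ *   policies: { speculativeExecution: new ConstantSpeculativeExecutionPolicy(200, 2) }
+ * });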
+ * @abstract + */ +export declare class SpeculativeExecutionPolicy { + constructor(); + /** + * Initialization method that gets invoked on Client startup. + * @param {Client} client + * @abstract + */ + init(client: Client): void; + /** + * Gets invoked at client shutdown, giving the opportunity to the implementor to perform cleanup. + * @abstract + */ + shutdown(): void; + /** + * Gets the plan to use for a new query. + * Returns an object with a nextExecution() method, which returns a positive number representing the + * amount of milliseconds to delay the next execution or a non-negative number to avoid further executions. + * @param {String} keyspace The currently logged keyspace. + * @param {String|Array} queryInfo The query, or queries in the case of batches, for which to build a plan. + * @return {{nextExecution: function}} + * @abstract + */ + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map; +} + +/** + * Represents a queue of ids from 0 to maximum stream id supported by the protocol version. + * Clients can dequeue a stream id using {@link StreamIdStack#shift()} and enqueue (release) using + * {@link StreamIdStack#push()} + */ +declare class StreamIdStack { + currentGroup: any[]; + groupIndex: number; + groups: any[]; + releaseTimeout: NodeJS.Timeout; + inUse: number; + releaseDelay: number; + maxGroups: number; + /** + * Creates a new instance of StreamIdStack. + * @param {number} version Protocol version + * @constructor + */ + constructor(version: number); + /** + * Sets the protocol version + * @param {Number} version + */ + setVersion(version: number): void; + /** + * Dequeues an id. + * Similar to {@link Array#pop()}. + * @returns {Number} Returns an id or null + */ + pop(): number; + /** + * Enqueue an id for future use. + * Similar to {@link Array#push()}. + * @param {Number} id + */ + push(id: number): void; + /** + * Clears all timers + */ + clear(): void; + /** + * Tries to create an additional group and returns a new id + * @returns {Number} Returns a new id or null if it's not possible to create a new group + * @private + */ + _tryCreateGroup(): number; + _tryIssueRelease(): void; + _releaseGroups(): void; +} + +/** + * Represents a collection of tokens for more concise Traversal definitions. + */ +export declare const t: { + id: EnumValue; + key: EnumValue; + label: EnumValue; + value: EnumValue; +}; + +/** + * Contains a set of methods to represent a row into a document and a document into a row. + * @alias module:mapping~TableMappings + * @interface + */ +export declare class TableMappings { + /** + * Method that is called by the mapper to create the instance of the document. + * @return {Object} + */ + newObjectInstance(): object; + /** + * Gets the name of the column based on the document property name. + * @param {String} propName The name of the property. + * @returns {String} + */ + getColumnName(propName: string): string; + /** + * Gets the name of the document property based on the column name. + * @param {String} columnName The name of the column. + * @returns {String} + */ + getPropertyName(columnName: string): string; +} + +/** + * @classdesc Describes a table + * @augments {module:metadata~DataCollection} + * @alias module:metadata~TableMetadata + */ +export declare class TableMetadata extends DataCollection { + /** + * Applies only to counter tables. 
+ * When set to true, replicates writes to all affected replicas regardless of the consistency level specified by + * the client for a write request. For counter tables, this should always be set to true. + * @type {Boolean} + */ + replicateOnWrite: boolean; + /** + * Returns the memtable flush period (in milliseconds) option for this table. + * @type {Number} + */ + memtableFlushPeriod: number; + /** + * Returns the index interval option for this table. + *

+ * Note: this option is only available in Apache Cassandra 2.0. It is deprecated in Apache Cassandra 2.1 and + * above, and will therefore return null for 2.1 nodes. + *

+ * @type {Number|null} + */ + indexInterval?: number; + /** + * Determines whether the table uses the COMPACT STORAGE option. + * @type {Boolean} + */ + isCompact: boolean; + /** + * + * @type {Array.} + */ + indexes: Array; + /** + * Determines whether the Change Data Capture (CDC) flag is set for the table. + * @type {Boolean|null} + */ + cdc?: boolean; + /** + * Determines whether the table is a virtual table or not. + * @type {Boolean} + */ + virtual: boolean; + /* Excluded from this release type: __constructor */ +} + +/** @private */ +export declare class TimeoutError extends errors.DriverError { + /** + * @param {string} message + */ + constructor(message: string); +} + +export declare const timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; +}; + +/** + * Creates a new instance of {@link TimestampGenerator}. + * @classdesc + * Generates client-side, microsecond-precision query timestamps. + *

+ * Given that Cassandra uses those timestamps to resolve conflicts, implementations should generate + * monotonically increasing timestamps for successive invocations of {@link TimestampGenerator.next()}. + *

+ * @constructor + */ +export declare class TimestampGenerator { + constructor(); + /** + * Returns the next timestamp. + *

+ * Implementors should enforce increasing monotonicity of timestamps, that is, + * a timestamp returned should always be strictly greater that any previously returned + * timestamp. + *

+ *

+ * Implementors should strive to achieve microsecond precision in the best possible way, + * which is usually largely dependent on the underlying operating system's capabilities. + *

+ * @param {Client} client The {@link Client} instance to generate timestamps to. + * @returns {Long|Number|null} the next timestamp (in microseconds). If it's equals to null, it won't be + * sent by the driver, letting the server to generate the timestamp. + * @abstract + */ + next(client: Client): Long__default | number | null; +} + +/** + * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current + * date. + *

+ * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of the static methods fromDate() or + * now() in that case. + *

+ * @class + * @classdesc Represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. + *

+ * Usage: TimeUuid.now()
+ * @extends module:types~Uuid + */ +export declare class TimeUuid extends Uuid { + /** + * Creates a new instance of Uuid based on the parameters provided according to rfc4122. + * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current + * date. + *

+ * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of the static methods fromDate() or + * now() in that case. + *

+ * This class represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. + *

Usage: TimeUuid.now()

+ * @param {Date} [value] The datetime for the instance, if not provided, it will use the current Date. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as ECMAScript Dates have only millisecond precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 2 ascii characters representing the clock identifier. + * @constructor + */ + constructor(value: Date | Buffer, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer); + /** + * Generates a TimeUuid instance based on the Date provided using random node and clock values. + * @param {Date} date Date to generate the v1 uuid. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as ECMAScript Dates have only millisecond precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 2 ascii characters representing the clock identifier. + * If not provided, a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid from an ECMAScript Date + * const timeuuid = TimeUuid.fromDate(new Date()); + * @example Generate a TimeUuid from a Date with ticks portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203); + * @example Generate a TimeUuid from a Date without any random portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203, 'host01', '02'); + * @example Generate a TimeUuid from a Date with random node and clock identifiers + * TimeUuid.fromDate(new Date(), 1203, function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + */ + static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer): TimeUuid; + static fromDate(date: Date, ticks: number, nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + /** + * Parses a string representation of a TimeUuid + * @param {String} value + * @returns {TimeUuid} + */ + static fromString(value: string): TimeUuid; + /** + * Returns the smallest possible type 1 uuid with the provided Date. + */ + static min(date: Date, ticks?: number): TimeUuid; + /** + * Returns the biggest possible type 1 uuid with the provided Date. + */ + static max(date: Date, ticks?: number): TimeUuid; + /** + * Generates a TimeUuid instance based on the current date using random node and clock values. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 2 ascii characters representing the clock identifier. + * If not provided, a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid based on the current date without any random portion + * const timeuuid = TimeUuid.now('host01', '02'); + * @example Generate a TimeUuid with random node and clock identifiers + * TimeUuid.now(function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + * @example Generate a TimeUuid based on the current date (might block) + * const timeuuid = TimeUuid.now(); + */ + static now(): TimeUuid; + static now(nodeId: string | Buffer, clockId?: string | Buffer): TimeUuid; + static now(nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + static now(callback: ValueCallback): void; + /** + * Gets the Date and 100-nanoseconds units representation of this instance. + * @returns {{date: Date, ticks: Number}} + */ + getDatePrecision(): { + date: Date; + ticks: number; + }; + /** + * Gets the Date representation of this instance. + * @returns {Date} + */ + getDate(): Date; + /** + * Returns the node id of this instance + * @returns {Buffer} + */ + getNodeId(): Buffer; + /** + * Returns the clock id of this instance, with the variant applied (first 2 msb being 1 and 0). + * @returns {Buffer} + */ + getClockId(): Buffer; + /** + * Returns the node id of this instance as an ascii string + * @returns {String} + */ + getNodeIdString(): string; +} + +/** + *
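As a usage sketch for the `TimeUuid` class above: values of this type are commonly used as clustering columns for time series, and the date and node portions can be read back from a value. The keyspace, `sensor_readings` table and column names below are made up for illustration.

```ts
import { Client, types } from 'cassandra-driver';

const { TimeUuid } = types;

// Placeholder schema: sensor_readings(sensor text, reading_time timeuuid, value double,
// PRIMARY KEY (sensor, reading_time)) is assumed.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

async function insertAndQuery(): Promise<void> {
  // v1 uuid for the current date with random node and clock portions.
  const id = TimeUuid.now();
  await client.execute(
    'INSERT INTO sensor_readings (sensor, reading_time, value) VALUES (?, ?, ?)',
    ['s-1', id, 23.4],
    { prepare: true }
  );

  // The timestamp and node portions can be read back from the value.
  console.log(id.getDate().toISOString(), id.getNodeIdString());

  // min()/max() bound an inclusive range query over the last hour.
  const from = TimeUuid.min(new Date(Date.now() - 3600 * 1000));
  const to = TimeUuid.max(new Date());
  await client.execute(
    'SELECT value FROM sensor_readings WHERE sensor = ? AND reading_time >= ? AND reading_time <= ?',
    ['s-1', from, to],
    { prepare: true }
  );
}
```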

Backward compatibility only, use [TimeUuid]{@link module:types~TimeUuid} instead.

+ * Generates and returns a RFC4122 v1 (timestamp based) UUID in a string representation. + * @param {{msecs, node, clockseq, nsecs}} [options] + * @param {Buffer} [buffer] + * @param {Number} [offset] + * @deprecated Use [TimeUuid]{@link module:types~TimeUuid} instead + */ +export declare function timeuuid(options: { + msecs: any; + node: any; + clockseq: any; + nsecs: any; +}, buffer: Buffer, offset: number): string | Buffer; + +/** + * Represents a token on the Cassandra ring. + */ +declare class Token { + protected _value: any; + /* Excluded from this release type: __constructor */ + /** + * @returns {DataTypeInfo} The type info for the + * type of the value of the token. + */ + getType(): DataTypeInfo; + /** + * @returns {*} The raw value of the token. + */ + getValue(): any; + /* Excluded from this release type: toString */ + /** + * Returns 0 if the values are equal, 1 if greater than other, -1 + * otherwise. + * + * @param {Token} other + * @returns {Number} + */ + compare(other: Token): number; + equals(other: Token): boolean; + /* Excluded from this release type: inspect */ +} + +export declare const token: { + Token: typeof Token; + TokenRange: typeof TokenRange; +}; + +/** + * A wrapper load balancing policy that adds token awareness to a child policy. + */ +export declare class TokenAwarePolicy extends LoadBalancingPolicy { + private childPolicy; + /** + * A wrapper load balancing policy that adds token awareness to a child policy. + * @param {LoadBalancingPolicy} childPolicy + * @constructor + */ + constructor(childPolicy: LoadBalancingPolicy); + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + getDistance(host: Host): distance; + /** + * Returns the hosts to use for a new query. + * The returned plan will return local replicas first, if replicas can be determined, followed by the plan of the + * child policy. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; + getOptions(): Map; +} + +/* Excluded from this release type: Tokenizer */ + +/** + * Represents a range of tokens on a Cassandra ring. + * + * A range is start-exclusive and end-inclusive. It is empty when + * start and end are the same token, except if that is the minimum + * token, in which case the range covers the whole ring (this is + * consistent with the behavior of CQL range queries). + * + * Note that CQL does not handle wrapping. To query all partitions + * in a range, see {@link unwrap}. + */ +declare class TokenRange { + end: Token; + start: Token; + private _tokenizer; + /* Excluded from this release type: __constructor */ + /** + * Splits this range into a number of smaller ranges of equal "size" + * (referring to the number of tokens, not the actual amount of data). + * + * Splitting an empty range is not permitted. But note that, in edge + * cases, splitting a range might produce one or more empty ranges. + * + * @param {Number} numberOfSplits Number of splits to make. + * @returns {TokenRange[]} Split ranges. + * @throws {Error} If splitting an empty range. 
+ */ + splitEvenly(numberOfSplits: number): TokenRange[]; + /** + * A range is empty when start and end are the same token, except if + * that is the minimum token, in which case the range covers the + * whole ring. This is consistent with the behavior of CQL range + * queries. + * + * @returns {boolean} Whether this range is empty. + */ + isEmpty(): boolean; + /** + * A range wraps around the end of the ring when the start token + * is greater than the end token and the end token is not the + * minimum token. + * + * @returns {boolean} Whether this range wraps around. + */ + isWrappedAround(): boolean; + /** + * Splits this range into a list of two non-wrapping ranges. + * + * This will return the range itself if it is non-wrapped, or two + * ranges otherwise. + * + * This is useful for CQL range queries, which do not handle + * wrapping. + * + * @returns {TokenRange[]} The list of non-wrapping ranges. + */ + unwrap(): TokenRange[]; + /** + * Whether this range contains a given Token. + * + * @param {Token} token Token to check for. + * @returns {boolean} Whether or not the Token is in this range. + */ + contains(token: Token): boolean; + /** + * Determines if the input range is equivalent to this one. + * + * @param {TokenRange} other Range to compare with. + * @returns {boolean} Whether or not the ranges are equal. + */ + equals(other: TokenRange): boolean; + /** + * Returns 0 if the values are equal, otherwise compares against + * start, if start is equal, compares against end. + * + * @param {TokenRange} other Range to compare with. + * @returns {Number} + */ + compare(other: TokenRange): number; + /* Excluded from this release type: toString */ +} + +/** + * Tracker module. + * @module tracker + */ +export declare const tracker: { + RequestTracker: typeof RequestTracker; + RequestLogger: typeof RequestLogger; +}; + +/* Excluded from this release type: TransitionalModePlainTextAuthenticator */ + +/* Excluded from this release type: Tree */ + +/** @module types */ +/** + * @class + * @classdesc A tuple is a sequence of immutable objects. + * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. + *
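To illustrate how `TokenAwarePolicy`, `Token` and `TokenRange` above fit together, here is a hedged sketch: it wraps a `DCAwareRoundRobinPolicy` with token awareness and then walks the ring's token ranges. It assumes the driver's token metadata API (`client.metadata.getTokenRanges()`), which is not part of this excerpt, and uses placeholder contact point and data center names.

```ts
import { Client, policies } from 'cassandra-driver';

// Contact point and data center names are placeholders.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  policies: {
    // Prefer replicas that own the partition, then fall back to the child policy's plan.
    loadBalancing: new policies.loadBalancing.TokenAwarePolicy(
      new policies.loadBalancing.DCAwareRoundRobinPolicy('datacenter1'))
  }
});

async function listRanges(): Promise<void> {
  await client.connect();
  // getTokenRanges() is assumed from the driver's token metadata API (not shown in this excerpt).
  for (const range of client.metadata.getTokenRanges()) {
    // CQL range queries cannot wrap around the ring, so split wrapping ranges first.
    for (const r of range.unwrap()) {
      console.log(`(${r.start}, ${r.end}]`);
    }
  }
}
```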

+ * As tuples can be used as Map keys, the {@link Tuple#toString toString()} method calls toString of each element, + * to try to get a unique string key. + *

+ */ +export declare class Tuple { + elements: any[]; + length: number; + /** + * Creates a new sequence of immutable objects with the parameters provided. + * A tuple is a sequence of immutable objects. + * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. + *

+ * As tuples can be used as Map keys, the {@link Tuple#toString toString()} method calls toString of each element, + * to try to get a unique string key. + *

+ * @param {any[]} args The sequence elements as arguments. + * @constructor + */ + constructor(...args: any[]); + /** + * Creates a new instance of a tuple based on the Array + * @param {Array} elements + * @returns {Tuple} + */ + static fromArray(elements: any[]): Tuple; + /** + * Returns the value located at the index. + * @param {Number} index Element index + */ + get(index: number): any; + /** + * Returns the string representation of the sequence surrounded by parentheses, ie: (1, 2). + *

+ * The returned value attempts to be a unique string representation of its values. + *

+ * @returns {string} + */ + toString(): string; + /** + * Returns the Array representation of the sequence. + * @returns {Array} + */ + toJSON(): any[]; + /** + * Gets the elements as an array + * @returns {Array} + */ + values(): any[]; +} + +declare type TupleColumnInfo = { + code: (dataTypes.tuple); + info: Array; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +declare type TupleListColumnInfoWithoutSubtype = { + code: (dataTypes.tuple | dataTypes.list); +}; + +export declare const types: { + opcodes: { + error: number; + startup: number; + ready: number; + authenticate: number; + credentials: number; + options: number; + supported: number; + query: number; + result: number; + prepare: number; + execute: number; + register: number; + event: number; + batch: number; + authChallenge: number; + authResponse: number; + authSuccess: number; + cancel: number; + /** + * Determines if the code is a valid opcode + */ + isInRange: (code: any) => boolean; + }; + consistencies: typeof consistencies; + consistencyToString: {}; + dataTypes: typeof dataTypes; + /* Excluded from this release type: getDataTypeNameByCode */ + distance: typeof distance; + frameFlags: { + compression: number; + tracing: number; + customPayload: number; + warning: number; + }; + protocolEvents: { + topologyChange: string; + statusChange: string; + schemaChange: string; + }; + protocolVersion: typeof protocolVersion; + responseErrorCodes: typeof responseErrorCodes; + resultKind: { + voidResult: number; + rows: number; + setKeyspace: number; + prepared: number; + schemaChange: number; + }; + timeuuid: typeof timeuuid; + uuid: typeof uuid; + BigDecimal: typeof BigDecimal; + Duration: typeof Duration; + /* Excluded from this release type: FrameHeader */ + InetAddress: typeof InetAddress; + Integer: typeof Integer; + LocalDate: typeof LocalDate; + LocalTime: typeof LocalTime; + Long: typeof Long__default; + ResultSet: typeof ResultSet; + ResultStream: typeof ResultStream; + Row: typeof Row; + DriverError: typeof DriverError; + TimeoutError: typeof TimeoutError; + TimeUuid: typeof TimeUuid; + Tuple: typeof Tuple; + Uuid: typeof Uuid; + unset: Readonly<{ + readonly unset: true; + }>; + /* Excluded from this release type: generateTimestamp */ + Vector: typeof Vector; +}; + +export declare interface Udt { + name: string; + fields: ColumnInfo[]; +} + +declare type UdtColumnInfo = { + code: (dataTypes.udt); + info: { + name: string; + fields: Array<{ + name: string; + type: DataTypeInfo; + }>; + }; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +/* Excluded from this release type: UdtGraphWrapper */ + +/** + * A [TableMappings]{@link module:mapping~TableMappings} implementation that converts CQL column names in all-lowercase + * identifiers with underscores (snake case) to camel case (initial lowercase letter) property names. + *
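Returning to the `Tuple` class declared above, a short usage sketch may help; the keyspace, `prices` table and its `tuple<text, int>` column are assumptions made up for the example.

```ts
import { Client, types } from 'cassandra-driver';

// Placeholder schema: prices(id uuid PRIMARY KEY, amount tuple<text, int>) is assumed.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

async function savePrice(): Promise<void> {
  // Elements are positional and immutable; they map to the fields of the CQL tuple.
  const amount = new types.Tuple('USD', 42);

  await client.execute(
    'INSERT INTO prices (id, amount) VALUES (?, ?)',
    [types.Uuid.random(), amount],
    { prepare: true }
  );

  const rs = await client.execute('SELECT amount FROM prices LIMIT 1');
  const read: types.Tuple = rs.first().get('amount');
  console.log(read.get(0), read.get(1)); // e.g. 'USD' 42
}
```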

+ * The conversion is performed without any checks for the source format; you should make sure that the source + * format is snake case for CQL identifiers and camel case for properties. + *

+ * @alias module:mapping~UnderscoreCqlToCamelCaseMappings + * @implements {module:mapping~TableMappings} + */ +export declare class UnderscoreCqlToCamelCaseMappings extends TableMappings { + /** + * Creates a new instance of {@link UnderscoreCqlToCamelCaseMappings} + */ + constructor(); + /** + * Converts a property name in camel case to snake case. + * @param {String} propName Name of the property to convert to snake case. + * @return {String} + */ + getColumnName(propName: string): string; + /** + * Converts a column name in snake case to camel case. + * @param {String} columnName The column name to convert to camel case. + * @return {String} + */ + getPropertyName(columnName: string): string; +} + +/** + * Unset representation. + *
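A brief sketch of how `UnderscoreCqlToCamelCaseMappings` is typically plugged into a `Mapper` (the model and table names below are invented for illustration): snake_case columns such as `user_id` and `first_name` surface as `userId` and `firstName` properties.

```ts
import { Client, mapping, types } from 'cassandra-driver';

// Placeholder schema: user_profiles(user_id uuid PRIMARY KEY, first_name text, ...) is assumed.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

const mapper = new mapping.Mapper(client, {
  models: {
    UserProfile: {
      tables: ['user_profiles'],
      mappings: new mapping.UnderscoreCqlToCamelCaseMappings()
    }
  }
});

const userMapper = mapper.forModel('UserProfile');

async function upsertAndRead(): Promise<void> {
  const userId = types.Uuid.random();
  // Property names are camelCase; the mappings translate them to snake_case columns.
  await userMapper.insert({ userId, firstName: 'Ada' });
  const result = await userMapper.find({ userId });
  console.log(result.first()?.firstName);
}
```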

+ * Use this field if you want to set a parameter to unset. Valid for Cassandra 2.2 and above. + *

+ */ +export declare const unset: Readonly<{ + readonly unset: true; +}>; + +export declare type UpdateDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { + [key: string]: any; + }; + orderBy?: { + [key: string]: string; + }; + limit?: number; + deleteOnlyColumns?: boolean; +}; + +/** @module types */ +/** + * @class + * @classdesc Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. + */ +export declare class Uuid { + /* Excluded from this release type: buffer */ + /** + * Creates a new instance of Uuid based on a Buffer + * Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. + * @param {Buffer} buffer The 16-length buffer. + * @constructor + */ + constructor(buffer: Buffer); + /** + * Parses a string representation of a Uuid + * @param {String} value + * @returns {Uuid} + */ + static fromString(value: string): Uuid; + /** + * Creates a new random (version 4) Uuid. + * @param {function} [callback] Optional callback to be invoked with the error as first parameter and the created Uuid as + * second parameter. + * @returns {Uuid} + */ + static random(): Uuid; + static random(callback: ValueCallback): void; + /** + * Gets the bytes representation of a Uuid + * @returns {Buffer} + */ + getBuffer(): Buffer; + /** + * Compares this object to the specified object. + * The result is true if and only if the argument is not null, is a UUID object, and contains the same value, bit for bit, as this UUID. + * @param {Uuid} other The other value to test for equality. + */ + equals(other: Uuid): boolean; + /** + * Returns a string representation of the value of this Uuid instance. + * 32 hex separated by hyphens, in the form of 00000000-0000-0000-0000-000000000000. + * @returns {String} + */ + toString(): string; + /** + * Provide the name of the constructor and the string representation + * @returns {string} + */ + inspect(): string; + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string; +} + +/** + *
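To show what the `unset` constant above is for, here is a small sketch (placeholder keyspace and `users` table): binding `unset` for a prepared parameter tells the server to leave the column untouched instead of writing a null, which would create a tombstone. It requires Cassandra 2.2+ (protocol v4).

```ts
import { Client, types } from 'cassandra-driver';

// Placeholder schema: users(id uuid PRIMARY KEY, email text) is assumed.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

async function upsertUser(id: types.Uuid, email?: string): Promise<void> {
  // Binding `unset` leaves the existing column value untouched instead of
  // overwriting it with null (which would write a tombstone).
  await client.execute(
    'INSERT INTO users (id, email) VALUES (?, ?)',
    [id, email !== undefined ? email : types.unset],
    { prepare: true }
  );
}

// upsertUser(types.Uuid.random()) inserts the row without touching `email`.
```

Alternatively, the `encoding.useUndefinedAsUnset` client option (visible in `ClientOptions` further down) makes the driver treat `undefined` parameters as unset automatically.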

Backward compatibility only, use [Uuid]{@link module:types~Uuid} class instead.

+ * Generate and return a RFC4122 v4 UUID in a string representation. + * @deprecated Use [Uuid]{@link module:types~Uuid} class instead + */ +export declare function uuid(options: any, buffer: any, offset: any): any; + +declare type ValueCallback = (err: Error, val: T) => void; + +export declare class Vector { + /** + * Returns the number of the elements. + * @type Number + */ + length: number; + subtype: string; + elements: any[]; + /** + * + * @param {Float32Array | Array} elements + * @param {string} [subtype] + */ + constructor(elements: Float32Array | Array, subtype?: string); + /** + * Returns the string representation of the vector. + * @returns {string} + */ + toString(): string; + /** + * + * @param {number} index + */ + at(index: number): any; + /** + * + * @returns {IterableIterator} an iterator over the elements of the vector + */ + [Symbol.iterator](): IterableIterator; + static get [Symbol.species](): typeof Vector; + /** + * + * @param {(value: any, index: number, array: any[]) => void} callback + */ + forEach(callback: (value: any, index: number, array: any[]) => void): void; + /** + * @returns {string | null} get the subtype string, e.g., "float", but it's optional so it can return null + */ + getSubtype(): string | null; +} + +declare type VectorColumnInfo = { + code: (dataTypes.custom); + customTypeName: ('vector'); + info: [DataTypeInfo, number]; + options?: { + frozen?: boolean; + reversed?: boolean; + }; +}; + +export declare const version: string; + +/** + * Represents a graph Vertex. + * @extends Element + * @memberOf module:datastax/graph + */ +export declare class Vertex extends Element { + properties: { + [key: string]: any[]; + }; + /** + * @param id + * @param {String} label + * @param {{ [key: string]: any[] }} properties + */ + constructor(id: any, label: string, properties?: { + [key: string]: any[]; + }); +} + +/** + * Represents a graph vertex property. + * @extends Element + * @memberOf module:datastax/graph + */ +export declare class VertexProperty extends Element { + value: any; + key: string; + properties: any; + /** + * @param id + * @param {String} label + * @param value + * @param {Object} properties + */ + constructor(id: any, label: string, value: any, properties: object); +} + +/** + * Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn't have enough space to fit the value that was decoded + */ +export declare class VIntOutOfRangeException extends DriverError { + /** + * Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn't have enough space to fit the value that was decoded + * @param {Long} long + */ + constructor(long: Long__default); +} + +/** + * @classdesc + * Exposed for backward-compatibility only, it's recommended that you use {@link AllowListPolicy} instead. + * @extends AllowListPolicy + * @deprecated Use allow-list instead. It will be removed in future major versions. + */ +export declare class WhiteListPolicy extends AllowListPolicy { + /** + * Creates a new instance of WhiteListPolicy. + * @param {LoadBalancingPolicy} childPolicy - The wrapped policy. + * @param {Array.} allowList - The hosts address in the format ipAddress:port. + * @deprecated Use AllowListPolicy instead. It will be removed in future major versions. 
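As a usage sketch for the `Vector` class declared above: it wraps a list of numbers so the encoder serializes them as a CQL vector value. The `documents` table, its `vector<float, 3>` column and the `'float'` subtype string are assumptions for illustration.

```ts
import { Client, types } from 'cassandra-driver';

// Placeholder schema: documents(id uuid PRIMARY KEY, embedding vector<float, 3>) is assumed.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

async function storeEmbedding(): Promise<void> {
  // Wrapping the numbers in a Vector lets the encoder serialize them as a CQL vector.
  const embedding = new types.Vector([0.12, 0.05, 0.87], 'float');

  await client.execute(
    'INSERT INTO documents (id, embedding) VALUES (?, ?)',
    [types.Uuid.random(), embedding],
    { prepare: true }
  );

  const rs = await client.execute('SELECT embedding FROM documents LIMIT 1');
  const read: types.Vector = rs.first().get('embedding');
  console.log(read.length, read.at(0), read.getSubtype());
}
```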
+ */ + constructor(childPolicy: LoadBalancingPolicy, allowList: Array); +} + +/* Excluded from this release type: WriteQueue */ + +export { } +export namespace auth { + export type Authenticator = InstanceType; + export type AuthProvider = InstanceType; + export type PlainTextAuthProvider = InstanceType; + export type DsePlainTextAuthProvider = InstanceType; + export type DseGssapiAuthProvider = InstanceType; +} + +type _Options = Options; + +export namespace concurrent { + export type ResultSetGroup = InstanceType; + export type executeConcurrent = typeof concurrent.executeConcurrent; + export type Options = _Options; +} + +export namespace datastax { + export namespace graph { + export type asDouble = typeof datastax.graph.asDouble; + export type asFloat = typeof datastax.graph.asFloat; + export type asInt = typeof datastax.graph.asInt; + export type asTimestamp = typeof datastax.graph.asTimestamp; + export type asUdt = typeof datastax.graph.asUdt; + export type direction = typeof datastax.graph.direction; + export type Edge = InstanceType; + export type Element = InstanceType; + export type GraphResultSet = InstanceType; + export type Path = InstanceType; + export type Property = InstanceType; + export type t = typeof datastax.graph.t; + export type Vertex = InstanceType; + export type VertexProperty = InstanceType; + } + export namespace search { + export type DateRange = InstanceType; + export type DateRangeBound = InstanceType; + export type dateRangePrecision = typeof datastax.search.dateRangePrecision; + } +} + +export namespace geometry { + export type LineString = InstanceType; + export type Point = InstanceType; + export type Polygon = InstanceType; +} + +type _MappingExecutionOptions = MappingExecutionOptions; +type _MappingOptions = MappingOptions; +type _FindDocInfo = FindDocInfo; +type _UpdateDocInfo = UpdateDocInfo; +type _RemoveDocInfo = RemoveDocInfo; +type _ModelOptions = ModelOptions; +type _ModelColumnOptions = ModelColumnOptions; +type _ModelTables = ModelTables; +type _QueryOperator = QueryOperator; +type _QueryAssignment = QueryAssignment; + +export namespace mapping { + export type TableMappings = InstanceType; + export type DefaultTableMappings = InstanceType; + export type UnderscoreCqlToCamelCaseMappings = InstanceType; + export type Result = InstanceType; + export type MappingExecutionOptions = _MappingExecutionOptions; + export type ModelTables = _ModelTables; + export type Mapper = InstanceType; + export type MappingOptions = _MappingOptions; + export type FindDocInfo = _FindDocInfo; + export type UpdateDocInfo = _UpdateDocInfo; + export type RemoveDocInfo = _RemoveDocInfo; + export type ModelOptions = _ModelOptions; + export type ModelColumnOptions = _ModelColumnOptions; + export type ModelBatchItem = InstanceType; + export type ModelBatchMapper = InstanceType; + export type ModelMapper = InstanceType; + export namespace q{ + export type QueryOperator = _QueryOperator; + export type QueryAssignment = _QueryAssignment; + export type in_ = typeof mapping.q.in_; + export type gt = typeof mapping.q.gt; + export type gte = typeof mapping.q.gte; + export type lt = typeof mapping.q.lt; + export type lte = typeof mapping.q.lte; + export type notEq = typeof mapping.q.notEq; + export type and = typeof mapping.q.and; + export type incr = typeof mapping.q.incr; + export type decr = typeof mapping.q.decr; + export type append = typeof mapping.q.append; + export type prepend = typeof mapping.q.prepend; + export type remove = typeof mapping.q.remove; + } +} + +type 
_IndexKind = IndexKind; +type _DataTypeInfo = DataTypeInfo; +type _ColumnInfo = ColumnInfo; +type _QueryTrace = QueryTrace; +type _Udt = Udt; +export namespace metadata { + export type Aggregate = InstanceType; + export type ClientState = InstanceType; + export type DataTypeInfo = _DataTypeInfo; + export type ColumnInfo = _ColumnInfo; + export type IndexKind = _IndexKind; + export type Index = InstanceType; + export type DataCollection = InstanceType; + export type MaterializedView = InstanceType; + export type TableMetadata = InstanceType; + export type QueryTrace = _QueryTrace; + export type SchemaFunction = InstanceType; + export type Udt = _Udt; + export type Metadata = InstanceType; +} + +export namespace metrics{ + export type ClientMetrics = InstanceType; + export type DefaultMetrics = InstanceType; +} + +type _DecisionInfo = DecisionInfo; +type _OperationInfo = OperationInfo; +export namespace policies{ + export type defaultAddressTranslator = typeof defaultAddressTranslator; + export type defaultLoadBalancingPolicy = typeof defaultLoadBalancingPolicy; + export type defaultReconnectionPolicy = typeof defaultReconnectionPolicy; + export type defaultRetryPolicy = typeof defaultRetryPolicy; + export type defaultSpeculativeExecutionPolicy = typeof defaultSpeculativeExecutionPolicy; + export type defaultTimestampGenerator = typeof defaultTimestampGenerator; + export namespace addressResolution{ + export type AddressTranslator = InstanceType; + export type EC2MultiRegionTranslator = InstanceType; + } + export namespace loadBalancing{ + export type LoadBalancingPolicy = InstanceType; + export type DCAwareRoundRobinPolicy = InstanceType; + export type TokenAwarePolicy = InstanceType; + export type AllowListPolicy = InstanceType; + export type WhiteListPolicy = InstanceType; + export type RoundRobinPolicy = InstanceType; + export type DefaultLoadBalancingPolicy = InstanceType; + } + export namespace reconnection{ + export type ReconnectionPolicy = InstanceType; + export type ConstantReconnectionPolicy = InstanceType; + export type ExponentialReconnectionPolicy = InstanceType; + } + export namespace retry{ + export type DecisionInfo = _DecisionInfo; + export type OperationInfo = _OperationInfo; + export type IdempotenceAwareRetryPolicy = InstanceType; + export type FallthroughRetryPolicy = InstanceType; + export type RetryPolicy = InstanceType; + export namespace RetryDecision{ + export type retryDecision = RetryPolicy.retryDecision; + } + export namespace speculativeExecution{ + export type ConstantSpeculativeExecutionPolicy = InstanceType; + export type NoSpeculativeExecutionPolicy = InstanceType; + export type SpeculativeExecutionPolicy = InstanceType; + } + export namespace timestampGeneration{ + export type TimestampGenerator = InstanceType; + export type MonotonicTimestampGenerator = InstanceType; + } + } +} + +export namespace tracker{ + export type RequestTracker = InstanceType; + export type RequestLogger = InstanceType; +} + +export namespace types { + export type Long = InstanceType; + export type consistencies = typeof types.consistencies; + export type dataTypes = typeof types.dataTypes; + export type distance = typeof types.distance; + export type responseErrorCodes = typeof types.responseErrorCodes; + export type protocolVersion = typeof types.protocolVersion; + export type unset = Readonly<{readonly unset: true; }>; + export type BigDecimal = InstanceType; + export type Duration = InstanceType; + export type InetAddress = InstanceType; + export type Integer = InstanceType; + 
export type LocalDate = InstanceType; + export type LocalTime = InstanceType; + export type ResultSet = InstanceType; + export type ResultStream = InstanceType; + export type Row = InstanceType; + export type TimeUuid = InstanceType; + export type Tuple = InstanceType; + export type Uuid = InstanceType; + export type Vector = InstanceType; +} + +export namespace errors { + export type ArgumentError = InstanceType; + export type AuthenticationError = InstanceType; + export type BusyConnectionError = InstanceType; + export type DriverError = InstanceType; + export type DriverInternalError = InstanceType; + export type NoHostAvailableError = InstanceType; + export type NotSupportedError = InstanceType; + export type OperationTimedOutError = InstanceType; + export type ResponseError = InstanceType; + export type VIntOutOfRangeException = InstanceType; +} + +export namespace token{ + export type Token = InstanceType; + export type TokenRange = InstanceType; +} \ No newline at end of file diff --git a/etc/cassandra-driver.api.md b/etc/cassandra-driver.api.md new file mode 100644 index 000000000..6d0fb248f --- /dev/null +++ b/etc/cassandra-driver.api.md @@ -0,0 +1,3118 @@ +## API Report File for "cassandra-driver" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import { ConnectionOptions } from 'tls'; +import EventEmitter from 'events'; +import { EventEmitter as EventEmitter_2 } from 'stream'; +import { default as Long } from 'long'; +import * as Long_2 from 'long'; +import Long__default from 'long'; +import { Readable } from 'stream'; +import { Socket } from 'net'; + +// @public (undocumented) +export const addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; +}; + +// @public +export class AddressTranslator { + translate(address: string, port: number, callback: Function): void; +} + +// @public +export class Aggregate { + // @internal + constructor(); + argumentTypes: Array<{ + code: number; + info?: (object | Array | string); + }>; + deterministic: boolean; + finalFunction: string; + initCondition: string; + // @internal (undocumented) + initConditionRaw: any; + keyspaceName: string; + // @internal + name: string; + returnType: { + code: number; + info?: (object | Array | string); + }; + signature: Array; + stateFunction: string; + stateType: { + code: number; + info?: (object | Array | string); + }; +} + +// @public +export class AllowListPolicy extends LoadBalancingPolicy { + constructor(childPolicy: LoadBalancingPolicy, allowList: Array); + getDistance(host: Host): distance; + getOptions(): Map; + // Warning: (ae-forgotten-export) The symbol "EmptyCallback" needs to be exported by the entry point cassandra-rollup.d.ts + // + // (undocumented) + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + // @internal + newQueryPlan(keyspace: string, info: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export class ArgumentError extends DriverError { + constructor(message: string); +} + +// @public +export function asDouble(value: number): object; + +// @public +export function asFloat(value: number): object; + +// @public +export function asInt(value: number): object; + +// @public +export function asTimestamp(value: Date): object; + +// @public +export function asUdt(value: object, udtInfo: { + name: string; + keyspace: string; + fields: Array; +}): object; + +// @public +export const auth: { 
+ Authenticator: typeof Authenticator; + AuthProvider: typeof AuthProvider; + DseGssapiAuthProvider: typeof DseGssapiAuthProvider; + DsePlainTextAuthProvider: typeof DsePlainTextAuthProvider; + NoAuthProvider: typeof NoAuthProvider; + PlainTextAuthProvider: typeof PlainTextAuthProvider; +}; + +// @public +export class AuthenticationError extends DriverError { + constructor(message: string); + // (undocumented) + additionalInfo: ResponseError; +} + +// @public +export class Authenticator { + evaluateChallenge(challenge: Buffer, callback: Function): void; + initialResponse(callback: Function): void; + onAuthenticationSuccess(token?: Buffer): void; +} + +// @public +export class AuthProvider { + newAuthenticator(endpoint: string, name: string): Authenticator; +} + +// @public +export class BigDecimal { + constructor(unscaledValue: Integer | number, scale: number); + add(other: BigDecimal): BigDecimal; + compare(other: BigDecimal): number; + equals(other: BigDecimal): boolean; + static fromBuffer(buf: Buffer): BigDecimal; + static fromNumber(value: number): BigDecimal; + static fromString(value: string): BigDecimal; + greaterThan(other: BigDecimal): boolean; + // @internal (undocumented) + inspect(): string; + isNegative(): boolean; + isZero(): boolean; + // (undocumented) + notEquals(other: BigDecimal): boolean; + subtract(other: BigDecimal): BigDecimal; + static toBuffer(value: BigDecimal): Buffer; + toJSON(): string; + toNumber(): number; + toString(): string; +} + +// @public +export class BusyConnectionError extends DriverError { + constructor(address: string, maxRequestsPerConnection: number, connectionLength: number); +} + +// @public (undocumented) +const cassandra: { + Client: typeof Client; + ExecutionProfile: typeof ExecutionProfile; + ExecutionOptions: typeof ExecutionOptions; + types: { + getDataTypeNameByCode: typeof getDataTypeNameByCode; + FrameHeader: typeof FrameHeader; + generateTimestamp: typeof generateTimestamp; + opcodes: { + error: number; + startup: number; + ready: number; + authenticate: number; + credentials: number; + options: number; + supported: number; + query: number; + result: number; + prepare: number; + execute: number; + register: number; + event: number; + batch: number; + authChallenge: number; + authResponse: number; + authSuccess: number; + cancel: number; + isInRange: (code: any) => boolean; + }; + consistencies: typeof consistencies; + consistencyToString: {}; + dataTypes: typeof dataTypes; + distance: typeof distance; + frameFlags: { + compression: number; + tracing: number; + customPayload: number; + warning: number; + }; + protocolEvents: { + topologyChange: string; + statusChange: string; + schemaChange: string; + }; + protocolVersion: typeof protocolVersion; + responseErrorCodes: typeof responseErrorCodes; + resultKind: { + voidResult: number; + rows: number; + setKeyspace: number; + prepared: number; + schemaChange: number; + }; + timeuuid: typeof timeuuid; + uuid: typeof uuid; + BigDecimal: typeof BigDecimal; + Duration: typeof Duration; + InetAddress: typeof InetAddress; + Integer: typeof Integer; + LocalDate: typeof LocalDate; + LocalTime: typeof LocalTime; + Long: typeof Long_2.default; + ResultSet: typeof ResultSet; + ResultStream: typeof ResultStream; + Row: typeof Row; + DriverError: typeof DriverError; + TimeoutError: typeof TimeoutError; + TimeUuid: typeof TimeUuid; + Tuple: typeof Tuple; + Uuid: typeof Uuid; + unset: Readonly<{ + readonly unset: true; + }>; + Vector: typeof Vector; + }; + errors: { + ArgumentError: typeof 
ArgumentError; + AuthenticationError: typeof AuthenticationError; + BusyConnectionError: typeof BusyConnectionError; + DriverError: typeof DriverError; + OperationTimedOutError: typeof OperationTimedOutError; + DriverInternalError: typeof DriverInternalError; + NoHostAvailableError: typeof NoHostAvailableError; + NotSupportedError: typeof NotSupportedError; + ResponseError: typeof ResponseError; + VIntOutOfRangeException: typeof VIntOutOfRangeException; + }; + policies: { + addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; + }; + loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + WhiteListPolicy: typeof WhiteListPolicy; + }; + reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; + }; + retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; + }; + speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; + }; + timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; + }; + defaultAddressTranslator: () => AddressTranslator; + defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + defaultRetryPolicy: () => RetryPolicy; + defaultReconnectionPolicy: () => ReconnectionPolicy; + defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + defaultTimestampGenerator: () => TimestampGenerator; + }; + auth: { + NoAuthProvider: typeof NoAuthProvider; + Authenticator: typeof Authenticator; + AuthProvider: typeof AuthProvider; + DseGssapiAuthProvider: typeof DseGssapiAuthProvider; + DsePlainTextAuthProvider: typeof DsePlainTextAuthProvider; + PlainTextAuthProvider: typeof PlainTextAuthProvider; + }; + mapping: { + Mapper: typeof Mapper; + ModelMapper: typeof ModelMapper; + ModelBatchMapper: typeof ModelBatchMapper; + ModelBatchItem: typeof ModelBatchItem; + Result: typeof Result; + TableMappings: typeof TableMappings; + DefaultTableMappings: typeof DefaultTableMappings; + UnderscoreCqlToCamelCaseMappings: typeof UnderscoreCqlToCamelCaseMappings; + q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; + }; + }; + tracker: { + RequestTracker: typeof RequestTracker; + RequestLogger: typeof RequestLogger; + }; + metrics: { + ClientMetrics: typeof ClientMetrics; + DefaultMetrics: typeof DefaultMetrics; + }; + concurrent: { + executeConcurrent: typeof executeConcurrent; + 
ResultSetGroup: typeof ResultSetGroup; + }; + token: { + Token: typeof Token; + TokenRange: typeof TokenRange; + }; + metadata: { + Metadata: typeof Metadata; + }; + Encoder: typeof Encoder; + geometry: { + Point: typeof Point; + LineString: typeof LineString; + Polygon: typeof Polygon; + Geometry: typeof Geometry; + }; + datastax: { + graph: { + getCustomTypeSerializers: typeof getCustomTypeSerializers; + GraphTypeWrapper: typeof GraphTypeWrapper; + UdtGraphWrapper: typeof UdtGraphWrapper; + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: { + typeName: any; + elementName: any; + toString(): any; + }; + in: { + typeName: any; + elementName: any; + toString(): any; + }; + out: { + typeName: any; + elementName: any; + toString(): any; + }; + in_: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + GraphResultSet: typeof GraphResultSet; + t: { + id: { + typeName: any; + elementName: any; + toString(): any; + }; + key: { + typeName: any; + elementName: any; + toString(): any; + }; + label: { + typeName: any; + elementName: any; + toString(): any; + }; + value: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + }; + search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; + }; + }; + defaultOptions: () => ClientOptions; + version: string; +}; +export default cassandra; + +// Warning: (ae-internal-missing-underscore) The name "checkServerIdentity" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal (undocumented) +export function checkServerIdentity(cert: { + subject: { + CN: string; + }; + subjectaltname?: string; +}, sniAddress: string): Error | undefined; + +// @public +export class Client extends EventEmitter.EventEmitter { + // Warning: (ae-forgotten-export) The symbol "DseClientOptions" needs to be exported by the entry point cassandra-rollup.d.ts + constructor(options: DseClientOptions); + batch(queries: Array, options?: QueryOptions): Promise; + // (undocumented) + batch(queries: Array, options: QueryOptions, callback: ValueCallback): void; + // (undocumented) + batch(queries: Array, callback: ValueCallback): void; + connect(): Promise; + // (undocumented) + connect(callback: EmptyCallback): void; + // Warning: (ae-forgotten-export) The symbol "ControlConnection" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + controlConnection: ControlConnection; + eachRow(query: string, params: ArrayOrObject, options: QueryOptions, rowCallback: (n: number, row: Row) => void, callback?: ValueCallback): void; + // (undocumented) + eachRow(query: string, params: ArrayOrObject, rowCallback: (n: number, row: Row) => void, callback?: ValueCallback): void; + // (undocumented) + eachRow(query: string, rowCallback: (n: number, row: Row) => void): void; + // Warning: (ae-forgotten-export) The symbol "ArrayOrObject" needs to be exported by the entry point cassandra-rollup.d.ts + execute(query: string, params?: ArrayOrObject, options?: QueryOptions): Promise; + // Warning: (ae-forgotten-export) The symbol 
"ValueCallback" needs to be exported by the entry point cassandra-rollup.d.ts + // + // (undocumented) + execute(query: string, params: ArrayOrObject, options: QueryOptions, callback: ValueCallback): void; + // (undocumented) + execute(query: string, params: ArrayOrObject, callback: ValueCallback): void; + // (undocumented) + execute(query: string, callback: ValueCallback): void; + // Warning: (ae-forgotten-export) The symbol "GraphQueryOptions" needs to be exported by the entry point cassandra-rollup.d.ts + executeGraph(traversal: string, parameters: { + [name: string]: any; + } | undefined, options: GraphQueryOptions, callback: ValueCallback): void; + // (undocumented) + executeGraph(traversal: string, parameters: { + [name: string]: any; + } | undefined, callback: ValueCallback): void; + // (undocumented) + executeGraph(traversal: string, callback: ValueCallback): void; + // (undocumented) + executeGraph(traversal: string, parameters?: { + [name: string]: any; + }, options?: GraphQueryOptions): Promise; + getReplicas(keyspace: string, token: Buffer): Array; + getState(): ClientState; + // Warning: (ae-forgotten-export) The symbol "Connection" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal + handleSchemaAgreementAndRefresh(connection: Connection, event: any): Promise; + hosts: HostMap; + keyspace: string; + // @internal (undocumented) + log: any; + metadata: Metadata; + metrics: ClientMetrics; + // @internal (undocumented) + options: ClientOptions; + // Warning: (ae-forgotten-export) The symbol "ProfileManager" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + profileManager: ProfileManager; + shutdown(): Promise; + // (undocumented) + shutdown(callback: EmptyCallback): void; + stream(query: string, params?: ArrayOrObject, options?: QueryOptions, callback?: EmptyCallback): ResultStream; +} + +// @public +export class ClientMetrics { + onAuthenticationError(e: AuthenticationError | Error): void; + onClientTimeoutError(e: OperationTimedOutError): void; + onClientTimeoutRetry(e: Error): void; + onConnectionError(e: Error): void; + onIgnoreError(e: Error): void; + onOtherError(e: Error): void; + onOtherErrorRetry(e: Error): void; + onReadTimeoutError(e: ResponseError): void; + onReadTimeoutRetry(e: Error): void; + onResponse(latency: Array): void; + onSpeculativeExecution(): void; + onSuccessfulResponse(latency: Array): void; + onUnavailableError(e: ResponseError): void; + onUnavailableRetry(e: Error): void; + onWriteTimeoutError(e: ResponseError): void; + onWriteTimeoutRetry(e: Error): void; +} + +// @public +export interface ClientOptions { + // @internal (undocumented) + applicationName?: string; + // @internal (undocumented) + applicationVersion?: string; + // (undocumented) + authProvider?: AuthProvider; + // (undocumented) + cloud?: { + secureConnectBundle: string | URL; + }; + // (undocumented) + contactPoints?: string[]; + // (undocumented) + credentials?: { + username: string; + password: string; + }; + // (undocumented) + encoding?: { + map?: Function; + set?: Function; + copyBuffer?: boolean; + useUndefinedAsUnset?: boolean; + useBigIntAsLong?: boolean; + useBigIntAsVarint?: boolean; + }; + // @internal (undocumented) + id?: Uuid; + // (undocumented) + isMetadataSyncEnabled?: boolean; + // (undocumented) + keyspace?: string; + // (undocumented) + localDataCenter?: string; + // @internal (undocumented) + logEmitter?: any; + // (undocumented) + maxPrepared?: number; + // (undocumented) + metrics?: 
ClientMetrics; + // @internal (undocumented) + monitorReporting?: { + enabled?: boolean; + }; + // (undocumented) + policies?: { + addressResolution?: AddressTranslator; + loadBalancing?: LoadBalancingPolicy; + reconnection?: ReconnectionPolicy; + retry?: RetryPolicy; + speculativeExecution?: SpeculativeExecutionPolicy; + timestampGeneration?: TimestampGenerator; + }; + // (undocumented) + pooling?: { + coreConnectionsPerHost?: { + [key: number]: number; + }; + heartBeatInterval?: number; + maxRequestsPerConnection?: number; + warmup?: boolean; + }; + // (undocumented) + prepareOnAllHosts?: boolean; + // (undocumented) + profiles?: ExecutionProfile[]; + // (undocumented) + promiseFactory?: (handler: (callback: (err: Error, result?: any) => void) => void) => Promise; + // (undocumented) + protocolOptions?: { + maxSchemaAgreementWaitSeconds?: number; + maxVersion?: number; + noCompact?: boolean; + port?: number; + }; + // (undocumented) + queryOptions?: QueryOptions; + // (undocumented) + refreshSchemaDelay?: number; + // (undocumented) + rePrepareOnUp?: boolean; + // (undocumented) + requestTracker?: RequestTracker; + // @internal (undocumented) + sni?: { + address?: string; + port?: string; + addressResolver?: AddressResolver; + }; + // (undocumented) + socketOptions?: { + coalescingThreshold?: number; + connectTimeout?: number; + defunctReadTimeoutThreshold?: number; + keepAlive?: boolean; + keepAliveDelay?: number; + readTimeout?: number; + tcpNoDelay?: boolean; + }; + // (undocumented) + sslOptions?: ConnectionOptions; +} + +// @public +export class ClientState { + // @internal + constructor(hosts: Array, openConnections: { + [key: string]: number; + }, inFlightQueries: { + [key: string]: number; + }); + // @internal + static from(client: Client): ClientState; + getConnectedHosts(): Array; + getInFlightQueries(host: Host): number; + getOpenConnections(host: Host): number; + toString(): string; +} + +// @public (undocumented) +export interface ColumnInfo { + // (undocumented) + name: string; + // (undocumented) + type: DataTypeInfo; +} + +// @public (undocumented) +export const concurrent: { + executeConcurrent: typeof executeConcurrent; + ResultSetGroup: typeof ResultSetGroup; +}; + +// @public +export enum consistencies { + // (undocumented) + all = 5, + // (undocumented) + any = 0, + // (undocumented) + eachQuorum = 7, + // (undocumented) + localOne = 10, + // (undocumented) + localQuorum = 6, + // (undocumented) + localSerial = 9, + // (undocumented) + one = 1, + // (undocumented) + quorum = 4, + // (undocumented) + serial = 8, + // (undocumented) + three = 3, + // (undocumented) + two = 2 +} + +// @public +export const consistencyToString: {}; + +// @public +export class ConstantReconnectionPolicy extends ReconnectionPolicy { + constructor(delay: number); + getOptions(): Map; + newSchedule(): Iterator; +} + +// @public +export class ConstantSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + constructor(delay: number, maxSpeculativeExecutions: number); + getOptions(): Map; + // (undocumented) + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; +} + +// @public +export class DataCollection extends EventEmitter.EventEmitter { + // @internal + constructor(name: string); + bloomFilterFalsePositiveChance: number; + caching: string; + clusteringKeys: ColumnInfo[]; + clusteringOrder: string[]; + columns: ColumnInfo[]; + columnsByName: { + [key: string]: ColumnInfo; + }; + comment: string; + compactionClass: string; + 
compactionOptions: { + [option: string]: any; + }; + compression: { + class?: string; + [option: string]: any; + }; + crcCheckChance?: number; + defaultTtl: number; + extensions: { + [option: string]: any; + }; + gcGraceSeconds: number; + localReadRepairChance: number; + maxIndexInterval?: number; + minIndexInterval?: number; + name: string; + nodesync?: object; + partitionKeys: ColumnInfo[]; + populateCacheOnFlush: boolean; + readRepairChance: number; + speculativeRetry: string; +} + +// @public +export const datastax: { + graph: { + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: { + typeName: any; + elementName: any; + toString(): any; + }; + in: { + typeName: any; + elementName: any; + toString(): any; + }; + out: { + typeName: any; + elementName: any; + toString(): any; + }; + in_: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + getCustomTypeSerializers: typeof getCustomTypeSerializers; + GraphResultSet: typeof GraphResultSet; + GraphTypeWrapper: typeof GraphTypeWrapper; + t: { + id: { + typeName: any; + elementName: any; + toString(): any; + }; + key: { + typeName: any; + elementName: any; + toString(): any; + }; + label: { + typeName: any; + elementName: any; + toString(): any; + }; + value: { + typeName: any; + elementName: any; + toString(): any; + }; + }; + UdtGraphWrapper: typeof UdtGraphWrapper; + }; + search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; + }; +}; + +// Warning: (ae-forgotten-export) The symbol "SingleColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "CustomSimpleColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "MapColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "TupleColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "ListSetColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "VectorColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "OtherCustomColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "UdtColumnInfo" needs to be exported by the entry point cassandra-rollup.d.ts +// Warning: (ae-forgotten-export) The symbol "TupleListColumnInfoWithoutSubtype" needs to be exported by the entry point cassandra-rollup.d.ts +// +// @public (undocumented) +export type DataTypeInfo = SingleColumnInfo | CustomSimpleColumnInfo | MapColumnInfo | TupleColumnInfo | ListSetColumnInfo | VectorColumnInfo | OtherCustomColumnInfo | UdtColumnInfo | TupleListColumnInfoWithoutSubtype; + +// @public +export enum dataTypes { + // (undocumented) + ascii = 1, + // (undocumented) + bigint = 2, + // (undocumented) + blob = 3, + // (undocumented) + boolean = 4, + // (undocumented) + counter = 5, + // (undocumented) + 
custom = 0, + // (undocumented) + date = 17, + // (undocumented) + decimal = 6, + // (undocumented) + double = 7, + // (undocumented) + duration = 21, + // (undocumented) + float = 8, + // (undocumented) + inet = 16, + // (undocumented) + int = 9, + // (undocumented) + list = 32, + // (undocumented) + map = 33, + // (undocumented) + set = 34, + // (undocumented) + smallint = 19, + // (undocumented) + text = 10, + // (undocumented) + time = 18, + // (undocumented) + timestamp = 11, + // (undocumented) + timeuuid = 15, + // (undocumented) + tinyint = 20, + // (undocumented) + tuple = 49, + // (undocumented) + udt = 48, + // (undocumented) + uuid = 12, + // (undocumented) + varchar = 13, + // (undocumented) + varint = 14 +} + +// @public (undocumented) +export namespace dataTypes { + export function getByName(name: string): DataTypeInfo; +} + +// @public +export class DateRange { + constructor(lowerBound: DateRangeBound, upperBound?: DateRangeBound); + equals(other: DateRange): boolean; + static fromBuffer(buffer: Buffer): DateRange; + static fromString(dateRangeString: string): DateRange; + // (undocumented) + lowerBound: DateRangeBound; + // (undocumented) + toBuffer(): any; + toString(): string; + // (undocumented) + upperBound: DateRangeBound; +} + +// @public +export class DateRangeBound { + constructor(date: Date, precision: number); + // (undocumented) + date: Date; + equals(other: DateRangeBound): boolean; + static fromString(boundaryString: string): DateRangeBound; + // @internal (undocumented) + isUnbounded(): boolean; + // (undocumented) + precision: number; + static toLowerBound(bound: DateRangeBound): DateRangeBound; + toString(): string; + static toUpperBound(bound: DateRangeBound): DateRangeBound; + // @internal (undocumented) + static unbounded: Readonly; +} + +// @public +export const dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; +}; + +// @public +export class DCAwareRoundRobinPolicy extends LoadBalancingPolicy { + constructor(localDc?: string); + getDistance(host: Host): distance; + // (undocumented) + getOptions(): Map; + // (undocumented) + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + // @internal (undocumented) + localDc: string; + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export type DecisionInfo = { + decision: number; + consistency?: consistencies; + useCurrentHost?: boolean; +}; + +// @public +export const defaultAddressTranslator: () => AddressTranslator; + +// @public +export class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { + constructor(options?: { + localDc?: string; + filter?: (host: Host) => boolean; + } | string); + getDistance(host: Host): distance; + getOptions(): Map; + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + // @override + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export const defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + +// @public +export class DefaultMetrics extends ClientMetrics { + constructor(); + // (undocumented) + errors: EventEmitter & { + authentication: EventEmitter; + clientTimeout: EventEmitter; + connection: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + 
writeTimeout: EventEmitter; + }; + // (undocumented) + ignoredErrors: EventEmitter; + // @override (undocumented) + onAuthenticationError(e: Error | AuthenticationError): void; + // @override (undocumented) + onClientTimeoutError(e: OperationTimedOutError): void; + // @override (undocumented) + onClientTimeoutRetry(e: Error): void; + // @override (undocumented) + onConnectionError(e: Error): void; + // @override (undocumented) + onIgnoreError(e: Error): void; + // @override (undocumented) + onOtherError(e: Error): void; + // @override (undocumented) + onOtherErrorRetry(e: Error): void; + // @override (undocumented) + onReadTimeoutError(e: ResponseError): void; + // @override (undocumented) + onReadTimeoutRetry(e: Error): void; + // @override (undocumented) + onResponse(latency: number[]): void; + // @override (undocumented) + onSpeculativeExecution(): void; + // @override (undocumented) + onSuccessfulResponse(latency: number[]): void; + // @override (undocumented) + onUnavailableError(e: Error): void; + // @override (undocumented) + onUnavailableRetry(e: Error): void; + // @override (undocumented) + onWriteTimeoutError(e: ResponseError): void; + // @override (undocumented) + onWriteTimeoutRetry(e: Error): void; + // (undocumented) + responses: EventEmitter & { + success: EventEmitter; + }; + // (undocumented) + retries: EventEmitter & { + clientTimeout: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + writeTimeout: EventEmitter; + }; + // (undocumented) + speculativeExecutions: EventEmitter & { + increment: EventEmitter; + }; +} + +// @public (undocumented) +export const defaultOptions: () => ClientOptions; + +// @public +export const defaultReconnectionPolicy: () => ReconnectionPolicy; + +// @public +export const defaultRetryPolicy: () => RetryPolicy; + +// @public +export const defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + +// @public +export class DefaultTableMappings extends TableMappings { + constructor(); + // @override (undocumented) + getColumnName(propName: string): string; + // @override (undocumented) + getPropertyName(columnName: string): string; + newObjectInstance(): object; +} + +// @public +export const defaultTimestampGenerator: () => TimestampGenerator; + +// @public +export const direction: { + both: EnumValue; + in: EnumValue; + out: EnumValue; + in_: EnumValue; +}; + +// @public +export enum distance { + // (undocumented) + ignored = 2, + // (undocumented) + local = 0, + // (undocumented) + remote = 1 +} + +// @public +export class DriverError extends Error { + constructor(message: string); + // (undocumented) + info: string; + // (undocumented) + innerError: any; + // (undocumented) + isSocketError: boolean; + // (undocumented) + requestNotWritten?: boolean; +} + +// @public +export class DriverInternalError extends DriverError { + constructor(message: string); +} + +// @public +export class DseGssapiAuthProvider extends AuthProvider { + constructor(gssOptions: { + authorizationId?: string; + service?: string; + hostNameResolver?: Function; + user?: string; + }); + // @override + newAuthenticator(endpoint: string, name: string): Authenticator; + static reverseDnsResolver(ip: string, callback: Function): void; + static useIpResolver(ip: string, callback: Function): void; +} + +// @public +export class DsePlainTextAuthProvider extends AuthProvider { + constructor(username: string, password: string, authorizationId?: string); + // @override + newAuthenticator(endpoint: string, name: string): 
Authenticator; +} + +// @public +export class Duration { + constructor(months: number, days: number, nanoseconds: number | Long__default); + equals(other: Duration): boolean; + static fromBuffer(buffer: Buffer): Duration; + static fromString(input: string): Duration; + toBuffer(): Buffer; + toString(): string; +} + +// @public +export class EC2MultiRegionTranslator extends AddressTranslator { + logError(address: string, err: Error): void; + translate(address: string, port: number, callback: Function): void; +} + +// @public +export class Edge extends Element { + constructor(id: any, outV: Vertex, outVLabel: string, label: string, inV: Vertex, inVLabel?: string, properties?: { + [s: string]: Property; + }); + // (undocumented) + inV: Vertex; + // (undocumented) + inVLabel: string; + // (undocumented) + outV: Vertex; + // (undocumented) + outVLabel: string; + // (undocumented) + properties: { + [s: string]: any; + }; +} + +// @public +export abstract class Element { + constructor(id: any, label: string); + // (undocumented) + id: any; + // (undocumented) + label: string; +} + +// @public (undocumented) +export class Encoder { + constructor(protocolVersion: number, options: ClientOptions); + decode: (buffer: Buffer, type: DataTypeInfo) => any; + encode: (value: any, typeInfo: DataTypeInfo | number | string) => Buffer; + // @internal + static guessDataType: (value: any) => DataTypeInfo | null; + // @internal + parseFqTypeName: (typeName: string, startIndex?: number, length?: number) => DataTypeInfo; + // @internal + parseKeyTypes: (typesString: string) => { + types: Array; + isComposite: boolean; + hasCollections: boolean; + }; + // @internal + parseTypeName: (keyspace: string, typeName: string, startIndex: number, length: number | null, udtResolver: Function) => Promise; + // @internal (undocumented) + protocolVersion: number; + // @internal + setProtocolVersion: (value: number) => void; + // @internal + setRoutingKeyFromMeta: (meta: any, params: Array, execOptions: ExecutionOptions) => void; + // @internal + setRoutingKeyFromUser: (params: Array, execOptions: ExecutionOptions, keys?: any) => void; +} + +// @public (undocumented) +export const errors: { + ArgumentError: typeof ArgumentError; + AuthenticationError: typeof AuthenticationError; + BusyConnectionError: typeof BusyConnectionError; + DriverError: typeof DriverError; + OperationTimedOutError: typeof OperationTimedOutError; + DriverInternalError: typeof DriverInternalError; + NoHostAvailableError: typeof NoHostAvailableError; + NotSupportedError: typeof NotSupportedError; + ResponseError: typeof ResponseError; + VIntOutOfRangeException: typeof VIntOutOfRangeException; +}; + +// @public +export function executeConcurrent(client: Client, query: string, parameters: any[][] | Readable, options?: Options): Promise; + +// @public (undocumented) +export function executeConcurrent(client: Client, queries: Array<{ + query: string; + params: any[]; +}>, options?: Options): Promise; + +// @public +export class ExecutionOptions { + constructor(); + // @internal + static empty(): ExecutionOptions; + getCaptureStackTrace(): boolean; + getConsistency(): consistencies; + getCustomPayload(): { + [key: string]: any; + }; + getFetchSize(): number; + getFixedHost(): Host; + getHints(): string[] | string[][]; + getKeyspace(): string; + getLoadBalancingPolicy(): LoadBalancingPolicy; + // @internal + getOrGenerateTimestamp(): Long__default | null; + getPageState(): Buffer; + // @internal + getPreferredHost(): Host; + getRawQueryOptions(): QueryOptions; + 
getReadTimeout(): number; + getRetryPolicy(): RetryPolicy; + // @internal + getRoutingIndexes(): Array; + getRoutingKey(): Buffer | Array; + // @internal + getRoutingNames(): any; + // @internal + getRowCallback(): any; + getSerialConsistency(): consistencies; + getTimestamp(): number | Long__default | undefined | null; + isAutoPage(): boolean; + isBatchCounter(): boolean; + isBatchLogged(): boolean; + isIdempotent(): boolean; + isPrepared(): boolean; + isQueryTracing(): boolean; + // @internal (undocumented) + setHints(hints: string[]): any; + // @internal + setKeyspace(keyspace: string): any; + // @internal (undocumented) + setPageState(pageState: Buffer): any; + // @internal + setPreferredHost(host: Host): any; + // @internal + setRoutingIndexes(routingIndexes: Array): any; + // @internal + setRoutingKey(value: any): any; +} + +// @public +export class ExecutionProfile { + constructor(name: string, options?: { + consistency?: consistencies; + loadBalancing?: LoadBalancingPolicy; + readTimeout?: number; + retry?: RetryPolicy; + serialConsistency?: consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + }; + }); + consistency?: consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + results?: any; + }; + loadBalancing?: LoadBalancingPolicy; + name: string; + readTimeout?: number; + retry?: RetryPolicy; + serialConsistency?: consistencies; +} + +// @public +export class ExponentialReconnectionPolicy extends ReconnectionPolicy { + constructor(baseDelay: number, maxDelay: number, startWithNoDelay?: boolean); + getOptions(): Map; + newSchedule(): Iterator; +} + +// @public +export class FallthroughRetryPolicy extends RetryPolicy { + // Warning: (ae-forgotten-export) The symbol "OperationInfo" needs to be exported by the entry point cassandra-rollup.d.ts + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; +} + +// @public (undocumented) +export type FindDocInfo = { + fields?: string[]; + orderBy?: { + [key: string]: string; + }; + limit?: number; +}; + +// Warning: (ae-internal-missing-underscore) The name "frameFlags" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export const frameFlags: { + compression: number; + tracing: number; + customPayload: number; + warning: number; +}; + +// Warning: (ae-internal-missing-underscore) The name "FrameHeader" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal (undocumented) +export class FrameHeader { + constructor(version: number, flags: number, streamId: number, opcode: number, bodyLength: number); + // (undocumented) + bodyLength: number; + // (undocumented) + flags: number; + // (undocumented) + static fromBuffer(buf: Buffer, offset?: number): FrameHeader; + static getProtocolVersion(buffer: Buffer): number; + // (undocumented) + opcode: number; + static size(version: any): number; + // (undocumented) + 
streamId: number; + // (undocumented) + toBuffer(): Buffer; + // (undocumented) + version: number; +} + +// Warning: (ae-internal-missing-underscore) The name "generateTimestamp" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export function generateTimestamp(date: any, microseconds: any): Long__default; + +// @public (undocumented) +export class Geometry { + // @internal (undocumented) + static getEndianness(code: number): string; + // @internal + static readDouble(buffer: Buffer, endianness: string, offset: number): number; + // @internal + static readInt32(buffer: Buffer, endianness: string, offset: number): number; + // (undocumented) + static types: { + readonly Point2D: 1; + readonly LineString: 2; + readonly Polygon: 3; + }; + // @internal + useBESerialization(): boolean; + // @internal + writeDouble(val: number, buffer: Buffer, offset: number): void; + // @internal + writeEndianness(buffer: Buffer, offset: number): void; + // @internal + writeInt32(val: number, buffer: Buffer, offset: number): void; +} + +// @public +export const geometry: { + Point: typeof Point; + LineString: typeof LineString; + Polygon: typeof Polygon; + Geometry: typeof Geometry; +}; + +// Warning: (ae-internal-missing-underscore) The name "getCustomTypeSerializers" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal (undocumented) +export function getCustomTypeSerializers(): {}; + +// Warning: (ae-internal-missing-underscore) The name "getDataTypeNameByCode" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export function getDataTypeNameByCode(item: any): any; + +// @public (undocumented) +export const graph: { + Edge: typeof Edge; + Element: typeof Element; + Path: typeof Path; + Property: typeof Property; + Vertex: typeof Vertex; + VertexProperty: typeof VertexProperty; + asInt: typeof asInt; + asDouble: typeof asDouble; + asFloat: typeof asFloat; + asTimestamp: typeof asTimestamp; + asUdt: typeof asUdt; + direction: { + both: EnumValue; + in: EnumValue; + out: EnumValue; + in_: EnumValue; + }; + getCustomTypeSerializers: typeof getCustomTypeSerializers; + GraphResultSet: typeof GraphResultSet; + GraphTypeWrapper: typeof GraphTypeWrapper; + t: { + id: EnumValue; + key: EnumValue; + label: EnumValue; + value: EnumValue; + }; + UdtGraphWrapper: typeof UdtGraphWrapper; +}; + +// @public +export class GraphResultSet implements Iterable { + [Symbol.iterator](): Iterator; + constructor(result: ResultSet, rowParser?: Function); + first(): object | null; + forEach(callback: Function, thisArg?: object): void; + getTraversers(): IterableIterator; + // (undocumented) + info: typeof ResultSet.prototype.info; + // (undocumented) + length: number; + // (undocumented) + pageState: string; + toArray(): Array; + values(): Iterator; +} + +// Warning: (ae-internal-missing-underscore) The name "GraphTypeWrapper" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export class GraphTypeWrapper { + constructor(value: any, typeInfo: any); + // (undocumented) + typeInfo: any; + // (undocumented) + value: any; +} + +// @public +export class Host extends EventEmitter.EventEmitter { + // @internal + constructor(address: any, protocolVersion: any, options: any, metadata: any); + // (undocumented) + address: string; + // @internal + borrowConnection(previousConnection?: Connection): Connection; + canBeConsideredAsUp(): 
boolean; + // (undocumented) + cassandraVersion: string; + // @internal + checkHealth(connection: Connection): void; + // @internal + checkIsUp(): void; + // (undocumented) + datacenter: string; + // @internal (undocumented) + dseVersion: string; + // @internal + getActiveConnection(): Connection; + getCassandraVersion(): Array; + getDseVersion(): Array; + // @internal + getInFlight(): any; + // @internal + getResponseCount(): number; + // (undocumented) + hostId: Uuid; + // @internal + initializePool(): void; + isUp(): boolean; + // @internal (undocumented) + isUpSince: number; + // @internal (undocumented) + options: any; + // @internal (undocumented) + pool: any; + // (undocumented) + rack: string; + // @internal (undocumented) + reconnectionSchedule: any; + // @internal (undocumented) + removeFromPool(connection: Connection): void; + // @internal + setDistance(distance: number): number; + // @internal + setDown(): void; + // @internal + setProtocolVersion(value: number): void; + // @internal + setUp(clearReconnection?: boolean): void; + // @internal (undocumented) + shutdown(waitForPending?: boolean): Promise; + // (undocumented) + tokens: string[]; + // @internal + warmupPool(keyspace: string): any; + // @internal (undocumented) + workloads: readonly any[]; +} + +// @public +export class HostMap extends EventEmitter.EventEmitter { + // @internal + constructor(); + // @internal + clear(): Array; + forEach(callback: (value: Host, key: string) => void): void; + get(key: string): Host; + // @internal (undocumented) + inspect(): Map; + keys(): Array; + // (undocumented) + length: number; + // @internal @deprecated + push(k: any, v: any): void; + // @internal + remove(key: string): void; + // @internal + removeMultiple(keys: Array): void; + // @internal + set(key: string, value: Host): Host; + // @internal + slice(begin: number, end: number): Array; + // @internal (undocumented) + toJSON(): any; + values(): Array; +} + +// @public @deprecated +export class IdempotenceAwareRetryPolicy extends RetryPolicy { + // @deprecated + constructor(childPolicy?: RetryPolicy); + // (undocumented) + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + // (undocumented) + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + // (undocumented) + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; +} + +// @public +export class Index { + // @internal + constructor(name: string, target: string, kind: number | string, options: object); + // @internal @deprecated + static fromColumnRows(columnRows: Array, columnsByName: { + [key: string]: { + name: any; + type: any; + }; + }): Array; + // @internal @deprecated + static fromRows(indexRows: Array): Array; + isCompositesKind(): boolean; + isCustomKind(): boolean; + isKeysKind(): boolean; + kind: IndexKind; + name: string; + options: object; + target: string; +} + +// @public (undocumented) +export enum IndexKind { + // (undocumented) + composites = 2, + // (undocumented) + custom = 0, + // (undocumented) + keys = 1 +} + +// @public +export class InetAddress { + constructor(buffer: Buffer); + equals(other: InetAddress): boolean; + static fromString(value: string): InetAddress; + getBuffer(): Buffer; + // @internal + inspect(): string; + // 
(undocumented) + length: number; + toJSON(): string; + toString(encoding?: string): string; + // (undocumented) + version: number; +} + +// Warning: (ae-internal-missing-underscore) The name "init" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export function init(options: ClientOptions): Promise; + +// @public (undocumented) +export type InsertDocInfo = { + fields?: string[]; + ttl?: number; + ifNotExists?: boolean; +}; + +// @public +export class Integer { + constructor(bits: number[], sign: number); + abs(): Integer; + add(other: Integer): Integer; + and(other: Integer): Integer; + compare(other: Integer): number; + divide(other: Integer): Integer; + // (undocumented) + equals(other: Integer): boolean; + static fromBits(bits: number[]): Integer; + static fromBuffer(buf: Buffer): Integer; + static fromInt(value: number): Integer; + static fromNumber(value: number): Integer; + static fromString(str: string, opt_radix?: number): Integer; + getBits(index: number): number; + getBitsUnsigned(index: number): number; + getSign(): number; + // (undocumented) + greaterThan(other: Integer): boolean; + // (undocumented) + greaterThanOrEqual(other: Integer): boolean; + // @internal + inspect(): string; + isNegative(): boolean; + isOdd(): boolean; + isZero(): boolean; + // (undocumented) + lessThan(other: Integer): boolean; + // (undocumented) + lessThanOrEqual(other: Integer): boolean; + modulo(other: Integer): Integer; + multiply(other: Integer): Integer; + negate(): Integer; + not(): Integer; + // (undocumented) + notEquals(other: Integer): boolean; + static ONE: Integer; + or(other: Integer): Integer; + shiftLeft(numBits: number): Integer; + shiftRight(numBits: number): Integer; + shorten(numBits: number): Integer; + subtract(other: Integer): Integer; + static toBuffer(value: Integer): Buffer; + toInt(): number; + toJSON(): string; + toNumber(): number; + // @override (undocumented) + toString(opt_radix?: number): string; + xor(other: Integer): Integer; + static ZERO: Integer; +} + +// @public +export class LineString extends Geometry { + constructor(...points: Point[] | Point[][]); + equals(other: LineString): boolean; + static fromBuffer(buffer: Buffer): LineString; + static fromString(textValue: string): LineString; + // @internal + static parseSegments(textValue: string): Point[]; + // @internal (undocumented) + points: ReadonlyArray; + toBuffer(): Buffer; + toJSON(): object; + toString(): string; + // @internal + useBESerialization(): boolean; +} + +// @public (undocumented) +export const loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + WhiteListPolicy: typeof WhiteListPolicy; +}; + +// @public +export class LoadBalancingPolicy { + // (undocumented) + protected client: Client; + getDistance(host: Host): distance; + getOptions(): Map; + // (undocumented) + protected hosts: HostMap; + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + // @internal (undocumented) + localDc: string; + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export class LocalDate { + constructor(year: number, month?: number, day?: number); + compare(other: LocalDate): number; 
+ date: Date; + // (undocumented) + day: number; + equals(other: LocalDate): boolean; + static fromBuffer(buffer: Buffer): LocalDate; + static fromDate(date: Date): LocalDate; + static fromString(value: string): LocalDate; + // (undocumented) + inspect(): string; + // (undocumented) + month: number; + static now(): LocalDate; + toBuffer(): Buffer; + toJSON(): string; + toString(): string; + static utcNow(): LocalDate; + // (undocumented) + year: number; +} + +// @public +export class LocalTime { + constructor(totalNanoseconds: Long__default); + compare(other: LocalTime): number; + equals(other: LocalTime): boolean; + static fromBuffer(value: Buffer): LocalTime; + static fromDate(date: Date, nanoseconds?: number): LocalTime; + static fromMilliseconds(milliseconds: number, nanoseconds?: number): LocalTime; + static fromString(value: string): LocalTime; + getTotalNanoseconds(): Long__default; + hour: number; + // (undocumented) + inspect(): string; + minute: number; + nanosecond: number; + static now(nanoseconds?: number): LocalTime; + second: number; + toBuffer(): Buffer; + toJSON(): string; + toString(): string; +} + +export { Long } + +// @public +export class Mapper { + constructor(client: Client, options?: MappingOptions); + batch(items: Array, executionOptions: string | MappingExecutionOptions): Promise; + forModel(name: string): ModelMapper; +} + +// @public (undocumented) +export const mapping: { + Mapper: typeof Mapper; + ModelMapper: typeof ModelMapper; + ModelBatchMapper: typeof ModelBatchMapper; + ModelBatchItem: typeof ModelBatchItem; + Result: typeof Result; + TableMappings: typeof TableMappings; + DefaultTableMappings: typeof DefaultTableMappings; + UnderscoreCqlToCamelCaseMappings: typeof UnderscoreCqlToCamelCaseMappings; + q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; + }; +}; + +// @public (undocumented) +export type MappingExecutionOptions = { + executionProfile?: string; + isIdempotent?: boolean; + logged?: boolean; + timestamp?: number | Long__default; + fetchSize?: number; + pageState?: number; +}; + +// @public (undocumented) +export type MappingOptions = { + models: { + [key: string]: ModelOptions; + }; +}; + +// @public +export class MaterializedView extends DataCollection { + // @internal + constructor(name: string); + includeAllColumns: boolean; + tableName: string; + whereClause: string; +} + +// @public +export class Metadata { + // @internal + constructor(options: ClientOptions, controlConnection: ControlConnection); + // @internal + adaptUserHints(keyspace: string, hints: Array): Promise; + // @internal + buildTokens(hosts: HostMap): void; + // @internal + checkSchemaAgreement(callback: Function): Promise; + clearPrepared(): void; + // @internal + compareSchemaVersions(connection: Connection): Promise; + // @internal (undocumented) + datacenters: { + [datacenter: string]: { + hostLength: number; + racks: HashSet; + }; + }; + getAggregate(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + // (undocumented) + getAggregate(keyspaceName: string, name: string, signature: 
string[] | Array): Promise; + getAggregates(keyspaceName: string, name: string, callback: ValueCallback): void; + // (undocumented) + getAggregates(keyspaceName: string, name: string): Promise; + // @internal (undocumented) + getAllPrepared(): PreparedQueryInfo[]; + getFunction(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + // (undocumented) + getFunction(keyspaceName: string, name: string, signature: string[] | Array): Promise; + getFunctions(keyspaceName: string, name: string, callback: ValueCallback): void; + // (undocumented) + getFunctions(keyspaceName: string, name: string): Promise; + getMaterializedView(keyspaceName: string, name: string, callback: ValueCallback): void; + // (undocumented) + getMaterializedView(keyspaceName: string, name: string): Promise; + // @internal (undocumented) + getPreparedById(id: any): any; + // @internal + getPreparedInfo(keyspaceName: string, query: string): PreparedQueryInfo; + getReplicas(keyspaceName: string, token: Buffer | Token | TokenRange): Array; + getTable(keyspaceName: string, name: string, callback: ValueCallback): void; + // (undocumented) + getTable(keyspaceName: string, name: string): Promise; + getTokenRanges(): Set; + getTokenRangesForHost(keyspaceName: string, host: Host): Set | null; + getTrace(traceId: Uuid, consistency: consistencies, callback: ValueCallback): void; + // (undocumented) + getTrace(traceId: Uuid, consistency: consistencies): Promise; + // (undocumented) + getTrace(traceId: Uuid, callback: ValueCallback): void; + // (undocumented) + getTrace(traceId: Uuid): Promise; + getUdt(keyspaceName: string, name: string, callback: ValueCallback): void; + // (undocumented) + getUdt(keyspaceName: string, name: string): Promise; + // @internal (undocumented) + initialized: boolean; + isDbaas(): boolean; + // (undocumented) + keyspaces: { + [name: string]: Keyspace; + }; + // @internal (undocumented) + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; + newToken(components: Array | Buffer | string): Token; + newTokenRange(start: Token, end: Token): TokenRange; + // @internal (undocumented) + primaryReplicas: Record; + refreshKeyspace(name: string, callback: EmptyCallback): void; + // (undocumented) + refreshKeyspace(name: string): Promise; + refreshKeyspaces(waitReconnect: boolean, callback: EmptyCallback): void; + // (undocumented) + refreshKeyspaces(waitReconnect?: boolean): Promise; + // (undocumented) + refreshKeyspaces(callback: EmptyCallback): void; + // @internal (undocumented) + refreshKeyspacesInternal(waitReconnect: boolean): Promise<{ + [s: string]: object; + }>; + // @internal (undocumented) + ring: any[]; + // @internal (undocumented) + ringTokensAsStrings: any[]; + // @internal + setCassandraVersion(version: Array): void; + // Warning: (ae-forgotten-export) The symbol "Murmur3Tokenizer" needs to be exported by the entry point cassandra-rollup.d.ts + // Warning: (ae-forgotten-export) The symbol "RandomTokenizer" needs to be exported by the entry point cassandra-rollup.d.ts + // Warning: (ae-forgotten-export) The symbol "ByteOrderedTokenizer" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + setPartitioner(partitionerName: string): Murmur3Tokenizer | RandomTokenizer | ByteOrderedTokenizer; + // @internal (undocumented) + setPreparedById(info: any): void; + // @internal + setProductTypeAsDbaas(): void; + // Warning: (ae-forgotten-export) The symbol "Tokenizer" needs to be exported by the entry 
point cassandra-rollup.d.ts + // + // @internal (undocumented) + tokenizer: Tokenizer; + // @internal (undocumented) + tokenRanges: Set; +} + +// @public (undocumented) +export const metadata: { + Metadata: typeof Metadata; +}; + +// @public (undocumented) +export const metrics: { + ClientMetrics: typeof ClientMetrics; + DefaultMetrics: typeof DefaultMetrics; +}; + +// @public +export class ModelBatchItem { + // @internal + constructor(doc: object, docInfo: DocInfo, handler: MappingHandler, cache: Tree); + // Warning: (ae-forgotten-export) The symbol "Tree" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + cache: Tree; + // @internal + createQueries(docKeys: Array): Promise>; + // @internal (undocumented) + doc: object; + // Warning: (ae-forgotten-export) The symbol "DocInfo" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + docInfo: DocInfo; + // @internal + getCacheKey(docKeys: Array): Iterator; + // Warning: (ae-forgotten-export) The symbol "ModelMappingInfo" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal + getMappingInfo(): ModelMappingInfo; + // @internal (undocumented) + getQueries(): any; + // Warning: (ae-forgotten-export) The symbol "MappingHandler" needs to be exported by the entry point cassandra-rollup.d.ts + // + // @internal (undocumented) + handler: MappingHandler; + // @internal + pushQueries(arr: Array): Promise<{ + isIdempotent: any; + isCounter: any; + }>; +} + +// @public +export class ModelBatchMapper { + // @internal + constructor(handler: MappingHandler); + insert(doc: object, docInfo: InsertDocInfo): ModelBatchItem; + remove(doc: object, docInfo: RemoveDocInfo): ModelBatchItem; + update(doc: object, docInfo: UpdateDocInfo): ModelBatchItem; +} + +// @public (undocumented) +export type ModelColumnOptions = { + name: string; + toModel?: (columnValue: any) => any; + fromModel?: (modelValue: any) => any; +}; + +// @public +export class ModelMapper { + // @internal + constructor(name: any, handler: any); + batching: ModelBatchMapper; + find(doc: { + [key: string]: any; + }, docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + findAll(docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + get(doc: { + [key: string]: any; + }, docInfo?: { + fields?: string[]; + }, executionOptions?: string | MappingExecutionOptions): Promise; + insert(doc: { + [key: string]: any; + }, docInfo?: InsertDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + mapWithQuery(query: string, paramsHandler: (doc: any) => any[], executionOptions?: string | MappingExecutionOptions): (doc: any, executionOptions?: string | MappingExecutionOptions) => Promise>; + name: string; + remove(doc: { + [key: string]: any; + }, docInfo?: RemoveDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; + update(doc: { + [key: string]: any; + }, docInfo?: UpdateDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; +} + +// @public (undocumented) +export type ModelOptions = { + tables?: string[] | ModelTables[]; + mappings?: TableMappings; + columns?: { + [key: string]: string | ModelColumnOptions; + }; + keyspace?: string; +}; + +// @public (undocumented) +export interface ModelTables { + // (undocumented) + isView: boolean; + // (undocumented) + name: string; +} + +// @public +export class MonotonicTimestampGenerator extends TimestampGenerator { + 
constructor(warningThreshold?: number, minLogInterval?: number); + getDate(): number; + // (undocumented) + next(client: Client): Long__default | number | null; +} + +// Warning: (ae-internal-missing-underscore) The name "NoAuthProvider" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export class NoAuthProvider extends AuthProvider { + // Warning: (ae-forgotten-export) The symbol "TransitionalModePlainTextAuthenticator" needs to be exported by the entry point cassandra-rollup.d.ts + // Warning: (ae-forgotten-export) The symbol "NoAuthAuthenticator" needs to be exported by the entry point cassandra-rollup.d.ts + // + // (undocumented) + newAuthenticator(endpoint: any, name: any): TransitionalModePlainTextAuthenticator | NoAuthAuthenticator; +} + +// @public +export class NoHostAvailableError extends DriverError { + constructor(innerErrors: object, message?: string); + // (undocumented) + innerErrors: object; +} + +// @public +export class NoSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + constructor(); + // (undocumented) + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; +} + +// @public +export class NotSupportedError extends DriverError { + constructor(message: string); +} + +// Warning: (ae-internal-missing-underscore) The name "opcodes" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export const opcodes: { + error: number; + startup: number; + ready: number; + authenticate: number; + credentials: number; + options: number; + supported: number; + query: number; + result: number; + prepare: number; + execute: number; + register: number; + event: number; + batch: number; + authChallenge: number; + authResponse: number; + authSuccess: number; + cancel: number; + isInRange: (code: any) => boolean; +}; + +// @public +export class OperationTimedOutError extends DriverError { + constructor(message: string, host?: string); + // (undocumented) + host?: string; +} + +// @public (undocumented) +export type Options = { + collectResults?: boolean; + concurrencyLevel?: number; + executionProfile?: string; + maxErrors?: number; + raiseOnFirstError?: boolean; +}; + +// @public +export class Path { + constructor(labels: any[], objects: any[]); + // (undocumented) + labels: any[]; + // (undocumented) + objects: any[]; +} + +// @public +export class PlainTextAuthProvider extends AuthProvider { + constructor(username: string, password: string); + // @override + newAuthenticator(): Authenticator; +} + +// @public +export class Point extends Geometry { + constructor(x: number, y: number); + equals(other: Point): boolean; + static fromBuffer(buffer: Buffer): Point; + static fromString(textValue: string): Point; + toBuffer(): Buffer; + toJSON(): object; + toString(): string; + // @internal (undocumented) + useBESerialization(): boolean; + // @internal (undocumented) + x: number; + // @internal (undocumented) + y: number; +} + +// @public (undocumented) +export const policies: { + addressResolution: { + AddressTranslator: typeof AddressTranslator; + EC2MultiRegionTranslator: typeof EC2MultiRegionTranslator; + }; + loadBalancing: { + AllowListPolicy: typeof AllowListPolicy; + DCAwareRoundRobinPolicy: typeof DCAwareRoundRobinPolicy; + DefaultLoadBalancingPolicy: typeof DefaultLoadBalancingPolicy; + LoadBalancingPolicy: typeof LoadBalancingPolicy; + RoundRobinPolicy: typeof RoundRobinPolicy; + TokenAwarePolicy: typeof TokenAwarePolicy; + 
WhiteListPolicy: typeof WhiteListPolicy; + }; + reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; + }; + retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; + }; + speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; + }; + timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; + }; + defaultAddressTranslator: () => AddressTranslator; + defaultLoadBalancingPolicy: (localDc?: string) => LoadBalancingPolicy; + defaultRetryPolicy: () => RetryPolicy; + defaultReconnectionPolicy: () => ReconnectionPolicy; + defaultSpeculativeExecutionPolicy: () => SpeculativeExecutionPolicy; + defaultTimestampGenerator: () => TimestampGenerator; +}; + +// @public +export class Polygon extends Geometry { + constructor(...ringPoints: Point[][]); + equals(other: Polygon): boolean; + static fromBuffer(buffer: Buffer): Polygon; + static fromString(textValue: string): Polygon; + // @internal (undocumented) + rings: ReadonlyArray>; + toBuffer(): Buffer; + toJSON(): object; + toString(): string; + // @internal (undocumented) + useBESerialization(): boolean; +} + +// @public (undocumented) +export type PreparedQueryInfo = { + queryId?: Buffer; + preparing?: boolean; + query: string; + keyspace: string; + meta?: DataCollection; +} & EventEmitter_2; + +// @public +export class Property { + constructor(key: string, value: any); + // (undocumented) + key: string; + // (undocumented) + value: any; +} + +// Warning: (ae-internal-missing-underscore) The name "protocolEvents" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export const protocolEvents: { + topologyChange: string; + statusChange: string; + schemaChange: string; +}; + +// @public +export enum protocolVersion { + // (undocumented) + dseV1 = 65, + // (undocumented) + dseV2 = 66, + // (undocumented) + maxSupported = 66, + // (undocumented) + minSupported = 1, + // (undocumented) + v1 = 1, + // (undocumented) + v2 = 2, + // (undocumented) + v3 = 3, + // (undocumented) + v4 = 4, + // (undocumented) + v5 = 5, + // (undocumented) + v6 = 6 +} + +// @public (undocumented) +export namespace protocolVersion { + // @internal + export function canStartupResponseErrorBeWrapped(version: number): boolean; + // @internal + export function getHighestCommon(connection: Connection, hosts: HostMap): number; + // @internal + export function getLowerSupported(version: number): number; + // @internal + export function isBeta(version: number): boolean; + // @internal + export function isDse(version: number): boolean; + export function isSupported(version: number): boolean; + // @internal + export function isSupportedCassandra(version: number): boolean; + // @internal + export function supportsContinuousPaging(version: any): boolean; + // @internal + export function supportsFailureReasonMap(version: any): boolean; + // @internal + export function supportsKeyspaceInRequest(version: number): boolean; + // @internal + export function supportsNamedParameters(version: number): boolean; + // @internal + export 
function supportsPaging(version: any): boolean; + // @internal + export function supportsPreparedPartitionKey(version: number): boolean; + // @internal + export function supportsPrepareFlags(version: number): boolean; + // @internal + export function supportsResultMetadataId(version: number): boolean; + // @internal + export function supportsSchemaChangeFullMetadata(version: any): boolean; + // @internal + export function supportsTimestamp(version: number): boolean; + // @internal + export function supportsUnset(version: number): boolean; + // @internal + export function uses2BytesStreamIds(version: number): boolean; + // @internal + export function uses4BytesCollectionLength(version: number): boolean; + // @internal + export function uses4BytesQueryFlags(version: number): boolean; +} + +// @public +export const q: { + in_: (arr: any) => QueryOperator; + gt: (value: any) => QueryOperator; + gte: (value: any) => QueryOperator; + lt: (value: any) => QueryOperator; + lte: (value: any) => QueryOperator; + notEq: (value: any) => QueryOperator; + and: (condition1: any, condition2: any) => QueryOperator; + incr: (value: any) => QueryAssignment; + decr: (value: any) => QueryAssignment; + append: (value: any) => QueryAssignment; + prepend: (value: any) => QueryAssignment; + remove: (value: any) => QueryAssignment; +}; + +// @public +export interface QueryOptions { + // (undocumented) + autoPage?: boolean; + // (undocumented) + captureStackTrace?: boolean; + // (undocumented) + consistency?: consistencies; + // (undocumented) + counter?: boolean; + // (undocumented) + customPayload?: object; + // (undocumented) + executeAs?: string; + // (undocumented) + executionProfile?: string | ExecutionProfile; + // (undocumented) + fetchSize?: number; + // (undocumented) + graphOptions?: { + language?: string; + name?: string; + readConsistency?: number; + readTimeout?: number; + source?: string; + writeConsistency?: number; + }; + // (undocumented) + hints?: Array | Array>; + // (undocumented) + host?: Host; + // (undocumented) + isIdempotent?: boolean; + // (undocumented) + keyspace?: string; + // (undocumented) + logged?: boolean; + // (undocumented) + pageState?: Buffer | string; + // (undocumented) + prepare?: boolean; + // (undocumented) + readTimeout?: number; + // (undocumented) + retry?: RetryPolicy; + // (undocumented) + routingIndexes?: number[]; + // (undocumented) + routingKey?: Buffer | Buffer[]; + // (undocumented) + routingNames?: string[]; + // (undocumented) + serialConsistency?: number; + // (undocumented) + timestamp?: number | Long__default; + // (undocumented) + traceQuery?: boolean; +} + +// @public (undocumented) +export interface QueryTrace { + // (undocumented) + clientAddress: string; + // (undocumented) + coordinator: InetAddress; + // (undocumented) + duration: number; + // (undocumented) + events: Array<{ + id: Uuid; + activity: any; + source: any; + elapsed: any; + thread: any; + }>; + // (undocumented) + parameters: { + [key: string]: any; + }; + // (undocumented) + requestType: string; + // (undocumented) + startedAt: number | Long__default; +} + +// @public (undocumented) +export const reconnection: { + ReconnectionPolicy: typeof ReconnectionPolicy; + ConstantReconnectionPolicy: typeof ConstantReconnectionPolicy; + ExponentialReconnectionPolicy: typeof ExponentialReconnectionPolicy; +}; + +// @public +export class ReconnectionPolicy { + constructor(); + getOptions(): Map; + newSchedule(): Iterator; +} + +// @public (undocumented) +export type RemoveDocInfo = { + fields?: 
string[]; + ttl?: number; + ifExists?: boolean; + when?: { + [key: string]: any; + }; + deleteOnlyColumns?: boolean; +}; + +// @public +export class RequestLogger extends RequestTracker { + constructor(options: { + slowThreshold?: number; + requestSizeThreshold?: number; + logNormalRequests?: boolean; + logErroredRequests?: boolean; + messageMaxQueryLength?: number; + messageMaxParameterValueLength?: number; + messageMaxErrorStackTraceLength?: number; + }); + // @override + onError(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [p: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; + // @override + onSuccess(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [p: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; +} + +// @public +export class RequestTracker { + onError?(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [key: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; + onSuccess?(host: Host, query: string | Array<{ + query: string; + params?: any; + }>, parameters: any[] | { + [key: string]: any; + } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; + shutdown?(): void; +} + +// @public +export class ResponseError extends DriverError { + constructor(code: number, message: string); + // (undocumented) + alive?: number; + // (undocumented) + argTypes?: string[]; + // (undocumented) + blockFor?: number; + // (undocumented) + code: number; + // (undocumented) + consistencies?: consistencies; + // (undocumented) + failures?: number; + // (undocumented) + functionName?: string; + // (undocumented) + isDataPresent?: any; + // (undocumented) + keyspace?: string; + // (undocumented) + queryId?: any; + // (undocumented) + reasons?: object; + // (undocumented) + received?: number; + // (undocumented) + required?: number; + // (undocumented) + table?: string; + // (undocumented) + writeType?: any; +} + +// @public +export enum responseErrorCodes { + // (undocumented) + alreadyExists = 9216, + // (undocumented) + badCredentials = 256, + // (undocumented) + clientWriteFailure = 32768, + // (undocumented) + configError = 8960, + // (undocumented) + functionFailure = 5120, + // (undocumented) + invalid = 8704, + // (undocumented) + isBootstrapping = 4098, + // (undocumented) + overloaded = 4097, + // (undocumented) + protocolError = 10, + // (undocumented) + readFailure = 4864, + // (undocumented) + readTimeout = 4608, + // (undocumented) + serverError = 0, + // (undocumented) + syntaxError = 8192, + // (undocumented) + truncateError = 4099, + // (undocumented) + unauthorized = 8448, + // (undocumented) + unavailableException = 4096, + // (undocumented) + unprepared = 9472, + // (undocumented) + writeFailure = 5376, + // (undocumented) + writeTimeout = 4352 +} + +// @public +export class Result implements IterableIterator { + // (undocumented) + [inspectMethod](): T[]; + [Symbol.iterator](): IterableIterator; + // @internal + constructor(rs: ResultSet, info: ModelMappingInfo, rowAdapter: Function); + first(): T | null; + forEach(callback: (currentValue: T, index: number) => void, thisArg: any): void; + // @internal (undocumented) + length: number; + // 
(undocumented) + next(): { + done: boolean; + value: T; + }; + // @internal (undocumented) + pageState: string; + toArray(): T[]; + wasApplied(): boolean; +} + +// Warning: (ae-internal-missing-underscore) The name "resultKind" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export const resultKind: { + voidResult: number; + rows: number; + setKeyspace: number; + prepared: number; + schemaChange: number; +}; + +// @public +export class ResultSet implements Iterable, AsyncIterable { + [Symbol.asyncIterator](): AsyncIterator; + [Symbol.iterator](): Iterator; + // @internal + constructor(response: { + rows: Array; + flags: { + traceId: Uuid; + warnings: string[]; + customPayload: any; + }; + meta?: { + columns: Array<{ + name: string; + type: DataTypeInfo; + }>; + pageState: Buffer; + }; + }, host: string, triedHosts: { + [key: string]: any; + }, speculativeExecutions: number, consistency: consistencies, isSchemaInAgreement: boolean); + // (undocumented) + columns: Array<{ + name: string; + type: DataTypeInfo; + }>; + first(): Row; + // (undocumented) + getColumns(): Array<{ + name: string; + type: DataTypeInfo; + }>; + // (undocumented) + getPageState(): string; + // (undocumented) + info: { + queriedHost: string; + triedHosts: { + [key: string]: any; + }; + speculativeExecutions: number; + achievedConsistency: consistencies; + traceId: Uuid; + warnings: string[]; + customPayload: any; + isSchemaInAgreement: boolean; + }; + isPaged(): boolean; + // (undocumented) + nextPage: (() => void) | null; + // @internal (undocumented) + nextPageAsync: Function | undefined; + // (undocumented) + pageState: string; + // @internal (undocumented) + rawPageState: any; + // (undocumented) + rowLength: number; + // (undocumented) + rows: Row[]; + wasApplied(): boolean; +} + +// @public +export class ResultSetGroup { + // @internal + constructor(options: any); + // (undocumented) + errors: Error[]; + // (undocumented) + resultItems: any[]; + // @internal + setError(index: any, err: any): void; + // @internal (undocumented) + setResultItem(index: any, rs: any): void; + // (undocumented) + totalExecuted: number; +} + +// @public +export class ResultStream extends Readable { + // @internal + constructor(opt: any); + // (undocumented) + add(chunk: any): number; + // (undocumented) + buffer: any[]; + // @internal + cancel(callback: Function): any; + // (undocumented) + paused: boolean; + // @internal (undocumented) + _read(): void; + // @internal + setHandlers(options: any): void; + // @internal + _valve(readNext: Function): void; +} + +// @public (undocumented) +export const retry: { + IdempotenceAwareRetryPolicy: typeof IdempotenceAwareRetryPolicy; + FallthroughRetryPolicy: typeof FallthroughRetryPolicy; + RetryPolicy: typeof RetryPolicy; +}; + +// @public +export class RetryPolicy { + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo; + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo; + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo; + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo; + rethrowResult(): DecisionInfo; + retryResult(consistency?: consistencies, useCurrentHost?: boolean): DecisionInfo; +} + +// @public +export namespace RetryPolicy { + // (undocumented) + export enum 
retryDecision { + // (undocumented) + ignore = 2, + // (undocumented) + rethrow = 0, + // (undocumented) + retry = 1 + } +} + +// @public +export class RoundRobinPolicy extends LoadBalancingPolicy { + constructor(); + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export class Row { + // @internal + constructor(columns: Array); + // (undocumented) + [key: string]: any; + forEach(callback: (val: any, key: string) => void): void; + get(columnName: string | number): any; + keys(): string[]; + values(): Array; +} + +// @public +export class SchemaFunction { + // @internal + constructor(); + argumentNames: Array; + argumentTypes: Array; + body: string; + calledOnNullInput: boolean; + deterministic: boolean; + keyspaceName: string; + language: string; + monotonic: boolean; + monotonicOn: Array; + name: string; + returnType: DataTypeInfo; + signature: Array; +} + +// @public +export const search: { + DateRange: typeof DateRange; + DateRangeBound: typeof DateRangeBound; + dateRangePrecision: { + readonly year: 0; + readonly month: 1; + readonly day: 2; + readonly hour: 3; + readonly minute: 4; + readonly second: 5; + readonly millisecond: 6; + }; +}; + +// @public (undocumented) +export const speculativeExecution: { + NoSpeculativeExecutionPolicy: typeof NoSpeculativeExecutionPolicy; + SpeculativeExecutionPolicy: typeof SpeculativeExecutionPolicy; + ConstantSpeculativeExecutionPolicy: typeof ConstantSpeculativeExecutionPolicy; +}; + +// @public +export class SpeculativeExecutionPolicy { + constructor(); + getOptions(): Map; + init(client: Client): void; + newPlan(keyspace: string, queryInfo: string | Array): { + nextExecution: () => number; + }; + shutdown(): void; +} + +// @public +export const t: { + id: EnumValue; + key: EnumValue; + label: EnumValue; + value: EnumValue; +}; + +// @public +export class TableMappings { + getColumnName(propName: string): string; + getPropertyName(columnName: string): string; + newObjectInstance(): object; +} + +// @public +export class TableMetadata extends DataCollection { + // @internal + constructor(name: string); + cdc?: boolean; + indexes: Array; + indexInterval?: number; + isCompact: boolean; + memtableFlushPeriod: number; + replicateOnWrite: boolean; + virtual: boolean; +} + +// @public (undocumented) +export class TimeoutError extends errors.DriverError { + constructor(message: string); +} + +// @public (undocumented) +export const timestampGeneration: { + TimestampGenerator: typeof TimestampGenerator; + MonotonicTimestampGenerator: typeof MonotonicTimestampGenerator; +}; + +// @public +export class TimestampGenerator { + constructor(); + next(client: Client): Long__default | number | null; +} + +// @public +export class TimeUuid extends Uuid { + constructor(value: Date | Buffer, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer); + static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer): TimeUuid; + // (undocumented) + static fromDate(date: Date, ticks: number, nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + static fromString(value: string): TimeUuid; + getClockId(): Buffer; + getDate(): Date; + getDatePrecision(): { + date: Date; + ticks: number; + }; + getNodeId(): Buffer; + getNodeIdString(): string; + static max(date: Date, ticks?: number): TimeUuid; + static min(date: Date, ticks?: number): TimeUuid; + static now(): TimeUuid; + // (undocumented) + static 
now(nodeId: string | Buffer, clockId?: string | Buffer): TimeUuid; + // (undocumented) + static now(nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + // (undocumented) + static now(callback: ValueCallback): void; +} + +// @public @deprecated +export function timeuuid(options: { + msecs: any; + node: any; + clockseq: any; + nsecs: any; +}, buffer: Buffer, offset: number): string | Buffer; + +// @public (undocumented) +export const token: { + Token: typeof Token; + TokenRange: typeof TokenRange; +}; + +// @public +export class TokenAwarePolicy extends LoadBalancingPolicy { + constructor(childPolicy: LoadBalancingPolicy); + // (undocumented) + getDistance(host: Host): distance; + // (undocumented) + getOptions(): Map; + // (undocumented) + init(client: Client, hosts: HostMap, callback: EmptyCallback): void; + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void; +} + +// @public +export const tracker: { + RequestTracker: typeof RequestTracker; + RequestLogger: typeof RequestLogger; +}; + +// @public +export class Tuple { + constructor(...args: any[]); + // (undocumented) + elements: any[]; + static fromArray(elements: any[]): Tuple; + get(index: number): any; + // (undocumented) + length: number; + toJSON(): any[]; + toString(): string; + values(): any[]; +} + +// @public (undocumented) +export const types: { + opcodes: { + error: number; + startup: number; + ready: number; + authenticate: number; + credentials: number; + options: number; + supported: number; + query: number; + result: number; + prepare: number; + execute: number; + register: number; + event: number; + batch: number; + authChallenge: number; + authResponse: number; + authSuccess: number; + cancel: number; + isInRange: (code: any) => boolean; + }; + consistencies: typeof consistencies; + consistencyToString: {}; + dataTypes: typeof dataTypes; + getDataTypeNameByCode: typeof getDataTypeNameByCode; + distance: typeof distance; + frameFlags: { + compression: number; + tracing: number; + customPayload: number; + warning: number; + }; + protocolEvents: { + topologyChange: string; + statusChange: string; + schemaChange: string; + }; + protocolVersion: typeof protocolVersion; + responseErrorCodes: typeof responseErrorCodes; + resultKind: { + voidResult: number; + rows: number; + setKeyspace: number; + prepared: number; + schemaChange: number; + }; + timeuuid: typeof timeuuid; + uuid: typeof uuid; + BigDecimal: typeof BigDecimal; + Duration: typeof Duration; + FrameHeader: typeof FrameHeader; + InetAddress: typeof InetAddress; + Integer: typeof Integer; + LocalDate: typeof LocalDate; + LocalTime: typeof LocalTime; + Long: typeof Long__default; + ResultSet: typeof ResultSet; + ResultStream: typeof ResultStream; + Row: typeof Row; + DriverError: typeof DriverError; + TimeoutError: typeof TimeoutError; + TimeUuid: typeof TimeUuid; + Tuple: typeof Tuple; + Uuid: typeof Uuid; + unset: Readonly<{ + readonly unset: true; + }>; + generateTimestamp: typeof generateTimestamp; + Vector: typeof Vector; +}; + +// @public (undocumented) +export interface Udt { + // (undocumented) + fields: ColumnInfo[]; + // (undocumented) + name: string; +} + +// Warning: (ae-internal-missing-underscore) The name "UdtGraphWrapper" should be prefixed with an underscore because the declaration is marked as @internal +// +// @internal +export class UdtGraphWrapper { + constructor(value: any, udtInfo: any); + // (undocumented) + udtInfo: any; + // 
(undocumented) + value: any; +} + +// @public +export class UnderscoreCqlToCamelCaseMappings extends TableMappings { + constructor(); + getColumnName(propName: string): string; + getPropertyName(columnName: string): string; +} + +// @public +export const unset: Readonly<{ + readonly unset: true; +}>; + +// @public (undocumented) +export type UpdateDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { + [key: string]: any; + }; + orderBy?: { + [key: string]: string; + }; + limit?: number; + deleteOnlyColumns?: boolean; +}; + +// @public +export class Uuid { + constructor(buffer: Buffer); + // @internal (undocumented) + buffer: Buffer; + equals(other: Uuid): boolean; + static fromString(value: string): Uuid; + getBuffer(): Buffer; + inspect(): string; + static random(): Uuid; + // (undocumented) + static random(callback: ValueCallback): void; + toJSON(): string; + toString(): string; +} + +// @public @deprecated +export function uuid(options: any, buffer: any, offset: any): any; + +// @public (undocumented) +export class Vector { + // (undocumented) + [Symbol.iterator](): IterableIterator; + // (undocumented) + static get [Symbol.species](): typeof Vector; + constructor(elements: Float32Array | Array, subtype?: string); + // (undocumented) + at(index: number): any; + // (undocumented) + elements: any[]; + // (undocumented) + forEach(callback: (value: any, index: number, array: any[]) => void): void; + // (undocumented) + getSubtype(): string | null; + length: number; + // (undocumented) + subtype: string; + toString(): string; +} + +// @public (undocumented) +export const version: string; + +// @public +export class Vertex extends Element { + constructor(id: any, label: string, properties?: { + [key: string]: any[]; + }); + // (undocumented) + properties: { + [key: string]: any[]; + }; +} + +// @public +export class VertexProperty extends Element { + constructor(id: any, label: string, value: any, properties: object); + // (undocumented) + key: string; + // (undocumented) + properties: any; + // (undocumented) + value: any; +} + +// @public +export class VIntOutOfRangeException extends DriverError { + constructor(long: Long__default); +} + +// @public @deprecated +export class WhiteListPolicy extends AllowListPolicy { + // @deprecated + constructor(childPolicy: LoadBalancingPolicy, allowList: Array); +} + +// Warnings were encountered during analysis: +// +// out/cassandra-rollup.d.ts:2970:9 - (ae-forgotten-export) The symbol "Keyspace" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:2993:13 - (ae-forgotten-export) The symbol "HashSet" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:4511:9 - (ae-forgotten-export) The symbol "AddressResolver" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:7591:5 - (ae-forgotten-export) The symbol "EnumValue" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:9425:13 - (ae-forgotten-export) The symbol "QueryOperator" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:9432:13 - (ae-forgotten-export) The symbol "QueryAssignment" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:9452:9 - (ae-forgotten-export) The symbol "Token" needs to be exported by the entry point cassandra-rollup.d.ts +// out/cassandra-rollup.d.ts:9453:9 - (ae-forgotten-export) The symbol "TokenRange" needs to be 
exported by the entry point cassandra-rollup.d.ts + +// (No @packageDocumentation comment for this package) + +``` diff --git a/index.d.ts b/index.d.ts deleted file mode 100644 index 90f2b6c3b..000000000 --- a/index.d.ts +++ /dev/null @@ -1,418 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as events from 'events'; -import * as tls from 'tls'; -import { URL } from 'url'; -import { auth } from './lib/auth'; -import { policies } from './lib/policies'; -import { types } from './lib/types'; -import { metrics } from './lib/metrics'; -import { tracker } from './lib/tracker'; -import { metadata } from './lib/metadata'; -import { datastax } from './lib/datastax/'; -import Long = types.Long; -import Uuid = types.Uuid; -import graph = datastax.graph; - -// Export imported submodules -export { concurrent } from './lib/concurrent'; -export { mapping } from './lib/mapping'; -export { geometry } from './lib/geometry'; -export { auth, datastax, metadata, metrics, policies, tracker, types }; - -export const version: number; - -export function defaultOptions(): ClientOptions; - -export type ValueCallback = (err: Error, val: T) => void; -export type EmptyCallback = (err: Error) => void; -export type ArrayOrObject = any[]|{[key: string]: any}; - -export class Client extends events.EventEmitter { - hosts: HostMap; - keyspace: string; - metadata: metadata.Metadata; - metrics: metrics.ClientMetrics; - - constructor(options: DseClientOptions); - - connect(): Promise; - - connect(callback: EmptyCallback): void; - - execute(query: string, params?: ArrayOrObject, options?: QueryOptions): Promise; - - execute(query: string, params: ArrayOrObject, options: QueryOptions, callback: ValueCallback): void; - - execute(query: string, params: ArrayOrObject, callback: ValueCallback): void; - - execute(query: string, callback: ValueCallback): void; - - executeGraph( - traversal: string, - parameters: { [name: string]: any } | undefined, - options: GraphQueryOptions, - callback: ValueCallback): void; - - executeGraph( - traversal: string, - parameters: { [name: string]: any } | undefined, - callback: ValueCallback): void; - - executeGraph(traversal: string, callback: ValueCallback): void; - - executeGraph( - traversal: string, - parameters?: { [name: string]: any }, - options?: GraphQueryOptions): Promise; - - eachRow(query: string, - params: ArrayOrObject, - options: QueryOptions, - rowCallback: (n: number, row: types.Row) => void, - callback?: ValueCallback): void; - - eachRow(query: string, - params: ArrayOrObject, - rowCallback: (n: number, row: types.Row) => void, - callback?: ValueCallback): void; - - eachRow(query: string, - rowCallback: (n: number, row: types.Row) => void): void; - - stream(query: string, params?: ArrayOrObject, options?: QueryOptions, callback?: EmptyCallback): events.EventEmitter; - - batch( - queries: Array, - options?: QueryOptions): Promise; - - batch( - queries: Array, - options: QueryOptions, - callback: ValueCallback): void; - - 
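The Client API being retyped here exposes every operation both with Node-style callbacks and as promises. As a quick orientation, a minimal promise-style sketch (not part of this change; the contact point, data center, keyspace and table names are placeholders):

```typescript
import { Client, types } from 'cassandra-driver';

// Placeholder connection details for illustration only.
const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  keyspace: 'examples'
});

async function run(): Promise<void> {
  await client.connect();

  // Prepared statements bind parameters safely; Uuid.random() comes from the types module.
  const id = types.Uuid.random();
  await client.execute('INSERT INTO users (id, name) VALUES (?, ?)', [id, 'alice'], { prepare: true });

  const result = await client.execute('SELECT name FROM users WHERE id = ?', [id], { prepare: true });
  console.log(result.rows[0]['name']);

  await client.shutdown();
}

run().catch(console.error);
```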
batch( - queries: Array, - callback: ValueCallback): void; - - shutdown(): Promise; - - shutdown(callback: EmptyCallback): void; - - getReplicas(keyspace: string, token: Buffer): Host[]; - - getState(): metadata.ClientState; -} - -export interface HostMap extends events.EventEmitter { - length: number; - - forEach(callback: (value: Host, key: string) => void): void; - - get(key: string): Host; - - keys(): string[]; - - values(): Host[]; -} - -export interface Host extends events.EventEmitter { - address: string; - cassandraVersion: string; - datacenter: string; - rack: string; - tokens: string[]; - hostId: types.Uuid; - - canBeConsideredAsUp(): boolean; - - getCassandraVersion(): number[]; - - isUp(): boolean; -} - -export interface ExecutionOptions { - getCaptureStackTrace(): boolean; - - getConsistency(): types.consistencies; - - getCustomPayload(): { [key: string]: any }; - - getFetchSize(): number; - - getFixedHost(): Host; - - getHints(): string[] | string[][]; - - isAutoPage(): boolean; - - isBatchCounter(): boolean; - - isBatchLogged(): boolean; - - isIdempotent(): boolean; - - isPrepared(): boolean; - - isQueryTracing(): boolean; - - getKeyspace(): string; - - getLoadBalancingPolicy(): policies.loadBalancing.LoadBalancingPolicy; - - getPageState(): Buffer; - - getRawQueryOptions(): QueryOptions; - - getReadTimeout(): number; - - getRetryPolicy(): policies.retry.RetryPolicy; - - getRoutingKey(): Buffer | Buffer[]; - - getSerialConsistency(): types.consistencies; - - getTimestamp(): number | Long | undefined | null; - - setHints(hints: string[]): void; -} - -export interface ClientOptions { - contactPoints?: string[]; - localDataCenter?: string; - keyspace?: string; - authProvider?: auth.AuthProvider; - credentials?: { - username: string; - password: string; - } - - cloud?: { - secureConnectBundle: string | URL; - }; - - encoding?: { - map?: Function; - set?: Function; - copyBuffer?: boolean; - useUndefinedAsUnset?: boolean; - useBigIntAsLong?: boolean; - useBigIntAsVarint?: boolean; - }; - isMetadataSyncEnabled?: boolean; - maxPrepared?: number; - metrics?: metrics.ClientMetrics; - policies?: { - addressResolution?: policies.addressResolution.AddressTranslator; - loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; - reconnection?: policies.reconnection.ReconnectionPolicy; - retry?: policies.retry.RetryPolicy; - speculativeExecution?: policies.speculativeExecution.SpeculativeExecutionPolicy; - timestampGeneration?: policies.timestampGeneration.TimestampGenerator; - }; - pooling?: { - coreConnectionsPerHost?: { [key: number]: number; }; - heartBeatInterval?: number; - maxRequestsPerConnection?: number; - warmup?: boolean; - }; - prepareOnAllHosts?: boolean; - profiles?: ExecutionProfile[]; - protocolOptions?: { - maxSchemaAgreementWaitSeconds?: number; - maxVersion?: number; - noCompact?: boolean; - port?: number; - }; - promiseFactory?: (handler: (callback: (err: Error, result?: any) => void) => void) => Promise; - queryOptions?: QueryOptions; - refreshSchemaDelay?: number; - rePrepareOnUp?: boolean; - requestTracker?: tracker.RequestTracker; - socketOptions?: { - coalescingThreshold?: number; - connectTimeout?: number; - defunctReadTimeoutThreshold?: number; - keepAlive?: boolean; - keepAliveDelay?: number; - readTimeout?: number; - tcpNoDelay?: boolean; - }; - sslOptions?: tls.ConnectionOptions; -} - -export interface QueryOptions { - autoPage?: boolean; - captureStackTrace?: boolean; - consistency?: number; - counter?: boolean; - customPayload?: any; - executionProfile?: 
string | ExecutionProfile; - fetchSize?: number; - hints?: string[] | string[][]; - host?: Host; - isIdempotent?: boolean; - keyspace?: string; - logged?: boolean; - pageState?: Buffer | string; - prepare?: boolean; - readTimeout?: number; - retry?: policies.retry.RetryPolicy; - routingIndexes?: number[]; - routingKey?: Buffer | Buffer[]; - routingNames?: string[]; - serialConsistency?: number; - timestamp?: number | Long; - traceQuery?: boolean; -} - -export interface DseClientOptions extends ClientOptions { - id?: Uuid; - applicationName?: string; - applicationVersion?: string; - monitorReporting?: { enabled?: boolean }; - graphOptions?: GraphOptions; -} - -export interface GraphQueryOptions extends QueryOptions { - graphLanguage?: string; - graphName?: string; - graphReadConsistency?: types.consistencies; - graphSource?: string; - graphWriteConsistency?: types.consistencies; -} - -export type GraphOptions = { - language?: string; - name?: string; - readConsistency?: types.consistencies; - readTimeout?: number; - source?: string; - writeConsistency?: types.consistencies; -}; - -export class ExecutionProfile { - consistency?: types.consistencies; - loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; - name: string; - readTimeout?: number; - retry?: policies.retry.RetryPolicy; - serialConsistency?: types.consistencies; - graphOptions?: { - name?: string; - language?: string; - source?: string; - readConsistency?: types.consistencies; - writeConsistency?: types.consistencies; - }; - - constructor(name: string, options: { - consistency?: types.consistencies; - loadBalancing?: policies.loadBalancing.LoadBalancingPolicy; - readTimeout?: number; - retry?: policies.retry.RetryPolicy; - serialConsistency?: types.consistencies; - graphOptions?: { - name?: string; - language?: string; - source?: string; - readConsistency?: types.consistencies; - writeConsistency?: types.consistencies; - }; - }); -} - -export namespace errors { - class ArgumentError extends DriverError { - constructor(message: string); - } - - class AuthenticationError extends DriverError { - constructor(message: string); - } - - class BusyConnectionError extends DriverError { - constructor(address: string, maxRequestsPerConnection: number, connectionLength: number); - } - - class VIntOutOfRangeException extends DriverError { - constructor(long: Long); - } - - abstract class DriverError extends Error { - info: string; - - constructor(message: string, constructor?: any); - } - - class DriverInternalError extends DriverError { - constructor(message: string); - } - - class NoHostAvailableError extends DriverError { - innerErrors: any; - - constructor(innerErrors: any, message?: string); - } - - class NotSupportedError extends DriverError { - constructor(message: string); - } - - class OperationTimedOutError extends DriverError { - host?: string; - - constructor(message: string, host?: string); - } - - class ResponseError extends DriverError { - code: number; - - constructor(code: number, message: string); - } -} - -export namespace token { - interface Token { - compare(other: Token): number; - - equals(other: Token): boolean; - - getType(): { code: types.dataTypes, info: any }; - - getValue(): any; - } - - interface TokenRange { - start: Token; - end: Token; - - compare(other: TokenRange): number; - - contains(token: Token): boolean; - - equals(other: TokenRange): boolean; - - isEmpty(): boolean; - - isWrappedAround(): boolean; - - splitEvenly(numberOfSplits: number): TokenRange[]; - - unwrap(): TokenRange[]; - } -} \ No 
newline at end of file diff --git a/index.js b/index.js deleted file mode 100644 index 2acf5ccb6..000000000 --- a/index.js +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const clientOptions = require('./lib/client-options'); -exports.Client = require('./lib/client'); -exports.ExecutionProfile = require('./lib/execution-profile').ExecutionProfile; -exports.ExecutionOptions = require('./lib/execution-options').ExecutionOptions; -exports.types = require('./lib/types'); -exports.errors = require('./lib/errors'); -exports.policies = require('./lib/policies'); -exports.auth = require('./lib/auth'); -exports.mapping = require('./lib/mapping'); -exports.tracker = require('./lib/tracker'); -exports.metrics = require('./lib/metrics'); -exports.concurrent = require('./lib/concurrent'); - -const token = require('./lib/token'); -exports.token = { - Token: token.Token, - TokenRange: token.TokenRange -}; -const Metadata = require('./lib/metadata'); -exports.metadata = { - Metadata: Metadata -}; -exports.Encoder = require('./lib/encoder'); -exports.geometry = require('./lib/geometry'); -exports.datastax = require('./lib/datastax'); -/** - * Returns a new instance of the default [options]{@link ClientOptions} used by the driver. - */ -exports.defaultOptions = function () { - return clientOptions.defaultOptions(); -}; -exports.version = require('./package.json').version; \ No newline at end of file diff --git a/index.ts b/index.ts new file mode 100644 index 000000000..d58545df9 --- /dev/null +++ b/index.ts @@ -0,0 +1,118 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import auth from './lib/auth/index'; +import Client, { type ClientOptions, type QueryOptions } from "./lib/client"; +import clientOptions from "./lib/client-options"; +import concurrent from "./lib/concurrent/index"; +import datastax from "./lib/datastax/index"; +import Encoder from "./lib/encoder"; +import errors from "./lib/errors"; +import geometry from "./lib/geometry/index"; +import mapping from "./lib/mapping/index"; +import Metadata from "./lib/metadata/index"; +import metrics from "./lib/metrics/index"; +import policies from "./lib/policies/index"; +import Token from "./lib/token"; +import tracker from "./lib/tracker/index"; +import types from "./lib/types/index"; +import packageJson from './package.json'; + +import { ExecutionOptions } from './lib/execution-options'; +import { ExecutionProfile } from './lib/execution-profile'; + +const token = { + Token: Token.Token, + TokenRange: Token.TokenRange +}; +const metadata = {Metadata: Metadata}; +const defaultOptions = function () { + return clientOptions.defaultOptions(); +}; +const version = packageJson.version; + +const cassandra = { + Client, + ExecutionProfile, + ExecutionOptions, + types: { + ...types, + /** @internal */ + getDataTypeNameByCode: types.getDataTypeNameByCode, + /** @internal */ + FrameHeader: types.FrameHeader, + /** @internal */ + generateTimestamp: types.generateTimestamp, + }, + errors, + policies, + auth: { + ...auth, + /** @internal */ + NoAuthProvider: auth.NoAuthProvider + }, + mapping, + tracker, + metrics, + concurrent, + token, + metadata, + Encoder, + geometry, + datastax: { + ...datastax, + graph: { + ...datastax.graph, + /** @internal */ + getCustomTypeSerializers: datastax.graph.getCustomTypeSerializers, + /** @internal */ + GraphTypeWrapper: datastax.graph.GraphTypeWrapper, + /** @internal */ + UdtGraphWrapper: datastax.graph.UdtGraphWrapper, + } + }, + /** + * Returns a new instance of the default [options]{@link ClientOptions} used by the driver. + */ + defaultOptions, + version, +}; + +export default cassandra; + + + +export { + auth, Client, concurrent, datastax, + /** + * Returns a new instance of the default [options]{@link ClientOptions} used by the driver. + */ + defaultOptions, Encoder, errors, ExecutionOptions, ExecutionProfile, geometry, mapping, metadata, metrics, policies, token, tracker, types, version, type ClientOptions, type QueryOptions +}; + +// We need those for something like this to work: client.execute(query, (err: DriverError, result: ResultSet) => {})) +export * from './lib/auth/index'; +export * from './lib/concurrent/index'; +export * from './lib/datastax/index'; +export * from './lib/errors'; +export * from './lib/host'; +export * from './lib/geometry/index'; +export * from './lib/mapping/index'; +export * from './lib/metadata/index'; +export * from './lib/metrics/index'; +export * from './lib/policies/index'; +export * from './lib/tracker/index'; +export * from './lib/types/index'; + diff --git a/lib/auth/base-dse-authenticator.js b/lib/auth/base-dse-authenticator.js deleted file mode 100644 index bddc6493e..000000000 --- a/lib/auth/base-dse-authenticator.js +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
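Stepping back to the new index.ts above: it exposes both an aggregate default export and individual named exports, so either import style works. A small sketch, assuming the published package keeps the cassandra-driver name:

```typescript
import cassandra, { Client, types } from 'cassandra-driver';

// The aggregate default export mirrors the shape of the old index.js module...
console.log(cassandra.version);
const defaults = cassandra.defaultOptions();

// ...while named exports let callers pull in only what they need.
const client = new Client({ contactPoints: ['127.0.0.1'], localDataCenter: 'datacenter1' });
const id = types.Uuid.random();
```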
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const { Authenticator } = require('./provider'); - -const dseAuthenticatorName = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; - -/** - * Base class for Authenticator implementations that want to make use of - * the authentication scheme negotiation in the DseAuthenticator - * @param {String} authenticatorName - * @extends Authenticator - * @constructor - * @ignore - */ -function BaseDseAuthenticator(authenticatorName) { - this.authenticatorName = authenticatorName; -} - -util.inherits(BaseDseAuthenticator, Authenticator); - -/** - * Return a Buffer containing the required SASL mechanism. - * @abstract - * @returns {Buffer} - */ -BaseDseAuthenticator.prototype.getMechanism = function () { - throw new Error('Not implemented'); -}; - -/** - * Return a byte array containing the expected successful server challenge. - * @abstract - * @returns {Buffer} - */ -BaseDseAuthenticator.prototype.getInitialServerChallenge = function () { - throw new Error('Not implemented'); -}; - -/** - * @param {Function} callback - * @override - */ -BaseDseAuthenticator.prototype.initialResponse = function (callback) { - if (!this._isDseAuthenticator()) { - //fallback - return this.evaluateChallenge(this.getInitialServerChallenge(), callback); - } - //send the mechanism as a first auth message - callback(null, this.getMechanism()); -}; - -/** - * Determines if the name of the authenticator matches DSE 5+ - * @protected - * @ignore - */ -BaseDseAuthenticator.prototype._isDseAuthenticator = function () { - return this.authenticatorName === dseAuthenticatorName; -}; - -module.exports = BaseDseAuthenticator; \ No newline at end of file diff --git a/lib/auth/base-dse-authenticator.ts b/lib/auth/base-dse-authenticator.ts new file mode 100644 index 000000000..571cce0d5 --- /dev/null +++ b/lib/auth/base-dse-authenticator.ts @@ -0,0 +1,74 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Authenticator } from './provider'; + +const dseAuthenticatorName = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; + +/** + * Base class for Authenticator implementations that want to make use of + * the authentication scheme negotiation in the DseAuthenticator + */ +class BaseDseAuthenticator extends Authenticator { + authenticatorName: string; + + constructor(authenticatorName: string) { + super(); + this.authenticatorName = authenticatorName; + } + + /** + * Return a Buffer containing the required SASL mechanism. 
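+ * For the DSE authenticator this is the SASL mechanism name (e.g. 'PLAIN' or 'GSSAPI' in the providers below) encoded as a UTF-8 buffer.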
+ * @abstract + * @returns {Buffer} + */ + getMechanism(): Buffer { + throw new Error('Not implemented'); + } + + /** + * Return a byte array containing the expected successful server challenge. + * @abstract + * @returns {Buffer} + */ + getInitialServerChallenge(): Buffer { + throw new Error('Not implemented'); + } + + /** + * @param {Function} callback + * @override + */ + initialResponse(callback: Function) { + if (!this._isDseAuthenticator()) { + // fallback + return this.evaluateChallenge(this.getInitialServerChallenge(), callback); + } + // send the mechanism as a first auth message + callback(null, this.getMechanism()); + } + + /** + * Determines if the name of the authenticator matches DSE 5+ + * @protected + * @ignore @internal + */ + protected _isDseAuthenticator(): boolean { + return this.authenticatorName === dseAuthenticatorName; + } +} + +export default BaseDseAuthenticator; \ No newline at end of file diff --git a/lib/auth/dse-gssapi-auth-provider.js b/lib/auth/dse-gssapi-auth-provider.js deleted file mode 100644 index ac25a5185..000000000 --- a/lib/auth/dse-gssapi-auth-provider.js +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const { AuthProvider } = require('./provider'); -const BaseDseAuthenticator = require('./base-dse-authenticator'); -const GssapiClient = require('./gssapi-client'); -const dns = require('dns'); -const utils = require('../utils'); - -const mechanism = utils.allocBufferFromString('GSSAPI'); -const initialServerChallenge = 'GSSAPI-START'; -const emptyBuffer = utils.allocBuffer(0); - -/** - * Creates a new instance of DseGssapiAuthProvider. - * @classdesc - * AuthProvider that provides GSSAPI authenticator instances for clients to connect - * to DSE clusters secured with the DseAuthenticator. - * @param {Object} [gssOptions] GSSAPI authenticator options - * @param {String} [gssOptions.authorizationId] The optional authorization ID. Providing an authorization ID allows the - * currently authenticated user to act as a different user (a.k.a. proxy authentication). - * @param {String} [gssOptions.service] The service to use. Defaults to 'dse'. - * @param {Function} [gssOptions.hostNameResolver] A method to be used to resolve the name of the Cassandra node based - * on the IP Address. Defaults to [lookupServiceResolver]{@link module:auth~DseGssapiAuthProvider.lookupServiceResolver} - * which resolves the FQDN of the provided IP to generate principals in the format of - * dse/example.com@MYREALM.COM. - * Alternatively, you can use [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver} to do a - * reverse DNS lookup or [useIpResolver]{@link module:auth~DseGssapiAuthProvider.useIpResolver} to simply use the IP - * address provided. - * @param {String} [gssOptions.user] DEPRECATED, it will be removed in future versions. For proxy authentication, use - * authorizationId instead. 
- * @example - * const client = new cassandra.Client({ - * contactPoints: ['h1', 'h2'], - * authProvider: new cassandra.auth.DseGssapiAuthProvider() - * }); - * @alias module:auth~DseGssapiAuthProvider - * @constructor - */ -function DseGssapiAuthProvider(gssOptions) { - //load the kerberos at construction time - try { - // eslint-disable-next-line - this._kerberos = require('kerberos'); - } - catch (err) { - if (err.code === 'MODULE_NOT_FOUND') { - const newErr = new Error('You must install module "kerberos" to use GSSAPI auth provider: ' + - 'https://www.npmjs.com/package/kerberos'); - newErr.code = err.code; - throw newErr; - } - throw err; - } - gssOptions = gssOptions || utils.emptyObject; - this.authorizationId = gssOptions.authorizationId || gssOptions.user; - this.service = gssOptions.service; - this.hostNameResolver = gssOptions.hostNameResolver || DseGssapiAuthProvider.lookupServiceResolver; -} - -util.inherits(DseGssapiAuthProvider, AuthProvider); - -/** - * Returns an Authenticator instance to be used by the driver when connecting to a host. - * @param {String} endpoint The IP address and port number in the format ip:port. - * @param {String} name Authenticator name. - * @override - * @returns {Authenticator} - */ -DseGssapiAuthProvider.prototype.newAuthenticator = function (endpoint, name) { - let address = endpoint; - if (endpoint.indexOf(':') > 0) { - address = endpoint.split(':')[0]; - } - return new GssapiAuthenticator( - this._kerberos, address, name, this.authorizationId, this.service, this.hostNameResolver); -}; - -/** - * Performs a lookupService query that resolves an IPv4 or IPv6 address to a hostname. This ultimately makes a - * getnameinfo() system call which depends on the OS to do hostname resolution. - *

- * Note: Depends on dns.lookupService which was added in 0.12. For older versions falls back on - * [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver}. - * - * @param {String} ip IP address to resolve. - * @param {Function} callback The callback function with err and hostname arguments. - */ -DseGssapiAuthProvider.lookupServiceResolver = function (ip, callback) { - if (!dns.lookupService) { - return DseGssapiAuthProvider.reverseDnsResolver(ip, callback); - } - dns.lookupService(ip, 0, function (err, hostname) { - if (err) { - return callback(err); - } - if (!hostname) { - //fallback to ip - return callback(null, ip); - } - callback(null, hostname); - }); -}; - -/** - * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to a hostname. - * @param {String} ip IP address to resolve. - * @param {Function} callback The callback function with err and hostname arguments. - */ -DseGssapiAuthProvider.reverseDnsResolver = function (ip, callback) { - dns.reverse(ip, function (err, names) { - if (err) { - return callback(err); - } - if (!names || !names.length) { - //fallback to ip - return callback(null, ip); - } - callback(null, names[0]); - }); -}; - -/** - * Effectively a no op operation, returns the IP address provided. - * @param {String} ip IP address to use. - * @param {Function} callback The callback function with err and hostname arguments. - */ -DseGssapiAuthProvider.useIpResolver = function (ip, callback) { - callback(null, ip); -}; - -/** - * @param {Object} kerberosModule - * @param {String} address Host address. - * @param {String} authenticatorName - * @param {String} authorizationId - * @param {String} service - * @param {Function} hostNameResolver - * @extends Authenticator - * @private - */ -function GssapiAuthenticator(kerberosModule, address, authenticatorName, authorizationId, service, hostNameResolver) { - BaseDseAuthenticator.call(this, authenticatorName); - this.authorizationId = authorizationId; - this.address = address; - this.client = GssapiClient.createNew(kerberosModule, authorizationId, service); - this.hostNameResolver = hostNameResolver; -} - -//noinspection JSCheckFunctionSignatures -util.inherits(GssapiAuthenticator, BaseDseAuthenticator); - -GssapiAuthenticator.prototype.getMechanism = function () { - return mechanism; -}; - -GssapiAuthenticator.prototype.getInitialServerChallenge = function () { - return utils.allocBufferFromString(initialServerChallenge); -}; - -//noinspection JSUnusedGlobalSymbols -/** - * Obtain an initial response token for initializing the SASL handshake. - * @param {Function} callback - */ -GssapiAuthenticator.prototype.initialResponse = function (callback) { - const self = this; - //initialize the GSS client - let host = this.address; - utils.series([ - function getHostName(next) { - self.hostNameResolver(self.address, function (err, name) { - if (!err && name) { - host = name; - } - next(); - }); - }, - function initClient(next) { - self.client.init(host, function (err) { - if (err) { - return next(err); - } - if (!self._isDseAuthenticator()) { - //fallback - return self.evaluateChallenge(self.getInitialServerChallenge(), next); - } - //send the mechanism as a first auth message - next(null, self.getMechanism()); - }); - } - ], callback); -}; - -/** - * Evaluates a challenge received from the Server. Generally, this method should callback with - * no error and no additional params when authentication is complete from the client perspective. 
- * @param {Buffer} challenge - * @param {Function} callback - * @override - */ -GssapiAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { - if (!challenge || challenge.toString() === initialServerChallenge) { - challenge = emptyBuffer; - } - this.client.evaluateChallenge(challenge, callback); -}; - -/** - * @override - */ -GssapiAuthenticator.prototype.onAuthenticationSuccess = function (token) { - this.client.shutdown(function noop() { }); -}; - - -module.exports = DseGssapiAuthProvider; \ No newline at end of file diff --git a/lib/auth/dse-gssapi-auth-provider.ts b/lib/auth/dse-gssapi-auth-provider.ts new file mode 100644 index 000000000..cd6ad6c79 --- /dev/null +++ b/lib/auth/dse-gssapi-auth-provider.ts @@ -0,0 +1,261 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import dns from "dns"; +import utils from "../utils"; +import BaseDseAuthenticator from './base-dse-authenticator'; +import GssapiClient from './gssapi-client'; +import { Authenticator, AuthProvider } from "./provider"; + +const mechanism = utils.allocBufferFromString('GSSAPI'); +const initialServerChallenge = 'GSSAPI-START'; +const emptyBuffer = utils.allocBuffer(0); + +/** + * @classdesc + * AuthProvider that provides GSSAPI authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DseGssapiAuthProvider() + * }); + * @alias module:auth~DseGssapiAuthProvider + */ +class DseGssapiAuthProvider extends AuthProvider { + private _kerberos: any; + private authorizationId: string; + private service: string; + private hostNameResolver: Function; + + /** + * Creates a new instance of DseGssapiAuthProvider. + * @classdesc + * AuthProvider that provides GSSAPI authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {Object} [gssOptions] GSSAPI authenticator options + * @param {String} [gssOptions.authorizationId] The optional authorization ID. Providing an authorization ID allows the + * currently authenticated user to act as a different user (a.k.a. proxy authentication). + * @param {String} [gssOptions.service] The service to use. Defaults to 'dse'. + * @param {Function} [gssOptions.hostNameResolver] A method to be used to resolve the name of the Cassandra node based + * on the IP Address. Defaults to [lookupServiceResolver]{@link module:auth~DseGssapiAuthProvider.lookupServiceResolver} + * which resolves the FQDN of the provided IP to generate principals in the format of + * dse/example.com@MYREALM.COM. + * Alternatively, you can use [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver} to do a + * reverse DNS lookup or [useIpResolver]{@link module:auth~DseGssapiAuthProvider.useIpResolver} to simply use the IP + * address provided. 
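+ * For example, passing `{ hostNameResolver: DseGssapiAuthProvider.reverseDnsResolver }` forces a reverse DNS lookup (illustrative; any `(ip, callback)` resolver works).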
+ * @param {String} [gssOptions.user] DEPRECATED, it will be removed in future versions. For proxy authentication, use + * authorizationId instead. + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DseGssapiAuthProvider() + * }); + * @alias module:auth~DseGssapiAuthProvider + * @constructor + */ + constructor(gssOptions: { authorizationId?: string; service?: string; hostNameResolver?: Function; user?: string; }) { + super(); + // Load the kerberos at construction time + try { + // eslint-disable-next-line + this._kerberos = require('kerberos'); + } catch (err) { + if (err.code === 'MODULE_NOT_FOUND') { + const newErr = new Error('You must install module "kerberos" to use GSSAPI auth provider: ' + + 'https://www.npmjs.com/package/kerberos'); + newErr["code"] = err.code; + throw newErr; + } + throw err; + } + gssOptions = gssOptions || utils.emptyObject; + this.authorizationId = gssOptions.authorizationId || gssOptions.user; + this.service = gssOptions.service; + this.hostNameResolver = gssOptions.hostNameResolver || DseGssapiAuthProvider.lookupServiceResolver; + } + + /** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. + * @override + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator { + let address = endpoint; + if (endpoint.indexOf(':') > 0) { + address = endpoint.split(':')[0]; + } + return new GssapiAuthenticator( + this._kerberos, address, name, this.authorizationId, this.service, this.hostNameResolver); + } + + /** + * Performs a lookupService query that resolves an IPv4 or IPv6 address to a hostname. This ultimately makes a + * getnameinfo() system call which depends on the OS to do hostname resolution. + *

+ * Note: Depends on dns.lookupService which was added in 0.12. For older versions falls back on + * [reverseDnsResolver]{@link module:auth~DseGssapiAuthProvider.reverseDnsResolver}. + * + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ + private static lookupServiceResolver(ip: string, callback: Function) { + if (!dns.lookupService) { + return DseGssapiAuthProvider.reverseDnsResolver(ip, callback); + } + dns.lookupService(ip, 0, function (err, hostname) { + if (err) { + return callback(err); + } + if (!hostname) { + // Fallback to ip + return callback(null, ip); + } + callback(null, hostname); + }); + } + + //TODO: reverseDnsResolver and useIpResolver are in JSDoc but we didn't expose them in .d.ts. Should we? + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to a hostname. + * @param {String} ip IP address to resolve. + * @param {Function} callback The callback function with err and hostname arguments. + */ + static reverseDnsResolver(ip: string, callback: Function) { + dns.reverse(ip, function (err, names) { + if (err) { + return callback(err); + } + if (!names || !names.length) { + // Fallback to ip + return callback(null, ip); + } + callback(null, names[0]); + }); + } + + /** + * Effectively a no op operation, returns the IP address provided. + * @param {String} ip IP address to use. + * @param {Function} callback The callback function with err and hostname arguments. + */ + static useIpResolver(ip: string, callback: Function) { + callback(null, ip); + } +} + +/** + * @param {Object} kerberosModule + * @param {String} address Host address. + * @param {String} authenticatorName + * @param {String} authorizationId + * @param {String} service + * @param {Function} hostNameResolver + * @extends Authenticator + * @private @internal + */ +class GssapiAuthenticator extends BaseDseAuthenticator { + authorizationId: any; + address: any; + client: GssapiClient; + hostNameResolver: any; + + /** + * @param {Object} kerberosModule + * @param {String} address Host address. + * @param {String} authenticatorName + * @param {String} authorizationId + * @param {String} service + * @param {Function} hostNameResolver + * @extends Authenticator + * @private + */ + constructor(kerberosModule: object, address: string, authenticatorName: string, authorizationId: string, service: string, hostNameResolver: Function) { + super(authenticatorName); + this.authorizationId = authorizationId; + this.address = address; + this.client = GssapiClient.createNew(kerberosModule, authorizationId, service); + this.hostNameResolver = hostNameResolver; + } + + getMechanism() { + return mechanism; + } + + getInitialServerChallenge() { + return utils.allocBufferFromString(initialServerChallenge); + } + + //noinspection JSUnusedGlobalSymbols + /** + * Obtain an initial response token for initializing the SASL handshake. 
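+ * The node's host name is resolved first (so the Kerberos service principal can be built), then the GSS client is initialized before the first token is sent.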
+ * @param {Function} callback + */ + initialResponse(callback: Function) { + const self = this; + // Initialize the GSS client + let host = this.address; + utils.series([ + function getHostName(next) { + self.hostNameResolver(self.address, function (err, name) { + if (!err && name) { + host = name; + } + next(); + }); + }, + function initClient(next) { + self.client.init(host, function (err) { + if (err) { + return next(err); + } + if (!self._isDseAuthenticator()) { + // Fallback + return self.evaluateChallenge(self.getInitialServerChallenge(), next); + } + // Send the mechanism as a first auth message + next(null, self.getMechanism()); + }); + } + ], callback); + } + + /** + * Evaluates a challenge received from the Server. Generally, this method should callback with + * no error and no additional params when authentication is complete from the client perspective. + * @param {Buffer} challenge + * @param {Function} callback + * @override + */ + evaluateChallenge(challenge: Buffer, callback: Function) { + if (!challenge || challenge.toString() === initialServerChallenge) { + challenge = emptyBuffer; + } + this.client.evaluateChallenge(challenge, callback); + } + + /** + * @override + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + onAuthenticationSuccess(token?: Buffer) { + this.client.shutdown(function noop() { }); + } +} + +export default DseGssapiAuthProvider; \ No newline at end of file diff --git a/lib/auth/dse-plain-text-auth-provider.js b/lib/auth/dse-plain-text-auth-provider.js deleted file mode 100644 index 64a0f649e..000000000 --- a/lib/auth/dse-plain-text-auth-provider.js +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const { AuthProvider } = require('./provider'); -const BaseDseAuthenticator = require('./base-dse-authenticator'); -const utils = require('../utils'); - -const mechanism = utils.allocBufferFromString('PLAIN'); -const separatorBuffer = utils.allocBufferFromArray([0]); -const initialServerChallenge = 'PLAIN-START'; - -/** - * Creates a new instance of DsePlainTextAuthProvider. - * @classdesc - * AuthProvider that provides plain text authenticator instances for clients to connect - * to DSE clusters secured with the DseAuthenticator. - * @param {String} username The username; cannot be null. - * @param {String} password The password; cannot be null. - * @param {String} [authorizationId] The optional authorization ID. Providing an authorization ID allows the currently - * authenticated user to act as a different user (a.k.a. proxy authentication). 
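To make the proxy-authentication option concrete, a brief sketch (credentials are placeholders); as the authenticator below spells out, the values are sent as the SASL PLAIN token authorizationId NUL username NUL password:

```typescript
import { auth, Client } from 'cassandra-driver';

// Authenticate as 'alice' (placeholder credentials).
const simple = new auth.DsePlainTextAuthProvider('alice', 's3cr3t');

// Authenticate as 'alice' but run requests as 'bob' (proxy authentication).
const proxied = new auth.DsePlainTextAuthProvider('alice', 's3cr3t', 'bob');

const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  authProvider: proxied
});
```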
- * @extends AuthProvider - * @alias module:auth~DsePlainTextAuthProvider - * @example - * const client = new cassandra.Client({ - * contactPoints: ['h1', 'h2'], - * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); - * }); - * @constructor - */ -function DsePlainTextAuthProvider(username, password, authorizationId) { - if (typeof username !== 'string' || typeof password !== 'string') { - // Validate for null and undefined - throw new TypeError('Username and password must be a string'); - } - this.username = username; - this.password = password; - this.authorizationId = authorizationId; -} - -util.inherits(DsePlainTextAuthProvider, AuthProvider); - -/** - * Returns an Authenticator instance to be used by the driver when connecting to a host. - * @param {String} endpoint The IP address and port number in the format ip:port. - * @param {String} name Authenticator name. - * @override - * @returns {Authenticator} - */ -DsePlainTextAuthProvider.prototype.newAuthenticator = function (endpoint, name) { - return new PlainTextAuthenticator(name, this.username, this.password, this.authorizationId); -}; - -/** - * @param {String} authenticatorName - * @param {String} authenticatorId - * @param {String} password - * @param {String} authorizationId - * @extends BaseDseAuthenticator - * @constructor - * @private - */ -function PlainTextAuthenticator(authenticatorName, authenticatorId, password, authorizationId) { - BaseDseAuthenticator.call(this, authenticatorName); - this.authenticatorId = utils.allocBufferFromString(authenticatorId); - this.password = utils.allocBufferFromString(password); - this.authorizationId = utils.allocBufferFromString(authorizationId || ''); -} - -util.inherits(PlainTextAuthenticator, BaseDseAuthenticator); - -/** @override */ -PlainTextAuthenticator.prototype.getMechanism = function () { - return mechanism; -}; - -/** @override */ -PlainTextAuthenticator.prototype.getInitialServerChallenge = function () { - return utils.allocBufferFromString(initialServerChallenge); -}; - -/** @override */ -PlainTextAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { - if (!challenge || challenge.toString() !== initialServerChallenge) { - return callback(new Error('Incorrect SASL challenge from server')); - } - // The SASL plain text format is authorizationId 0 username 0 password - callback(null, Buffer.concat([ - this.authorizationId, - separatorBuffer, - this.authenticatorId, - separatorBuffer, - this.password - ])); -}; - -module.exports = DsePlainTextAuthProvider; \ No newline at end of file diff --git a/lib/auth/dse-plain-text-auth-provider.ts b/lib/auth/dse-plain-text-auth-provider.ts new file mode 100644 index 000000000..f8e95ae01 --- /dev/null +++ b/lib/auth/dse-plain-text-auth-provider.ts @@ -0,0 +1,132 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import utils from "../utils"; +import BaseDseAuthenticator from './base-dse-authenticator'; +import { Authenticator, AuthProvider } from './provider'; + +const mechanism = utils.allocBufferFromString('PLAIN'); +const separatorBuffer = utils.allocBufferFromArray([0]); +const initialServerChallenge = 'PLAIN-START'; + +/** + * @classdesc + * AuthProvider that provides plain text authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @extends AuthProvider + * @alias module:auth~DsePlainTextAuthProvider + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); + * }); + */ +class DsePlainTextAuthProvider extends AuthProvider { + private username: string; + private password: string; + private authorizationId: string; + /** + * Creates a new instance of DsePlainTextAuthProvider. + * @classdesc + * AuthProvider that provides plain text authenticator instances for clients to connect + * to DSE clusters secured with the DseAuthenticator. + * @param {String} username The username; cannot be null. + * @param {String} password The password; cannot be null. + * @param {String} [authorizationId] The optional authorization ID. Providing an authorization ID allows the currently + * authenticated user to act as a different user (a.k.a. proxy authentication). + * @extends AuthProvider + * @alias module:auth~DsePlainTextAuthProvider + * @example + * const client = new cassandra.Client({ + * contactPoints: ['h1', 'h2'], + * authProvider: new cassandra.auth.DsePlainTextAuthProvider('user', 'p@ssword1'); + * }); + * @constructor + */ + constructor(username: string, password: string, authorizationId?: string) { + super(); + if (typeof username !== 'string' || typeof password !== 'string') { + // Validate for null and undefined + throw new TypeError('Username and password must be a string'); + } + this.username = username; + this.password = password; + this.authorizationId = authorizationId; + } + + /** + * Returns an Authenticator instance to be used by the driver when connecting to a host. + * @param {String} endpoint The IP address and port number in the format ip:port. + * @param {String} name Authenticator name. 
+ * @override + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator { + return new PlainTextAuthenticator(name, this.username, this.password, this.authorizationId); + } +} + +/** + * @extends BaseDseAuthenticator + * @private @internal + */ +class PlainTextAuthenticator extends BaseDseAuthenticator { + authenticatorId: Buffer; + password: Buffer; + authorizationId: Buffer; + + /** + * @param {String} authenticatorName + * @param {String} authenticatorId + * @param {String} password + * @param {String} authorizationId + * @constructor + * @private + */ + constructor(authenticatorName: string, authenticatorId: string, password: string, authorizationId: string) { + super(authenticatorName); + this.authenticatorId = utils.allocBufferFromString(authenticatorId); + this.password = utils.allocBufferFromString(password); + this.authorizationId = utils.allocBufferFromString(authorizationId || ''); + } + + /** @override */ + getMechanism() { + return mechanism; + } + + /** @override */ + getInitialServerChallenge() { + return utils.allocBufferFromString(initialServerChallenge); + } + + /** @override */ + evaluateChallenge(challenge, callback) { + if (!challenge || challenge.toString() !== initialServerChallenge) { + return callback(new Error('Incorrect SASL challenge from server')); + } + // The SASL plain text format is authorizationId 0 username 0 password + callback(null, Buffer.concat([ + this.authorizationId, + separatorBuffer, + this.authenticatorId, + separatorBuffer, + this.password + ])); + } +} + +export default DsePlainTextAuthProvider; \ No newline at end of file diff --git a/lib/auth/gssapi-client.js b/lib/auth/gssapi-client.ts similarity index 87% rename from lib/auth/gssapi-client.js rename to lib/auth/gssapi-client.ts index 92af818ba..4a18ea17e 100644 --- a/lib/auth/gssapi-client.js +++ b/lib/auth/gssapi-client.ts @@ -13,22 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import util from "util"; +import utils from "../utils"; -'use strict'; -const util = require('util'); -const utils = require('../utils'); /** * GSSAPI Client interface. * @ignore + * @internal */ class GssapiClient { + authorizationId: string; + service: string; /** * @param {String} [authorizationId] * @param {String} [service] */ - constructor(authorizationId, service) { + constructor(authorizationId: string, service: string) { this.authorizationId = authorizationId; this.service = service !== undefined ? service : 'dse'; } @@ -38,7 +41,7 @@ class GssapiClient { * @param {String} host Host name or ip * @param {Function} callback */ - init(host, callback) { + init(host: string, callback: Function) { throw new Error('Not implemented'); } @@ -47,7 +50,7 @@ class GssapiClient { * @param {Function} callback * @abstract */ - evaluateChallenge(challenge, callback) { + evaluateChallenge(challenge: Buffer, callback: Function) { throw new Error('Not implemented'); } @@ -55,7 +58,7 @@ class GssapiClient { * @abstract * @param {Function} [callback] */ - shutdown(callback) { + shutdown(callback: Function) { throw new Error('Not implemented'); } @@ -66,16 +69,20 @@ class GssapiClient { * @param {String} [service] The service to use. 
(defaults to 'dse') * @returns GssapiClient */ - static createNew(kerberosModule, authorizationId, service) { + static createNew(kerberosModule: object, authorizationId?: string, service?: string) { return new StandardGssClient(kerberosModule, authorizationId, service); } } /** * GSSAPI Client implementation using kerberos module. - * @ignore + * @ignore @internal */ class StandardGssClient extends GssapiClient { + kerberos: any; + transitionIndex: number; + host: any; + kerberosClient: any; constructor(kerberosModule, authorizationId, service) { if (typeof kerberosModule.initializeClient !== 'function') { throw new Error('The driver expects version 1.x of the kerberos library'); @@ -152,4 +159,4 @@ class StandardGssClient extends GssapiClient { } } -module.exports = GssapiClient; \ No newline at end of file +export default GssapiClient; \ No newline at end of file diff --git a/lib/auth/index.d.ts b/lib/auth/index.d.ts deleted file mode 100644 index c1c374134..000000000 --- a/lib/auth/index.d.ts +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -export namespace auth { - interface Authenticator { - initialResponse(callback: Function): void; - - evaluateChallenge(challenge: Buffer, callback: Function): void; - - onAuthenticationSuccess(token?: Buffer): void; - } - - interface AuthProvider { - newAuthenticator(endpoint: string, name: string): Authenticator; - } - - class PlainTextAuthProvider implements AuthProvider { - constructor(username: string, password: string); - - newAuthenticator(endpoint: string, name: string): Authenticator; - } - - class DsePlainTextAuthProvider implements AuthProvider { - constructor(username: string, password: string, authorizationId?: string); - - newAuthenticator(endpoint: string, name: string): Authenticator; - } - - class DseGssapiAuthProvider implements AuthProvider { - constructor(gssOptions?: { authorizationId?: string, service?: string, hostNameResolver?: Function }); - - newAuthenticator(endpoint: string, name: string): Authenticator; - } -} \ No newline at end of file diff --git a/lib/auth/index.js b/lib/auth/index.ts similarity index 63% rename from lib/auth/index.js rename to lib/auth/index.ts index b79b14e22..4a463ce86 100644 --- a/lib/auth/index.js +++ b/lib/auth/index.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; + /** * DSE Authentication module. 
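Because Authenticator and AuthProvider are exported from this module (and re-exported from the package root), applications can also plug in their own implementation. A minimal sketch for illustration only; the fixed-token scheme is made up for the example:

```typescript
import { Authenticator, AuthProvider, Client } from 'cassandra-driver';

// Toy authenticator that always answers the server with one pre-built token.
class StaticTokenAuthenticator extends Authenticator {
  constructor(private token: Buffer) {
    super();
  }

  initialResponse(callback: Function): void {
    callback(null, this.token);
  }

  evaluateChallenge(_challenge: Buffer, callback: Function): void {
    // Nothing further to prove in this toy scheme; signal completion.
    callback();
  }
}

// Provider that hands the driver a new authenticator per connection.
class StaticTokenAuthProvider extends AuthProvider {
  constructor(private token: Buffer) {
    super();
  }

  newAuthenticator(_endpoint: string, _name: string): Authenticator {
    return new StaticTokenAuthenticator(this.token);
  }
}

const client = new Client({
  contactPoints: ['127.0.0.1'],
  localDataCenter: 'datacenter1',
  authProvider: new StaticTokenAuthProvider(Buffer.from('placeholder-token'))
});
```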
@@ -23,17 +23,28 @@ * @module auth */ -const { Authenticator, AuthProvider } = require('./provider'); -const { PlainTextAuthProvider } = require('./plain-text-auth-provider'); -const DseGssapiAuthProvider = require('./dse-gssapi-auth-provider'); -const DsePlainTextAuthProvider = require('./dse-plain-text-auth-provider'); -const NoAuthProvider = require('./no-auth-provider'); +import DseGssapiAuthProvider from './dse-gssapi-auth-provider'; +import DsePlainTextAuthProvider from './dse-plain-text-auth-provider'; +import NoAuthProvider from './no-auth-provider'; +import { PlainTextAuthProvider } from './plain-text-auth-provider'; +import { Authenticator, AuthProvider } from './provider'; -module.exports = { +export { Authenticator, AuthProvider, DseGssapiAuthProvider, DsePlainTextAuthProvider, + /** @internal */ NoAuthProvider, PlainTextAuthProvider }; + +export default { + Authenticator, + AuthProvider, + DseGssapiAuthProvider, + DsePlainTextAuthProvider, + /** @internal */ + NoAuthProvider, + PlainTextAuthProvider +}; \ No newline at end of file diff --git a/lib/auth/no-auth-provider.js b/lib/auth/no-auth-provider.ts similarity index 88% rename from lib/auth/no-auth-provider.js rename to lib/auth/no-auth-provider.ts index 7dbcd9ea5..467a0cca6 100644 --- a/lib/auth/no-auth-provider.js +++ b/lib/auth/no-auth-provider.ts @@ -14,17 +14,16 @@ * limitations under the License. */ -'use strict'; -const { AuthProvider, Authenticator } = require('./provider'); -const { PlainTextAuthenticator } = require('./plain-text-auth-provider'); -const errors = require('../errors'); +import errors from "../errors"; +import { PlainTextAuthenticator } from './plain-text-auth-provider'; +import { Authenticator, AuthProvider } from './provider'; const dseAuthenticator = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; /** * Internal authentication provider that is used when no provider has been set by the user. - * @ignore + * @ignore @internal */ class NoAuthProvider extends AuthProvider { newAuthenticator(endpoint, name) { @@ -40,9 +39,10 @@ class NoAuthProvider extends AuthProvider { /** * An authenticator throws an error when authentication flow is started. - * @ignore + * @ignore @internal */ class NoAuthAuthenticator extends Authenticator { + endpoint: any; constructor(endpoint) { super(); this.endpoint = endpoint; @@ -60,6 +60,7 @@ class NoAuthAuthenticator extends Authenticator { * In this situation, the client is allowed to connect without authentication, but DSE * would still send an AUTHENTICATE response. This Authenticator handles this situation * by sending back a dummy credential. + * @internal */ class TransitionalModePlainTextAuthenticator extends PlainTextAuthenticator { constructor() { @@ -67,4 +68,4 @@ class TransitionalModePlainTextAuthenticator extends PlainTextAuthenticator { } } -module.exports = NoAuthProvider; +export default NoAuthProvider; diff --git a/lib/auth/plain-text-auth-provider.js b/lib/auth/plain-text-auth-provider.js deleted file mode 100644 index f5241f7e4..000000000 --- a/lib/auth/plain-text-auth-provider.js +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); - -const provider = require('./provider.js'); -const utils = require('../utils'); -const AuthProvider = provider.AuthProvider; -const Authenticator = provider.Authenticator; -/** - * Creates a new instance of the Authenticator provider - * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when - * connecting to a host. - * @extends module:auth~AuthProvider - * @example - * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); - * //Set the auth provider in the clientOptions when creating the Client instance - * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); - * @param {String} username User name in plain text - * @param {String} password Password in plain text - * @alias module:auth~PlainTextAuthProvider - * @constructor - */ -function PlainTextAuthProvider(username, password) { - this.username = username; - this.password = password; -} - -util.inherits(PlainTextAuthProvider, AuthProvider); - -/** - * Returns a new [Authenticator]{@link module:auth~Authenticator} instance to be used for plain text authentication. - * @override - * @returns {Authenticator} - */ -PlainTextAuthProvider.prototype.newAuthenticator = function () { - return new PlainTextAuthenticator(this.username, this.password); -}; - -/** - * @ignore - */ -function PlainTextAuthenticator(username, password) { - this.username = username; - this.password = password; -} - -util.inherits(PlainTextAuthenticator, Authenticator); - -PlainTextAuthenticator.prototype.initialResponse = function (callback) { - const initialToken = Buffer.concat([ - utils.allocBufferFromArray([0]), - utils.allocBufferFromString(this.username, 'utf8'), - utils.allocBufferFromArray([0]), - utils.allocBufferFromString(this.password, 'utf8') - ]); - callback(null, initialToken); -}; - -PlainTextAuthenticator.prototype.evaluateChallenge = function (challenge, callback) { - //noop - callback(); -}; - -module.exports = { - PlainTextAuthenticator, - PlainTextAuthProvider, -}; diff --git a/lib/auth/plain-text-auth-provider.ts b/lib/auth/plain-text-auth-provider.ts new file mode 100644 index 000000000..e720f66e1 --- /dev/null +++ b/lib/auth/plain-text-auth-provider.ts @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import utils from "../utils"; +import { Authenticator, AuthProvider } from './provider'; + +/** + * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when + * connecting to a host. + * @extends module:auth~AuthProvider + * @example + * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); + * //Set the auth provider in the clientOptions when creating the Client instance + * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); + * @alias module:auth~PlainTextAuthProvider + */ +class PlainTextAuthProvider extends AuthProvider { + private username: string; + private password: string; + + /** + * Creates a new instance of the Authenticator provider + * @classdesc Provides plain text [Authenticator]{@link module:auth~Authenticator} instances to be used when + * connecting to a host. + * @example + * var authProvider = new cassandra.auth.PlainTextAuthProvider('my_user', 'p@ssword1!'); + * //Set the auth provider in the clientOptions when creating the Client instance + * const client = new Client({ contactPoints: contactPoints, authProvider: authProvider }); + * @param {String} username User name in plain text + * @param {String} password Password in plain text + * @alias module:auth~PlainTextAuthProvider + * @constructor + */ + constructor(username: string, password: string) { + super(); + this.username = username; + this.password = password; + } + + /** + * Returns a new [Authenticator]{@link module:auth~Authenticator} instance to be used for plain text authentication. + * @override + * @returns {Authenticator} + */ + newAuthenticator(): Authenticator { + return new PlainTextAuthenticator(this.username, this.password); + } +} + +/** + * @ignore @internal + */ +class PlainTextAuthenticator extends Authenticator { + username: any; + password: any; + constructor(username, password) { + super(); + this.username = username; + this.password = password; + } + + initialResponse(callback) { + const initialToken = Buffer.concat([ + utils.allocBufferFromArray([0]), + utils.allocBufferFromString(this.username, 'utf8'), + utils.allocBufferFromArray([0]), + utils.allocBufferFromString(this.password, 'utf8') + ]); + callback(null, initialToken); + } + + evaluateChallenge(challenge, callback) { + // noop + callback(); + } +} + +export { + PlainTextAuthenticator, + PlainTextAuthProvider +}; diff --git a/lib/auth/provider.js b/lib/auth/provider.js deleted file mode 100644 index d4bf9ed10..000000000 --- a/lib/auth/provider.js +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -/** - * @classdesc Provides [Authenticator]{@link module:auth~Authenticator} instances to be used when connecting to a host. 
- * @constructor - * @abstract - * @alias module:auth~AuthProvider - */ -function AuthProvider() { - -} - -/** - * Returns an [Authenticator]{@link module:auth~Authenticator} instance to be used when connecting to a host. - * @param {String} endpoint The ip address and port number in the format ip:port - * @param {String} name Authenticator name - * @abstract - * @returns {Authenticator} - */ -AuthProvider.prototype.newAuthenticator = function (endpoint, name) { - throw new Error('This is an abstract class, you must implement newAuthenticator method or ' + - 'use another auth provider that inherits from this class'); -}; - -/** - * @class - * @classdesc Handles SASL authentication with Cassandra servers. - * Each time a new connection is created and the server requires authentication, - * a new instance of this class will be created by the corresponding. - * @constructor - * @alias module:auth~Authenticator - */ -function Authenticator() { - -} - -/** - * Obtain an initial response token for initializing the SASL handshake. - * @param {Function} callback - */ -Authenticator.prototype.initialResponse = function (callback) { - callback(new Error('Not implemented')); -}; - -/** - * Evaluates a challenge received from the Server. Generally, this method should callback with - * no error and no additional params when authentication is complete from the client perspective. - * @param {Buffer} challenge - * @param {Function} callback - */ -Authenticator.prototype.evaluateChallenge = function (challenge, callback) { - callback(new Error('Not implemented')); -}; - -/** - * Called when authentication is successful with the last information - * optionally sent by the server. - * @param {Buffer} [token] - */ -Authenticator.prototype.onAuthenticationSuccess = function (token) { - -}; - -exports.AuthProvider = AuthProvider; -exports.Authenticator = Authenticator; \ No newline at end of file diff --git a/lib/auth/provider.ts b/lib/auth/provider.ts new file mode 100644 index 000000000..ce084e2aa --- /dev/null +++ b/lib/auth/provider.ts @@ -0,0 +1,75 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +/** + * Provides [Authenticator]{@link module:auth~Authenticator} instances to be used when connecting to a host. + * @abstract + * @alias module:auth~AuthProvider + */ +class AuthProvider { + /** + * Returns an [Authenticator]{@link module:auth~Authenticator} instance to be used when connecting to a host. + * @param {String} endpoint The ip address and port number in the format ip:port + * @param {String} name Authenticator name + * @abstract + * @returns {Authenticator} + */ + newAuthenticator(endpoint: string, name: string): Authenticator { + throw new Error('This is an abstract class, you must implement newAuthenticator method or ' + + 'use another auth provider that inherits from this class'); + } +} + +/** + * Handles SASL authentication with Cassandra servers. 
+ * Each time a new connection is created and the server requires authentication, + * a new instance of this class will be created by the corresponding. + * @alias module:auth~Authenticator + */ +class Authenticator { + /** + * Obtain an initial response token for initializing the SASL handshake. + * @param {Function} callback + */ + initialResponse(callback: Function) { + callback(new Error('Not implemented')); + } + + /** + * Evaluates a challenge received from the Server. Generally, this method should callback with + * no error and no additional params when authentication is complete from the client perspective. + * @param {Buffer} challenge + * @param {Function} callback + */ + evaluateChallenge(challenge: Buffer, callback: Function) { + callback(new Error('Not implemented')); + } + + /** + * Called when authentication is successful with the last information + * optionally sent by the server. + * @param {Buffer} [token] + */ + onAuthenticationSuccess(token?: Buffer) { + // No-op + } +} + +export { + Authenticator, + AuthProvider +}; diff --git a/lib/client-options.js b/lib/client-options.ts similarity index 84% rename from lib/client-options.js rename to lib/client-options.ts index 029e5c8b3..436497170 100644 --- a/lib/client-options.js +++ b/lib/client-options.ts @@ -13,16 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; - -const util = require('util'); -const policies = require('./policies'); -const types = require('./types'); -const utils = require('./utils'); -const tracker = require('./tracker'); -const metrics = require('./metrics'); -const auth = require('./auth'); - +import util from "util"; +import auth from "./auth/index"; +import type Client from "./client"; +import { type ClientOptions } from "./client"; +import metrics from "./metrics/index"; +import policies from "./policies/index"; +import tracker from "./tracker/index"; +import types from "./types"; +import utils from "./utils"; + +//TODO: need to revisit those options, potentially merge the ones in JSDoc and .d.ts /** Core connections per host for protocol versions 1 and 2 */ const coreConnectionsPerHostV2 = { [types.distance.local]: 2, @@ -50,7 +51,7 @@ const continuousPageDefaultHighWaterMark = 10000; /** * @returns {ClientOptions} */ -function defaultOptions () { +function defaultOptions (): ClientOptions { return ({ policies: { addressResolution: policies.defaultAddressTranslator(), @@ -104,16 +105,19 @@ function defaultOptions () { /** * Extends and validates the user options - * @param {Object} [baseOptions] The source object instance that will be overridden - * @param {Object} userOptions + * @param {ClientOptions} [baseOptions] The source object instance that will be overridden + * @param {ClientOptions} userOptions * @returns {Object} + * @internal @ignore */ -function extend(baseOptions, userOptions) { +function extend(userOptions: Partial): ClientOptions; +function extend(baseOptions: Partial, userOptions: Partial): ClientOptions; +function extend(baseOptions: Partial, userOptions?: Partial): ClientOptions { if (arguments.length === 1) { userOptions = arguments[0]; baseOptions = {}; } - const options = utils.deepExtend(baseOptions, defaultOptions(), userOptions); + const options = utils.deepExtend(baseOptions, defaultOptions(), userOptions) as ClientOptions; if (!options.cloud) { if (!Array.isArray(options.contactPoints) || options.contactPoints.length === 0) { @@ -197,7 +201,7 @@ function validateCloudOptions(options) { * 
@param {ClientOptions.policies} policiesOptions * @private */ -function validatePoliciesOptions(policiesOptions) { +function validatePoliciesOptions(policiesOptions: ClientOptions["policies"]) { if (!policiesOptions) { throw new TypeError('policies not defined in options'); } @@ -224,7 +228,7 @@ function validatePoliciesOptions(policiesOptions) { * @param {ClientOptions.protocolOptions} protocolOptions * @private */ -function validateProtocolOptions(protocolOptions) { +function validateProtocolOptions(protocolOptions: ClientOptions["protocolOptions"]) { if (!protocolOptions) { throw new TypeError('protocolOptions not defined in options'); } @@ -239,7 +243,7 @@ function validateProtocolOptions(protocolOptions) { * @param {ClientOptions.socketOptions} socketOptions * @private */ -function validateSocketOptions(socketOptions) { +function validateSocketOptions(socketOptions: ClientOptions["socketOptions"]) { if (!socketOptions) { throw new TypeError('socketOptions not defined in options'); } @@ -256,7 +260,7 @@ function validateSocketOptions(socketOptions) { * @param {ClientOptions} options * @private */ -function validateAuthenticationOptions(options) { +function validateAuthenticationOptions(options: ClientOptions) { if (!options.authProvider) { const credentials = options.credentials; if (credentials) { @@ -278,7 +282,7 @@ function validateAuthenticationOptions(options) { * @param {ClientOptions.encoding} encodingOptions * @private */ -function validateEncodingOptions(encodingOptions) { +function validateEncodingOptions(encodingOptions: ClientOptions["encoding"]) { if (encodingOptions.map) { const mapConstructor = encodingOptions.map; if (typeof mapConstructor !== 'function' || @@ -330,7 +334,7 @@ function validateMonitorReporting(options) { * Sets the default options that depend on the protocol version and other metadata. 
* @param {Client} client */ -function setMetadataDependent(client) { +function setMetadataDependent(client: Client) { const version = client.controlConnection.protocolVersion; let coreConnectionsPerHost = coreConnectionsPerHostV3; let maxRequestsPerConnection = maxRequestsPerConnectionV3; @@ -349,13 +353,22 @@ function setMetadataDependent(client) { {}, { coreConnectionsPerHost, maxRequestsPerConnection }, client.options.pooling); } -exports.extend = extend; -exports.defaultOptions = defaultOptions; -exports.coreConnectionsPerHostV2 = coreConnectionsPerHostV2; -exports.coreConnectionsPerHostV3 = coreConnectionsPerHostV3; -exports.maxRequestsPerConnectionV2 = maxRequestsPerConnectionV2; -exports.maxRequestsPerConnectionV3 = maxRequestsPerConnectionV3; -exports.setMetadataDependent = setMetadataDependent; -exports.continuousPageUnitBytes = continuousPageUnitBytes; -exports.continuousPageDefaultSize = continuousPageDefaultSize; -exports.continuousPageDefaultHighWaterMark = continuousPageDefaultHighWaterMark; +export { + continuousPageDefaultHighWaterMark, continuousPageDefaultSize, continuousPageUnitBytes, coreConnectionsPerHostV2, + coreConnectionsPerHostV3, defaultOptions, extend, maxRequestsPerConnectionV2, + maxRequestsPerConnectionV3, + setMetadataDependent +}; + +export default { + extend, + defaultOptions, + coreConnectionsPerHostV2, + coreConnectionsPerHostV3, + maxRequestsPerConnectionV2, + maxRequestsPerConnectionV3, + setMetadataDependent, + continuousPageUnitBytes, + continuousPageDefaultSize, + continuousPageDefaultHighWaterMark +}; \ No newline at end of file diff --git a/lib/client.js b/lib/client.js deleted file mode 100644 index fae8ff83d..000000000 --- a/lib/client.js +++ /dev/null @@ -1,1180 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const events = require('events'); -const util = require('util'); - -const utils = require('./utils.js'); -const errors = require('./errors.js'); -const types = require('./types'); -const { ProfileManager } = require('./execution-profile'); -const requests = require('./requests'); -const clientOptions = require('./client-options'); -const ClientState = require('./metadata/client-state'); -const description = require('../package.json').description; -const { version } = require('../package.json'); -const { DefaultExecutionOptions } = require('./execution-options'); -const ControlConnection = require('./control-connection'); -const RequestHandler = require('./request-handler'); -const PrepareHandler = require('./prepare-handler'); -const InsightsClient = require('./insights-client'); -const cloud = require('./datastax/cloud'); -const GraphExecutor = require('./datastax/graph/graph-executor'); -const promiseUtils = require('./promise-utils'); - -/** - * Max amount of pools being warmup in parallel, when warmup is enabled - * @private - */ -const warmupLimit = 32; - -/** - * Client options. - *

- * <p>While the driver provides lots of extensibility points and configurability, few client options are required.</p>
- * <p>Default values for all settings are designed to be suitable for the majority of use cases; you should avoid
- * fine-tuning them when not needed.</p>
- * <p>See [Client constructor]{@link Client} documentation for recommended options.</p>
- * @typedef {Object} ClientOptions
- * @property {Array.<string>} contactPoints
- * Array of addresses or host names of the nodes to add as contact points.
- * <p>
- * Contact points are addresses of Cassandra nodes that the driver uses to discover the cluster topology.
- * </p>
- * <p>
- * Only one contact point is required (the driver will retrieve the address of the other nodes automatically),
- * but it is usually a good idea to provide more than one contact point, because if that single contact point is
- * unavailable, the driver will not be able to initialize correctly.
- * </p>
- * @property {String} [localDataCenter] The local data center to use. - *

- * If using DCAwareRoundRobinPolicy (default), this option is required and only hosts from this data center are - * connected to and used in query plans. - *

- * @property {String} [keyspace] The logged keyspace for all the connections created within the {@link Client} instance. - * @property {Object} [credentials] An object containing the username and password for plain-text authentication. - * It configures the authentication provider to be used against Apache Cassandra's PasswordAuthenticator or DSE's - * DseAuthenticator, when default auth scheme is plain-text. - *

- * Note that you should configure either credentials or authProvider to connect to an - * auth-enabled cluster, but not both. - *

- * @property {String} [credentials.username] The username to use for plain-text authentication. - * @property {String} [credentials.password] The password to use for plain-text authentication. - * @property {Uuid} [id] A unique identifier assigned to a {@link Client} object, that will be communicated to the - * server (DSE 6.0+) to identify the client instance created with this options. When not defined, the driver will - * generate a random identifier. - * @property {String} [applicationName] An optional setting identifying the name of the application using - * the {@link Client} instance. - *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

- * @property {String} [applicationVersion] An optional setting identifying the version of the application using - * the {@link Client} instance. - *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

- * @property {Object} [monitorReporting] Options for reporting mechanism from the client to the DSE server, for - * versions that support it. - * @property {Boolean} [monitorReporting.enabled=true] Determines whether the reporting mechanism is enabled. - * Defaults to true. - * @property {Object} [cloud] The options to connect to a cloud instance. - * @property {String|URL} cloud.secureConnectBundle Determines the file path for the credentials file bundle. - * @property {Number} [refreshSchemaDelay] The default window size in milliseconds used to debounce node list and schema - * refresh metadata requests. Default: 1000. - * @property {Boolean} [isMetadataSyncEnabled] Determines whether client-side schema metadata retrieval and update is - * enabled. - *

Setting this value to false will cause keyspace information not to be automatically loaded, affecting - * replica calculation per token in the different keyspaces. When disabling metadata synchronization, use - * [Metadata.refreshKeyspaces()]{@link module:metadata~Metadata#refreshKeyspaces} to keep keyspace information up to - * date or token-awareness will not work correctly.

- * Default: true. - * @property {Boolean} [prepareOnAllHosts] Determines if the driver should prepare queries on all hosts in the cluster. - * Default: true. - * @property {Boolean} [rePrepareOnUp] Determines if the driver should re-prepare all cached prepared queries on a - * host when it marks it back up. - * Default: true. - * @property {Number} [maxPrepared] Determines the maximum amount of different prepared queries before evicting items - * from the internal cache. Reaching a high threshold hints that the queries are not being reused, like when - * hard-coding parameter values inside the queries. - * Default: 500. - * @property {Object} [policies] - * @property {LoadBalancingPolicy} [policies.loadBalancing] The load balancing policy instance to be used to determine - * the coordinator per query. - * @property {RetryPolicy} [policies.retry] The retry policy. - * @property {ReconnectionPolicy} [policies.reconnection] The reconnection policy to be used. - * @property {AddressTranslator} [policies.addressResolution] The address resolution policy. - * @property {SpeculativeExecutionPolicy} [policies.speculativeExecution] The SpeculativeExecutionPolicy - * instance to be used to determine if the client should send speculative queries when the selected host takes more - * time than expected. - *

- * Default: [NoSpeculativeExecutionPolicy]{@link - * module:policies/speculativeExecution~NoSpeculativeExecutionPolicy} - *

- * @property {TimestampGenerator} [policies.timestampGeneration] The client-side - * [query timestamp generator]{@link module:policies/timestampGeneration~TimestampGenerator}. - *

- * Default: [MonotonicTimestampGenerator]{@link module:policies/timestampGeneration~MonotonicTimestampGenerator} - * - *

- *

Use null to disable client-side timestamp generation.

- * @property {QueryOptions} [queryOptions] Default options for all queries. - * @property {Object} [pooling] Pooling options. - * @property {Number} [pooling.heartBeatInterval] The amount of idle time in milliseconds that has to pass before the - * driver issues a request on an active connection to avoid idle time disconnections. Default: 30000. - * @property {Object} [pooling.coreConnectionsPerHost] Associative array containing amount of connections per host - * distance. - * @property {Number} [pooling.maxRequestsPerConnection] The maximum number of requests per connection. The default - * value is: - *
- * <ul>
- *   <li>For modern protocol versions (v3 and above): 2048</li>
- *   <li>For older protocol versions (v1 and v2): 128</li>
- * </ul>
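// Illustrative sketch (editor's addition, not part of this diff): configuring the pooling options
// documented above. Values are examples only; 'cassandra-driver' refers to this package.
const cassandra = require('cassandra-driver');
const { distance } = cassandra.types;

const pooledClient = new cassandra.Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  pooling: {
    coreConnectionsPerHost: { [distance.local]: 2, [distance.remote]: 1 },
    maxRequestsPerConnection: 2048, // default for protocol v3 and above
    heartBeatInterval: 30000,
    warmup: true
  }
});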
- * @property {Boolean} [pooling.warmup] Determines if all connections to hosts in the local datacenter must be opened on - * connect. Default: true. - * @property {Object} [protocolOptions] - * @property {Number} [protocolOptions.port] The port to use to connect to the Cassandra host. If not set through this - * method, the default port (9042) will be used instead. - * @property {Number} [protocolOptions.maxSchemaAgreementWaitSeconds] The maximum time in seconds to wait for schema - * agreement between nodes before returning from a DDL query. Default: 10. - * @property {Number} [protocolOptions.maxVersion] When set, it limits the maximum protocol version used to connect to - * the nodes. - * Useful for using the driver against a cluster that contains nodes with different major/minor versions of Cassandra. - * @property {Boolean} [protocolOptions.noCompact] When set to true, enables the NO_COMPACT startup option. - *

- * When this option is supplied, SELECT, UPDATE, DELETE, and BATCH
- * statements on COMPACT STORAGE tables function in "compatibility" mode which allows seeing these tables
- * as if they were "regular" CQL tables.

- *

- * This option only affects interactions with tables using COMPACT STORAGE and is only
- * supported by C* 3.0.16+, 3.11.2+, 4.0+ and DSE 6.0+.
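// Illustrative sketch (editor's addition, not part of this diff): enabling the NO_COMPACT startup option
// described above. Contact point and data center values are placeholders.
const cassandra = require('cassandra-driver');

const legacyClient = new cassandra.Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  protocolOptions: { port: 9042, noCompact: true }
});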

- * @property {Object} [socketOptions] - * @property {Number} [socketOptions.connectTimeout] Connection timeout in milliseconds. Default: 5000. - * @property {Number} [socketOptions.defunctReadTimeoutThreshold] Determines the amount of requests that simultaneously - * have to timeout before closing the connection. Default: 64. - * @property {Boolean} [socketOptions.keepAlive] Whether to enable TCP keep-alive on the socket. Default: true. - * @property {Number} [socketOptions.keepAliveDelay] TCP keep-alive delay in milliseconds. Default: 0. - * @property {Number} [socketOptions.readTimeout] Per-host read timeout in milliseconds. - *

- * Please note that this is not the maximum time a call to {@link Client#execute} may have to wait;
- * this is the maximum time that call will wait for one particular Cassandra host, but other hosts will be tried if
- * one of them times out. In other words, a {@link Client#execute} call may theoretically wait up to
- * readTimeout * number_of_cassandra_hosts (though the total number of hosts tried for a given query also
- * depends on the LoadBalancingPolicy in use).
- * <p>When setting this value, keep in mind the following:</p>
- * <ul>
- *   <li>the timeout settings used on the Cassandra side (*_request_timeout_in_ms in cassandra.yaml) should be taken
- *     into account when picking a value for this read timeout. You should pick a value a couple of seconds greater than
- *     the Cassandra timeout settings.</li>
- *   <li>the read timeout is only approximate and only controls the timeout to one Cassandra host, not the full query.</li>
- * </ul>
- * Setting a value of 0 disables read timeouts. Default: 12000. - * @property {Boolean} [socketOptions.tcpNoDelay] When set to true, it disables the Nagle algorithm. Default: true. - * @property {Number} [socketOptions.coalescingThreshold] Buffer length in bytes use by the write queue before flushing - * the frames. Default: 8000. - * @property {AuthProvider} [authProvider] Provider to be used to authenticate to an auth-enabled cluster. - * @property {RequestTracker} [requestTracker] The instance of RequestTracker used to monitor or log requests executed - * with this instance. - * @property {Object} [sslOptions] Client-to-node ssl options. When set the driver will use the secure layer. - * You can specify cert, ca, ... options named after the Node.js tls.connect() options. - *

- * It uses the same default values as Node.js tls.connect() except for rejectUnauthorized - * which is set to false by default (for historical reasons). This setting is likely to change - * in upcoming versions to enable validation by default. - *
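// Illustrative sketch (editor's addition, not part of this diff): client-to-node TLS using the sslOptions
// described above. The CA file path is a placeholder; rejectUnauthorized is set explicitly here rather than
// relying on the historical default of false.
const fs = require('fs');
const cassandra = require('cassandra-driver');

const secureClient = new cassandra.Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  sslOptions: {
    ca: [fs.readFileSync('/path/to/ca.crt')],
    rejectUnauthorized: true
  }
});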

- * @property {Object} [encoding] Encoding options. - * @property {Function} [encoding.map] Map constructor to use for Cassandra map type encoding and decoding. - * If not set, it will default to Javascript Object with map keys as property names. - * @property {Function} [encoding.set] Set constructor to use for Cassandra set type encoding and decoding. - * If not set, it will default to Javascript Array. - * @property {Boolean} [encoding.copyBuffer] Determines if the network buffer should be copied for buffer based data - * types (blob, uuid, timeuuid and inet). - *

- * Setting it to true will cause that the network buffer is copied for each row value of those types, - * causing additional allocations but freeing the network buffer to be reused. - * Setting it to true is a good choice for cases where the Row and ResultSet returned by the queries are long-lived - * objects. - *

- *

- * Setting it to false will cause less overhead and the reference of the network buffer to be maintained until the row - * / result set are de-referenced. - * Default: true. - *

- * @property {Boolean} [encoding.useUndefinedAsUnset] Valid for Cassandra 2.2 and above. Determines that, if a parameter - * is set to - * undefined it should be encoded as unset. - *

- * By default, ECMAScript undefined is encoded as null in the driver. Cassandra 2.2 - * introduced the concept of unset. - * At driver level, you can set a parameter to unset using the field types.unset. Setting this flag to - * true allows you to use ECMAScript undefined as Cassandra unset. - *

- *

- * Default: true. - *
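// Illustrative sketch (editor's addition, not part of this diff): the encoding options described above,
// using the ES Map/Set constructors for CQL maps/sets, treating undefined as unset and decoding bigint as BigInt.
const cassandra = require('cassandra-driver');

const typedClient = new cassandra.Client({
  contactPoints: ['10.0.1.101'],
  localDataCenter: 'datacenter1',
  encoding: {
    map: Map,
    set: Set,
    useUndefinedAsUnset: true,
    useBigIntAsLong: true
  }
});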

- * @property {Boolean} [encoding.useBigIntAsLong] Use [BigInt ECMAScript type](https://tc39.github.io/proposal-bigint/) - * to represent CQL bigint and counter data types. - * @property {Boolean} [encoding.useBigIntAsVarint] Use [BigInt ECMAScript - * type](https://tc39.github.io/proposal-bigint/) to represent CQL varint data type. - * @property {Array.} [profiles] The array of [execution profiles]{@link ExecutionProfile}. - * @property {Function} [promiseFactory] Function to be used to create a Promise from a - * callback-style function. - *

- * Promise libraries often provide different methods to create a promise. For example, you can use Bluebird's - * Promise.fromCallback() method. - *

- *

- * By default, the driver will use the - * [Promise constructor]{@link https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise}. - *

- */ - -/** - * Query options - * @typedef {Object} QueryOptions - * @property {Boolean} [autoPage] Determines if the driver must retrieve the following result pages automatically. - *

- * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. For more information, - * check the - * [paging results documentation]{@link https://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. - *

- * @property {Boolean} [captureStackTrace] Determines if the stack trace before the query execution should be - * maintained. - *

- * Useful for debugging purposes, it should be set to false under production environment as it adds an - * unnecessary overhead to each execution. - *

- * Default: false. - * @property {Number} [consistency] [Consistency level]{@link module:types~consistencies}. - *

- * Defaults to localOne for Apache Cassandra and DSE deployments. - * For DataStax Astra, it defaults to localQuorum. - *

- * @property {Object} [customPayload] Key-value payload to be passed to the server. On the Cassandra side, - * implementations of QueryHandler can use this data. - * @property {String} [executeAs] The user or role name to act as when executing this statement. - *

When set, it executes as a different user/role than the one currently authenticated (a.k.a. proxy execution).

- *

This feature is only available in DSE 5.1+.

- * @property {String|ExecutionProfile} [executionProfile] Name or instance of the [profile]{@link ExecutionProfile} to
- * be used for this execution. If not set, it will use the "default" execution profile.
- * @property {Number} [fetchSize] Amount of rows to retrieve per page.
- * @property {Array|Array<Array>} [hints] Type hints for parameters given in the query, ordered as for the parameters.
- * <p>For batch queries, an array of such arrays, ordered as with the queries in the batch.</p>
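// Illustrative sketch (editor's addition, not part of this diff): passing some of the QueryOptions described
// above to execute(). Assumes an existing connected `client`, a `users` table and an `id` value; run inside
// an async function.
const result = await client.execute('SELECT name FROM users WHERE id = ?', [id], {
  prepare: true,              // execute as a prepared statement (recommended)
  fetchSize: 100,             // rows per page
  executionProfile: 'default' // name of the execution profile to use
});
console.log(result.first());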

- * @property {Host} [host] The host that should handle the query. - *

- * Use of this option is heavily discouraged and should only be used in the following cases: - *

- *
- * <ol>
- *   <li>
- *     Querying node-local tables, such as tables in the system and system_views
- *     keyspaces.
- *   </li>
- *   <li>
- *     Applying a series of schema changes, where it may be advantageous to execute schema changes in sequence on the
- *     same node.
- *   </li>
- * </ol>
- * <p>
- * Configuring a specific host causes the configured
- * [LoadBalancingPolicy]{@link module:policies/loadBalancing~LoadBalancingPolicy} to be completely bypassed.
- * However, if the load balancing policy dictates that the host is at a
- * [distance of ignored]{@link module:types~distance} or there is no active connectivity to the host, the request will
- * fail with a [NoHostAvailableError]{@link module:errors~NoHostAvailableError}.
- * </p>
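// Illustrative sketch (editor's addition, not part of this diff): targeting a specific host for a node-local
// query, one of the narrow use cases listed above. Assumes an existing connected `client`; picking the first
// host is only for illustration.
const host = client.hosts.values()[0];
const localRow = (await client.execute('SELECT * FROM system.local', [], { host })).first();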

- * @property {Boolean} [isIdempotent] Defines whether the query can be applied multiple times without changing the result - * beyond the initial application. - *

- * The query execution idempotence can be used at [RetryPolicy]{@link module:policies/retry~RetryPolicy} level to
- * determine if a statement can be retried in case of request error or write timeout.

- *

Default: false.
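// Illustrative sketch (editor's addition, not part of this diff): marking an execution as idempotent so the
// retry policy may retry it on request error or write timeout. Table, columns and values are placeholders.
await client.execute('UPDATE users SET name = ? WHERE id = ?', ['alice', id], {
  prepare: true,
  isIdempotent: true
});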

- * @property {String} [keyspace] Specifies the keyspace for the query. It is used for the following: - *
- * <ol>
- *   <li>To indicate what keyspace the statement is applicable to (protocol V5+ only). This is useful when the
- *     query does not provide an explicit keyspace and you want to override the current {@link Client#keyspace}.</li>
- *   <li>For query routing when the query operates on a different keyspace than the current {@link Client#keyspace}.</li>
- * </ol>
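// Illustrative sketch (editor's addition, not part of this diff): overriding the keyspace for a single
// execution (protocol v5+), as described in the list above. The keyspace name is a placeholder.
await client.execute('SELECT * FROM users', [], { prepare: true, keyspace: 'other_keyspace' });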
- * @property {Boolean} [logged] Determines if the batch should be written to the batchlog. Only valid for - * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: true. - * @property {Boolean} [counter] Determines if its a counter batch. Only valid for - * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: false. - * @property {Buffer|String} [pageState] Buffer or string token representing the paging state. - *

Useful for manual paging, if provided, the query will be executed starting from a given paging state.

- * @property {Boolean} [prepare] Determines if the query must be executed as a prepared statement. - * @property {Number} [readTimeout] When defined, it overrides the default read timeout - * (socketOptions.readTimeout) in milliseconds for this execution per coordinator. - *

- * Suitable for statements for which the coordinator may allow a longer server-side timeout, for example aggregation - * queries. - *

- *

- * A value of 0 disables client side read timeout for the execution. Default: undefined. - *

- * @property {RetryPolicy} [retry] Retry policy for the query. - *

- * This property can be used to specify a different [retry policy]{@link module:policies/retry} to the one specified - * in the {@link ClientOptions}.policies. - *

- * @property {Array} [routingIndexes] Index of the parameters that are part of the partition key to determine - * the routing. - * @property {Buffer|Array} [routingKey] Partition key(s) to determine which coordinator should be used for the query. - * @property {Array} [routingNames] Array of the parameters names that are part of the partition key to determine the - * routing. Only valid for non-prepared requests, it's recommended that you use the prepare flag instead. - * @property {Number} [serialConsistency] Serial consistency is the consistency level for the serial phase of - * conditional updates. - * This option will be ignored for anything else that a conditional update/insert. - * @property {Number|Long} [timestamp] The default timestamp for the query in microseconds from the unix epoch - * (00:00:00, January 1st, 1970). - *

If provided, this will replace the server side assigned timestamp as default timestamp.

- *

Use [generateTimestamp()]{@link module:types~generateTimestamp} utility method to generate a valid timestamp - * based on a Date and microseconds parts.

- * @property {Boolean} [traceQuery] Enable query tracing for the execution. Use query tracing to diagnose performance - * problems related to query executions. Default: false. - *

To retrieve trace, you can call [Metadata.getTrace()]{@link module:metadata~Metadata#getTrace} method.

- * @property {Object} [graphOptions] Default options for graph query executions. - *

- * These options are meant to provide defaults for all graph query executions. Consider using - * [execution profiles]{@link ExecutionProfile} if you plan to reuse different set of options across different - * query executions. - *

- * @property {String} [graphOptions.language] The graph language to use in graph queries. Default: - * 'gremlin-groovy'. - * @property {String} [graphOptions.name] The graph name to be used in all graph queries. - *

- * This property is required but there is no default value for it. This value can be overridden at query level. - *

- * @property {Number} [graphOptions.readConsistency] Overrides the - * [consistency level]{@link module:types~consistencies} - * defined in the query options for graph read queries. - * @property {Number} [graphOptions.readTimeout] Overrides the default per-host read timeout (in milliseconds) for all - * graph queries. Default: 0. - *

- * Use null to reset the value and use the default on socketOptions.readTimeout . - *

- * @property {String} [graphOptions.source] The graph traversal source name to use in graph queries. Default: - * 'g'. - * @property {Number} [graphOptions.writeConsistency] Overrides the [consistency - * level]{@link module:types~consistencies} defined in the query options for graph write queries. - */ - -/** - * Creates a new instance of {@link Client}. - * @classdesc - * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to - * execute CQL statements. - *

- * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node - * to use per each query execution, when it should retry failed or timed-out executions and how reconnection to down - * nodes should be made. - *

- * @extends EventEmitter - * @param {ClientOptions} options The options for this instance. - * @example Creating a new client instance - * const client = new Client({ - * contactPoints: ['10.0.1.101', '10.0.1.102'], - * localDataCenter: 'datacenter1' - * }); - * @example Executing a query - * const result = await client.connect(); - * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); - * @example Executing a query - * const result = await client.execute('SELECT key FROM system.local'); - * const row = result.first(); - * console.log(row['key']); - * @constructor - */ -function Client(options) { - events.EventEmitter.call(this); - this.options = clientOptions.extend({ logEmitter: this.emit.bind(this), id: types.Uuid.random() }, options); - Object.defineProperty(this, 'profileManager', { value: new ProfileManager(this.options) }); - Object.defineProperty(this, 'controlConnection', { - value: new ControlConnection(this.options, this.profileManager), writable: true } - ); - Object.defineProperty(this, 'insightsClient', { value: new InsightsClient(this)}); - - //Unlimited amount of listeners for internal event queues by default - this.setMaxListeners(0); - this.connected = false; - this.isShuttingDown = false; - /** - * Gets the name of the active keyspace. - * @type {String} - */ - this.keyspace = options.keyspace; - /** - * Gets the schema and cluster metadata information. - * @type {Metadata} - */ - this.metadata = this.controlConnection.metadata; - /** - * Gets an associative array of cluster hosts. - * @type {HostMap} - */ - this.hosts = this.controlConnection.hosts; - - /** - * The [ClientMetrics]{@link module:metrics~ClientMetrics} instance used to expose measurements of its internal - * behavior and of the server as seen from the driver side. - *

By default, a [DefaultMetrics]{@link module:metrics~DefaultMetrics} instance is used.

- * @type {ClientMetrics} - */ - this.metrics = this.options.metrics; - - this._graphExecutor = new GraphExecutor(this, options, this._execute); -} - -util.inherits(Client, events.EventEmitter); - -/** - * Emitted when a new host is added to the cluster. - *
- * <ul>
- *   <li>{@link Host} The host being added.</li>
- * </ul>
- * @event Client#hostAdd
- */
-/**
- * Emitted when a host is removed from the cluster
- * <ul>
- *   <li>{@link Host} The host being removed.</li>
- * </ul>
- * @event Client#hostRemove
- */
-/**
- * Emitted when a host in the cluster changed status from down to up.
- * <ul>
- *   <li>{@link Host host} The host that changed the status.</li>
- * </ul>
- * @event Client#hostUp
- */
-/**
- * Emitted when a host in the cluster changed status from up to down.
- * <ul>
- *   <li>{@link Host host} The host that changed the status.</li>
- * </ul>
- * @event Client#hostDown
- */
-
-/**
- * Attempts to connect to one of the [contactPoints]{@link ClientOptions} and discovers the rest of the nodes of the
- * cluster.

When the {@link Client} is already connected, it resolves immediately.

- *

It returns a Promise when a callback is not provided.

- * @param {function} [callback] The optional callback that is invoked when the pool is connected or it failed to - * connect. - * @example Usage example - * await client.connect(); - */ -Client.prototype.connect = function (callback) { - if (this.connected && callback) { - // Avoid creating Promise to immediately resolve them - return callback(); - } - - return promiseUtils.optionalCallback(this._connect(), callback); -}; - -/** - * Async-only version of {@link Client#connect()}. - * @private - */ -Client.prototype._connect = async function () { - if (this.connected) { - return; - } - - if (this.isShuttingDown) { - //it is being shutdown, don't allow further calls to connect() - throw new errors.NoHostAvailableError(null, 'Connecting after shutdown is not supported'); - } - - if (this.connecting) { - return promiseUtils.fromEvent(this, 'connected'); - } - - this.connecting = true; - this.log('info', util.format("Connecting to cluster using '%s' version %s", description, version)); - - try { - await cloud.init(this.options); - await this.controlConnection.init(); - this.hosts = this.controlConnection.hosts; - await this.profileManager.init(this, this.hosts); - - if (this.keyspace) { - await RequestHandler.setKeyspace(this); - } - - clientOptions.setMetadataDependent(this); - - await this._warmup(); - - } catch (err) { - // We should close the pools (if any) and reset the state to allow successive calls to connect() - await this.controlConnection.reset(); - this.connected = false; - this.connecting = false; - this.emit('connected', err); - throw err; - } - - this._setHostListeners(); - - // Set the distance of the control connection host relatively to this instance - this.profileManager.getDistance(this.controlConnection.host); - this.insightsClient.init(); - this.connected = true; - this.connecting = false; - this.emit('connected'); -}; - -/** - * Executes a query on an available connection. - *

The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag.

- *

- * Some execution failures can be handled transparently by the driver, according to the - * [RetryPolicy]{@linkcode module:policies/retry~RetryPolicy} or the - * [SpeculativeExecutionPolicy]{@linkcode module:policies/speculativeExecution} used. - *

- *

It returns a Promise when a callback is not provided.

- * @param {String} query The query to execute. - * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names - * as keys and its value. - * @param {QueryOptions} [options] The query options for the execution. - * @param {ResultCallback} [callback] Executes callback(err, result) when execution completed. When not defined, the - * method will return a promise. - * @example Promise-based API, using async/await - * const query = 'SELECT name, email FROM users WHERE id = ?'; - * const result = await client.execute(query, [ id ], { prepare: true }); - * const row = result.first(); - * console.log('%s: %s', row['name'], row['email']); - * @example Callback-based API - * const query = 'SELECT name, email FROM users WHERE id = ?'; - * client.execute(query, [ id ], { prepare: true }, function (err, result) { - * assert.ifError(err); - * const row = result.first(); - * console.log('%s: %s', row['name'], row['email']); - * }); - * @see {@link ExecutionProfile} to reuse a set of options across different query executions. - */ -Client.prototype.execute = function (query, params, options, callback) { - // This method acts as a wrapper for the async method _execute() - - if (!callback) { - // Set default argument values for optional parameters - if (typeof options === 'function') { - callback = options; - options = null; - } else if (typeof params === 'function') { - callback = params; - params = null; - } - } - - try { - const execOptions = DefaultExecutionOptions.create(options, this); - return promiseUtils.optionalCallback(this._execute(query, params, execOptions), callback); - } - catch (err) { - // There was an error when parsing the user options - if (callback) { - return callback(err); - } - - return Promise.reject(err); - } -}; - -/** - * Executes a graph query. - *

It returns a Promise when a callback is not provided.

- * @param {String} query The gremlin query. - * @param {Object|null} [parameters] An associative array containing the key and values of the parameters. - * @param {GraphQueryOptions|null} [options] The graph query options. - * @param {Function} [callback] Function to execute when the response is retrieved, taking two arguments: - * err and result. When not defined, the method will return a promise. - * @example Promise-based API, using async/await - * const result = await client.executeGraph('g.V()'); - * // Get the first item (vertex, edge, scalar value, ...) - * const vertex = result.first(); - * console.log(vertex.label); - * @example Callback-based API - * client.executeGraph('g.V()', (err, result) => { - * const vertex = result.first(); - * console.log(vertex.label); - * }); - * @example Iterating through the results - * const result = await client.executeGraph('g.E()'); - * for (let edge of result) { - * console.log(edge.label); // created - * }); - * @example Using result.forEach() - * const result = await client.executeGraph('g.V().hasLabel("person")'); - * result.forEach(function(vertex) { - * console.log(vertex.type); // vertex - * console.log(vertex.label); // person - * }); - * @see {@link ExecutionProfile} to reuse a set of options across different query executions. - */ -Client.prototype.executeGraph = function (query, parameters, options, callback) { - callback = callback || (options ? options : parameters); - - if (typeof callback === 'function') { - parameters = typeof parameters !== 'function' ? parameters : null; - return promiseUtils.toCallback(this._graphExecutor.send(query, parameters, options), callback); - } - - return this._graphExecutor.send(query, parameters, options); -}; - -/** - * Executes the query and calls rowCallback for each row as soon as they are received. Calls the final - * callback after all rows have been sent, or when there is an error. - *

- * The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag. - *

- * @param {String} query The query to execute - * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names - * as keys and its value. - * @param {QueryOptions} [options] The query options. - * @param {function} rowCallback Executes rowCallback(n, row) per each row received, where n is the row - * index and row is the current Row. - * @param {function} [callback] Executes callback(err, result) after all rows have been received. - *

- * When dealing with paged results, the [ResultSet#nextPage()]{@link module:types~ResultSet#nextPage} method can be used
- * to retrieve the following page. In that case, rowCallback() will be called again for each row and
- * the final callback will be invoked when all rows in the following page have been retrieved.

- * @example Using per-row callback and arrow functions - * client.eachRow(query, params, { prepare: true }, (n, row) => console.log(n, row), err => console.error(err)); - * @example Overloads - * client.eachRow(query, rowCallback); - * client.eachRow(query, params, rowCallback); - * client.eachRow(query, params, options, rowCallback); - * client.eachRow(query, params, rowCallback, callback); - * client.eachRow(query, params, options, rowCallback, callback); - */ -Client.prototype.eachRow = function (query, params, options, rowCallback, callback) { - if (!callback && rowCallback && typeof options === 'function') { - callback = utils.validateFn(rowCallback, 'rowCallback'); - rowCallback = options; - } else { - callback = callback || utils.noop; - rowCallback = utils.validateFn(rowCallback || options || params, 'rowCallback'); - } - - params = typeof params !== 'function' ? params : null; - - let execOptions; - try { - execOptions = DefaultExecutionOptions.create(options, this, rowCallback); - } - catch (e) { - return callback(e); - } - - let rowLength = 0; - - const nextPage = () => promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); - - function pageCallback (err, result) { - if (err) { - return callback(err); - } - // Next requests in case paging (auto or explicit) is used - rowLength += result.rowLength; - - if (result.rawPageState !== undefined) { - // Use new page state as next request page state - execOptions.setPageState(result.rawPageState); - if (execOptions.isAutoPage()) { - // Issue next request for the next page - return nextPage(); - } - // Allows for explicit (manual) paging, in case the caller needs it - result.nextPage = nextPage; - } - - // Finished auto-paging - result.rowLength = rowLength; - callback(null, result); - } - - promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); -}; - -/** - * Executes the query and pushes the rows to the result stream as soon as they received. - *

- * The stream is a [ReadableStream]{@linkcode https://nodejs.org/api/stream.html#stream_class_stream_readable} object - * that emits rows. - * It can be piped downstream and provides automatic pause/resume logic (it buffers when not read). - *

- *

- * The query can be prepared (recommended) or not depending on {@link QueryOptions}.prepare flag. Retries on multiple - * hosts if needed. - *

- * @param {String} query The query to prepare and execute. - * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names - * as keys and its value - * @param {QueryOptions} [options] The query options. - * @param {function} [callback] executes callback(err) after all rows have been received or if there is an error - * @returns {ResultStream} - */ -Client.prototype.stream = function (query, params, options, callback) { - callback = callback || utils.noop; - // NOTE: the nodejs stream maintains yet another internal buffer - // we rely on the default stream implementation to keep memory - // usage reasonable. - const resultStream = new types.ResultStream({ objectMode: 1 }); - function onFinish(err, result) { - if (err) { - resultStream.emit('error', err); - } - if (result && result.nextPage ) { - // allows for throttling as per the - // default nodejs stream implementation - resultStream._valve(function pageValve() { - try { - result.nextPage(); - } - catch( ex ) { - resultStream.emit('error', ex ); - } - }); - return; - } - // Explicitly dropping the valve (closure) - resultStream._valve(null); - resultStream.add(null); - callback(err); - } - let sync = true; - this.eachRow(query, params, options, function rowCallback(n, row) { - resultStream.add(row); - }, function eachRowFinished(err, result) { - if (sync) { - // Prevent sync callback - return setImmediate(function eachRowFinishedImmediate() { - onFinish(err, result); - }); - } - onFinish(err, result); - }); - sync = false; - return resultStream; -}; - -/** - * Executes batch of queries on an available connection to a host. - *

It returns a Promise when a callback is not provided.

- * @param {Array.|Array.<{query, params}>} queries The queries to execute as an Array of strings or as an array - * of object containing the query and params - * @param {QueryOptions} [options] The query options. - * @param {ResultCallback} [callback] Executes callback(err, result) when the batch was executed - */ -Client.prototype.batch = function (queries, options, callback) { - if (!callback && typeof options === 'function') { - callback = options; - options = null; - } - - return promiseUtils.optionalCallback(this._batch(queries, options), callback); -}; - -/** - * Async-only version of {@link Client#batch()} . - * @param {Array.|Array.<{query, params}>}queries - * @param {QueryOptions} options - * @returns {Promise} - * @private - */ -Client.prototype._batch = async function (queries, options) { - if (!Array.isArray(queries)) { - throw new errors.ArgumentError('Queries should be an Array'); - } - - if (queries.length === 0) { - throw new errors.ArgumentError('Queries array should not be empty'); - } - - await this._connect(); - - const execOptions = DefaultExecutionOptions.create(options, this); - let queryItems; - - if (execOptions.isPrepared()) { - // use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. - const version = this.controlConnection.protocolVersion; - const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && options.keyspace || this.keyspace; - queryItems = await PrepareHandler.getPreparedMultiple( - this, execOptions.getLoadBalancingPolicy(), queries, queryKeyspace); - } else { - queryItems = new Array(queries.length); - - for (let i = 0; i < queries.length; i++) { - const item = queries[i]; - if (!item) { - throw new errors.ArgumentError(`Invalid query at index ${i}`); - } - - const query = typeof item === 'string' ? item : item.query; - if (!query) { - throw new errors.ArgumentError(`Invalid query at index ${i}`); - } - - queryItems[i] = { query, params: item.params }; - } - } - - const request = await this._createBatchRequest(queryItems, execOptions); - return await RequestHandler.send(request, execOptions, this); -}; - -/** - * Gets the host that are replicas of a given token. - * @param {String} keyspace - * @param {Buffer} token - * @returns {Array} - */ -Client.prototype.getReplicas = function (keyspace, token) { - return this.metadata.getReplicas(keyspace, token); -}; - -/** - * Gets a snapshot containing information on the connections pools held by this Client at the current time. - *

- * The information provided in the returned object only represents the state at the moment this method was called and - * it's not maintained in sync with the driver metadata. - *

- * @returns {ClientState} A [ClientState]{@linkcode module:metadata~ClientState} instance. - */ -Client.prototype.getState = function () { - return ClientState.from(this); -}; - -Client.prototype.log = utils.log; - -/** - * Closes all connections to all hosts. - *

It returns a Promise when a callback is not provided.

- * @param {Function} [callback] Optional callback to be invoked when finished closing all connections. - */ -Client.prototype.shutdown = function (callback) { - return promiseUtils.optionalCallback(this._shutdown(), callback); -}; - -/** @private */ -Client.prototype._shutdown = async function () { - this.log('info', 'Shutting down'); - - if (!this.hosts || !this.connected) { - // not initialized - this.connected = false; - return; - } - - if (this.connecting) { - this.log('warning', 'Shutting down while connecting'); - // wait until finish connecting for easier troubleshooting - await promiseUtils.fromEvent(this, 'connected'); - } - - this.connected = false; - this.isShuttingDown = true; - const hosts = this.hosts.values(); - - this.insightsClient.shutdown(); - - // Shutdown the ControlConnection before shutting down the pools - this.controlConnection.shutdown(); - this.options.policies.speculativeExecution.shutdown(); - - if (this.options.requestTracker) { - this.options.requestTracker.shutdown(); - } - - // go through all the host and shut down their pools - await Promise.all(hosts.map(h => h.shutdown(false))); -}; - -/** - * Waits until that the schema version in all nodes is the same or the waiting time passed. - * @param {Connection} connection - * @returns {Promise} - * @ignore - */ -Client.prototype._waitForSchemaAgreement = async function (connection) { - if (this.hosts.length === 1) { - return true; - } - - const start = process.hrtime(); - const maxWaitSeconds = this.options.protocolOptions.maxSchemaAgreementWaitSeconds; - - this.log('info', 'Waiting for schema agreement'); - - let versionsMatch; - - while (!versionsMatch && process.hrtime(start)[0] < maxWaitSeconds) { - versionsMatch = await this.metadata.compareSchemaVersions(connection); - - if (versionsMatch) { - this.log('info', 'Schema versions match'); - break; - } - - // Let some time pass before the next check - await promiseUtils.delay(500); - } - - return versionsMatch; -}; - -/** - * Waits for schema agreements and schedules schema metadata refresh. - * @param {Connection} connection - * @param event - * @returns {Promise} - * @ignore - * @internal - */ -Client.prototype.handleSchemaAgreementAndRefresh = async function (connection, event) { - let agreement = false; - - try { - agreement = await this._waitForSchemaAgreement(connection); - } catch (err) { - //we issue a warning but we continue with the normal flow - this.log('warning', 'There was an error while waiting for the schema agreement between nodes', err); - } - - if (!this.options.isMetadataSyncEnabled) { - return agreement; - } - - // Refresh metadata immediately - try { - await this.controlConnection.handleSchemaChange(event, true); - } catch (err) { - this.log('warning', 'There was an error while handling schema change', err); - } - - return agreement; -}; - -/** - * Connects and handles the execution of prepared and simple statements. 
- * @param {string} query - * @param {Array} params - * @param {ExecutionOptions} execOptions - * @returns {Promise} - * @private - */ -Client.prototype._execute = async function (query, params, execOptions) { - const version = this.controlConnection.protocolVersion; - - if (!execOptions.isPrepared() && params && !Array.isArray(params) && - !types.protocolVersion.supportsNamedParameters(version)) { - // Only Cassandra 2.1 and above supports named parameters - throw new errors.ArgumentError('Named parameters for simple statements are not supported, use prepare flag'); - } - - let request; - - if (!this.connected) { - // Micro optimization to avoid an async execution for a simple check - await this._connect(); - } - - if (!execOptions.isPrepared()) { - request = await this._createQueryRequest(query, execOptions, params); - } else { - const lbp = execOptions.getLoadBalancingPolicy(); - - // Use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. - const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && - execOptions.getKeyspace() || this.keyspace; - - const { queryId, meta } = await PrepareHandler.getPrepared(this, lbp, query, queryKeyspace); - request = await this._createExecuteRequest(query, queryId, execOptions, params, meta); - } - - return await RequestHandler.send(request, execOptions, this); -}; - -/** - * Sets the listeners for the nodes. - * @private - */ -Client.prototype._setHostListeners = function () { - function getHostUpListener(emitter, h) { - return () => emitter.emit('hostUp', h); - } - - function getHostDownListener(emitter, h) { - return () => emitter.emit('hostDown', h); - } - - const self = this; - - // Add status listeners when new nodes are added and emit hostAdd - this.hosts.on('add', function hostAddedListener(h) { - h.on('up', getHostUpListener(self, h)); - h.on('down', getHostDownListener(self, h)); - self.emit('hostAdd', h); - }); - - // Remove all listeners and emit hostRemove - this.hosts.on('remove', function hostRemovedListener(h) { - h.removeAllListeners(); - self.emit('hostRemove', h); - }); - - // Add status listeners for existing hosts - this.hosts.forEach(function (h) { - h.on('up', getHostUpListener(self, h)); - h.on('down', getHostDownListener(self, h)); - }); -}; - -/** - * Sets the distance to each host and when warmup is true, creates all connections to local hosts. - * @returns {Promise} - * @private - */ -Client.prototype._warmup = function () { - const hosts = this.hosts.values(); - - return promiseUtils.times(hosts.length, warmupLimit, async (index) => { - const h = hosts[index]; - const distance = this.profileManager.getDistance(h); - - if (distance === types.distance.ignored) { - return; - } - - if (this.options.pooling.warmup && distance === types.distance.local) { - try { - await h.warmupPool(this.keyspace); - } catch (err) { - // An error while trying to create a connection to one of the hosts. - // Warn the user and move on. - this.log('warning', `Connection pool to host ${h.address} could not be created: ${err}`, err); - } - } else { - h.initializePool(); - } - }); -}; - -/** - * @returns {Encoder} - * @private - */ -Client.prototype._getEncoder = function () { - const encoder = this.controlConnection.getEncoder(); - if (!encoder) { - throw new errors.DriverInternalError('Encoder is not defined'); - } - return encoder; -}; - -/** - * Returns a BatchRequest instance and fills the routing key information in the provided options. 
- * @private - */ -Client.prototype._createBatchRequest = async function (queryItems, info) { - const firstQuery = queryItems[0]; - if (!firstQuery.meta) { - return new requests.BatchRequest(queryItems, info); - } - - await this._setRoutingInfo(info, firstQuery.params, firstQuery.meta); - return new requests.BatchRequest(queryItems, info); -}; - -/** - * Returns an ExecuteRequest instance and fills the routing key information in the provided options. - * @private - */ -Client.prototype._createExecuteRequest = async function(query, queryId, info, params, meta) { - params = utils.adaptNamedParamsPrepared(params, meta.columns); - await this._setRoutingInfo(info, params, meta); - return new requests.ExecuteRequest(query, queryId, params, info, meta); -}; - -/** - * Returns a QueryRequest instance and fills the routing key information in the provided options. - * @private - */ -Client.prototype._createQueryRequest = async function (query, execOptions, params) { - await this.metadata.adaptUserHints(this.keyspace, execOptions.getHints()); - const paramsInfo = utils.adaptNamedParamsWithHints(params, execOptions); - this._getEncoder().setRoutingKeyFromUser(paramsInfo.params, execOptions, paramsInfo.keyIndexes); - - return new requests.QueryRequest(query, paramsInfo.params, execOptions, paramsInfo.namedParameters); -}; - -/** - * Sets the routing key based on the parameter values or the provided routing key components. - * @param {ExecutionOptions} execOptions - * @param {Array} params - * @param meta - * @private - */ -Client.prototype._setRoutingInfo = async function (execOptions, params, meta) { - const encoder = this._getEncoder(); - - if (!execOptions.getKeyspace() && meta.keyspace) { - execOptions.setKeyspace(meta.keyspace); - } - if (execOptions.getRoutingKey()) { - // Routing information provided by the user - return encoder.setRoutingKeyFromUser(params, execOptions); - } - if (Array.isArray(meta.partitionKeys)) { - // The partition keys are provided as part of the metadata for modern protocol versions - execOptions.setRoutingIndexes(meta.partitionKeys); - return encoder.setRoutingKeyFromMeta(meta, params, execOptions); - } - - // Older versions of the protocol (v3 and below) don't provide routing information - try { - const tableInfo = await this.metadata.getTable(meta.keyspace, meta.table); - - if (!tableInfo) { - // The schema data is not there, maybe it is being recreated, avoid setting the routing information - return; - } - - execOptions.setRoutingIndexes(tableInfo.partitionKeys.map(c => meta.columnsByName[c.name])); - // Skip parsing metadata next time - meta.partitionKeys = execOptions.getRoutingIndexes(); - encoder.setRoutingKeyFromMeta(meta, params, execOptions); - } catch (err) { - this.log('warning', util.format('Table %s.%s metadata could not be retrieved', meta.keyspace, meta.table)); - } -}; - -/** - * Callback used by execution methods. - * @callback ResultCallback - * @param {Error} err Error occurred in the execution of the query. - * @param {ResultSet} [result] Result of the execution of the query. - */ - -module.exports = Client; diff --git a/lib/client.ts b/lib/client.ts new file mode 100644 index 000000000..ff5829944 --- /dev/null +++ b/lib/client.ts @@ -0,0 +1,1422 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +//TODO: fix the types +import events from "events"; +import { ConnectionOptions } from "tls"; +import util from "util"; +import packageInfo from '../package.json'; +import { AuthProvider } from "./auth"; +import clientOptions from "./client-options"; +import Connection from "./connection"; +import ControlConnection from "./control-connection"; +import cloud from "./datastax/cloud/index"; +import type { GraphResultSet } from "./datastax/graph"; +import GraphExecutor from "./datastax/graph/graph-executor"; +import Encoder from "./encoder"; +import errors from "./errors"; +import { DefaultExecutionOptions, ExecutionOptions } from "./execution-options"; +import { ExecutionProfile, ProfileManager } from "./execution-profile"; +import { Host, type HostMap } from "./host"; +import InsightsClient from "./insights-client"; +import Metadata from "./metadata"; +import ClientState from "./metadata/client-state"; +import { ClientMetrics } from "./metrics"; +import { AddressTranslator } from "./policies/address-resolution"; +import { LoadBalancingPolicy } from "./policies/load-balancing"; +import { ReconnectionPolicy } from "./policies/reconnection"; +import { RetryPolicy } from "./policies/retry"; +import { SpeculativeExecutionPolicy } from "./policies/speculative-execution"; +import { TimestampGenerator } from "./policies/timestamp-generation"; +import PrepareHandler from "./prepare-handler"; +import promiseUtils from "./promise-utils"; +import RequestHandler from "./request-handler"; +import requests from "./requests"; +import { RequestTracker } from "./tracker"; +import types, { type consistencies, Long, ResultSet, ResultStream, Row, Uuid } from "./types/index"; +import utils, { AddressResolver, type ArrayOrObject, type EmptyCallback, type ValueCallback } from "./utils"; + +const version = packageInfo.version; +const description = packageInfo.description; + +/** + * Max amount of pools being warmup in parallel, when warmup is enabled + * @private + */ +const warmupLimit = 32; + +/** + * Client options. + *

While the driver provides lots of extensibility points and configurability, few client options are required.

+ *

+ * Default values for all settings are designed to be suitable for the majority of use cases; you should avoid
+ * fine-tuning them when not needed.

+ *

See [Client constructor]{@link Client} documentation for recommended options.

+ * @typedef {Object} ClientOptions + * @property {Array.} contactPoints + * Array of addresses or host names of the nodes to add as contact points. + *

+ * Contact points are addresses of Cassandra nodes that the driver uses to discover the cluster topology. + *

+ *

+ * Only one contact point is required (the driver will retrieve the addresses of the other nodes automatically),
+ * but it is usually a good idea to provide more than one contact point, because if that single contact point is
+ * unavailable, the driver will not be able to initialize correctly.
+ *

+ * @property {String} [localDataCenter] The local data center to use. + *

+ * If using DCAwareRoundRobinPolicy (default), this option is required and only hosts from this data center are + * connected to and used in query plans. + *

+ * @property {String} [keyspace] The logged keyspace for all the connections created within the {@link Client} instance. + * @property {Object} [credentials] An object containing the username and password for plain-text authentication. + * It configures the authentication provider to be used against Apache Cassandra's PasswordAuthenticator or DSE's + * DseAuthenticator, when default auth scheme is plain-text. + *

+ * Note that you should configure either credentials or authProvider to connect to an + * auth-enabled cluster, but not both. + *
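For illustration, a minimal sketch of the plain-text authentication described above; the package import, contact point and credential values are placeholders and not part of this change:

    import { Client } from 'cassandra-driver';

    // Plain-text credentials configure the default auth provider
    // (PasswordAuthenticator / DseAuthenticator); do not combine with authProvider.
    const client = new Client({
      contactPoints: ['10.0.1.101'],
      localDataCenter: 'datacenter1',
      credentials: { username: 'cassandra', password: 'cassandra' }
    });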

+ * @property {String} [credentials.username] The username to use for plain-text authentication. + * @property {String} [credentials.password] The password to use for plain-text authentication. + * @property {Uuid} [id] A unique identifier assigned to a {@link Client} object, that will be communicated to the + * server (DSE 6.0+) to identify the client instance created with this options. When not defined, the driver will + * generate a random identifier. + * @property {String} [applicationName] An optional setting identifying the name of the application using + * the {@link Client} instance. + *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {String} [applicationVersion] An optional setting identifying the version of the application using + * the {@link Client} instance. + *

This value is passed to DSE and is useful as metadata for describing a client connection on the server side.

+ * @property {Object} [monitorReporting] Options for reporting mechanism from the client to the DSE server, for + * versions that support it. + * @property {Boolean} [monitorReporting.enabled=true] Determines whether the reporting mechanism is enabled. + * Defaults to true. + * @property {Object} [cloud] The options to connect to a cloud instance. + * @property {String|URL} cloud.secureConnectBundle Determines the file path for the credentials file bundle. + * @property {Number} [refreshSchemaDelay] The default window size in milliseconds used to debounce node list and schema + * refresh metadata requests. Default: 1000. + * @property {Boolean} [isMetadataSyncEnabled] Determines whether client-side schema metadata retrieval and update is + * enabled. + *

Setting this value to false will cause keyspace information not to be automatically loaded, affecting + * replica calculation per token in the different keyspaces. When disabling metadata synchronization, use + * [Metadata.refreshKeyspaces()]{@link module:metadata~Metadata#refreshKeyspaces} to keep keyspace information up to + * date or token-awareness will not work correctly.
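A minimal sketch of keeping keyspace metadata fresh when synchronization is disabled, assuming the published package and placeholder connection values:

    import { Client } from 'cassandra-driver';

    const client = new Client({
      contactPoints: ['10.0.1.101'],
      localDataCenter: 'datacenter1',
      isMetadataSyncEnabled: false
    });

    await client.connect();
    // Refresh keyspace information manually so token-aware routing keeps working.
    await client.metadata.refreshKeyspaces();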

+ * Default: true. + * @property {Boolean} [prepareOnAllHosts] Determines if the driver should prepare queries on all hosts in the cluster. + * Default: true. + * @property {Boolean} [rePrepareOnUp] Determines if the driver should re-prepare all cached prepared queries on a + * host when it marks it back up. + * Default: true. + * @property {Number} [maxPrepared] Determines the maximum amount of different prepared queries before evicting items + * from the internal cache. Reaching a high threshold hints that the queries are not being reused, like when + * hard-coding parameter values inside the queries. + * Default: 500. + * @property {Object} [policies] + * @property {LoadBalancingPolicy} [policies.loadBalancing] The load balancing policy instance to be used to determine + * the coordinator per query. + * @property {RetryPolicy} [policies.retry] The retry policy. + * @property {ReconnectionPolicy} [policies.reconnection] The reconnection policy to be used. + * @property {AddressTranslator} [policies.addressResolution] The address resolution policy. + * @property {SpeculativeExecutionPolicy} [policies.speculativeExecution] The SpeculativeExecutionPolicy + * instance to be used to determine if the client should send speculative queries when the selected host takes more + * time than expected. + *

+ * Default: [NoSpeculativeExecutionPolicy]{@link + * module:policies/speculativeExecution~NoSpeculativeExecutionPolicy} + *

+ * @property {TimestampGenerator} [policies.timestampGeneration] The client-side + * [query timestamp generator]{@link module:policies/timestampGeneration~TimestampGenerator}. + *

+ * Default: [MonotonicTimestampGenerator]{@link module:policies/timestampGeneration~MonotonicTimestampGenerator} + * + *

+ *

Use null to disable client-side timestamp generation.

+ * @property {QueryOptions} [queryOptions] Default options for all queries. + * @property {Object} [pooling] Pooling options. + * @property {Number} [pooling.heartBeatInterval] The amount of idle time in milliseconds that has to pass before the + * driver issues a request on an active connection to avoid idle time disconnections. Default: 30000. + * @property {Object} [pooling.coreConnectionsPerHost] Associative array containing amount of connections per host + * distance. + * @property {Number} [pooling.maxRequestsPerConnection] The maximum number of requests per connection. The default + * value is: + *
    + *
+ * - For modern protocol versions (v3 and above): 2048
+ * - For older protocol versions (v1 and v2): 128
+ * @property {Boolean} [pooling.warmup] Determines if all connections to hosts in the local datacenter must be opened on + * connect. Default: true. + * @property {Object} [protocolOptions] + * @property {Number} [protocolOptions.port] The port to use to connect to the Cassandra host. If not set through this + * method, the default port (9042) will be used instead. + * @property {Number} [protocolOptions.maxSchemaAgreementWaitSeconds] The maximum time in seconds to wait for schema + * agreement between nodes before returning from a DDL query. Default: 10. + * @property {Number} [protocolOptions.maxVersion] When set, it limits the maximum protocol version used to connect to + * the nodes. + * Useful for using the driver against a cluster that contains nodes with different major/minor versions of Cassandra. + * @property {Boolean} [protocolOptions.noCompact] When set to true, enables the NO_COMPACT startup option. + *

+ * When this option is supplied, SELECT, UPDATE, DELETE, and BATCH
+ * statements on COMPACT STORAGE tables function in "compatibility" mode, which allows seeing these tables
+ * as if they were "regular" CQL tables.
+ *

+ *

+ * This option only affects interactions with tables using COMPACT STORAGE and is only
+ * supported by C* 3.0.16+, 3.11.2+, 4.0+ and DSE 6.0+.
+ *

+ * @property {Object} [socketOptions] + * @property {Number} [socketOptions.connectTimeout] Connection timeout in milliseconds. Default: 5000. + * @property {Number} [socketOptions.defunctReadTimeoutThreshold] Determines the amount of requests that simultaneously + * have to timeout before closing the connection. Default: 64. + * @property {Boolean} [socketOptions.keepAlive] Whether to enable TCP keep-alive on the socket. Default: true. + * @property {Number} [socketOptions.keepAliveDelay] TCP keep-alive delay in milliseconds. Default: 0. + * @property {Number} [socketOptions.readTimeout] Per-host read timeout in milliseconds. + *

+ * Please note that this is not the maximum time a call to {@link Client#execute} may have to wait;
+ * this is the maximum time that call will wait for one particular Cassandra host, but other hosts will be tried if
+ * one of them times out. In other words, a {@link Client#execute} call may theoretically wait up to
+ * readTimeout * number_of_cassandra_hosts (though the total number of hosts tried for a given query also
+ * depends on the LoadBalancingPolicy in use).
+ *

When setting this value, keep in mind the following:

+ *
    + *
+ * - The timeout settings used on the Cassandra side (*_request_timeout_in_ms in cassandra.yaml) should be taken
+ *   into account when picking a value for this read timeout. You should pick a value a couple of seconds greater than
+ *   the Cassandra timeout settings.
+ * - The read timeout is only approximate and only controls the timeout to one Cassandra host, not the full query.
+ * Setting a value of 0 disables read timeouts. Default: 12000. + * @property {Boolean} [socketOptions.tcpNoDelay] When set to true, it disables the Nagle algorithm. Default: true. + * @property {Number} [socketOptions.coalescingThreshold] Buffer length in bytes use by the write queue before flushing + * the frames. Default: 8000. + * @property {AuthProvider} [authProvider] Provider to be used to authenticate to an auth-enabled cluster. + * @property {RequestTracker} [requestTracker] The instance of RequestTracker used to monitor or log requests executed + * with this instance. + * @property {Object} [sslOptions] Client-to-node ssl options. When set the driver will use the secure layer. + * You can specify cert, ca, ... options named after the Node.js tls.connect() options. + *

+ * It uses the same default values as Node.js tls.connect() except for rejectUnauthorized + * which is set to false by default (for historical reasons). This setting is likely to change + * in upcoming versions to enable validation by default. + *
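As a hedged sketch of the client-to-node TLS options described above (the CA file path and connection values are placeholders):

    import fs from 'fs';
    import { Client } from 'cassandra-driver';

    const client = new Client({
      contactPoints: ['10.0.1.101'],
      localDataCenter: 'datacenter1',
      sslOptions: {
        ca: [fs.readFileSync('/path/to/ca.pem')],
        // Opt in to certificate validation; the historical default is false.
        rejectUnauthorized: true
      }
    });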

+ * @property {Object} [encoding] Encoding options. + * @property {Function} [encoding.map] Map constructor to use for Cassandra map type encoding and decoding. + * If not set, it will default to Javascript Object with map keys as property names. + * @property {Function} [encoding.set] Set constructor to use for Cassandra set type encoding and decoding. + * If not set, it will default to Javascript Array. + * @property {Boolean} [encoding.copyBuffer] Determines if the network buffer should be copied for buffer based data + * types (blob, uuid, timeuuid and inet). + *

+ * Setting it to true causes the network buffer to be copied for each row value of those types,
+ * resulting in additional allocations but freeing the network buffer to be reused.
+ * Setting it to true is a good choice for cases where the Row and ResultSet returned by the queries are long-lived
+ * objects.
+ *

+ *

+ * Setting it to false causes less overhead and keeps a reference to the network buffer until the row /
+ * result set is de-referenced.
+ * Default: true.
+ *

+ * @property {Boolean} [encoding.useUndefinedAsUnset] Valid for Cassandra 2.2 and above. Determines that, if a parameter + * is set to + * undefined it should be encoded as unset. + *

+ * By default, ECMAScript undefined is encoded as null in the driver. Cassandra 2.2 + * introduced the concept of unset. + * At driver level, you can set a parameter to unset using the field types.unset. Setting this flag to + * true allows you to use ECMAScript undefined as Cassandra unset. + *

+ *

+ * Default: true. + *
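A short sketch of the undefined-as-unset behavior described above; the keyspace, table and column names are illustrative and a connected cluster is assumed:

    import { Client, types } from 'cassandra-driver';

    const client = new Client({
      contactPoints: ['10.0.1.101'],
      localDataCenter: 'datacenter1',
      keyspace: 'ks1',
      encoding: { useUndefinedAsUnset: true }
    });

    // With useUndefinedAsUnset enabled, undefined is encoded as unset,
    // equivalent to passing types.unset explicitly for that parameter.
    await client.execute(
      'INSERT INTO users (id, name, nickname) VALUES (?, ?, ?)',
      [types.Uuid.random(), 'jane', undefined],
      { prepare: true });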

+ * @property {Boolean} [encoding.useBigIntAsLong] Use [BigInt ECMAScript type](https://tc39.github.io/proposal-bigint/) + * to represent CQL bigint and counter data types. + * @property {Boolean} [encoding.useBigIntAsVarint] Use [BigInt ECMAScript + * type](https://tc39.github.io/proposal-bigint/) to represent CQL varint data type. + * @property {Array.} [profiles] The array of [execution profiles]{@link ExecutionProfile}. + * @property {Function} [promiseFactory] Function to be used to create a Promise from a + * callback-style function. + *

+ * Promise libraries often provide different methods to create a promise. For example, you can use Bluebird's + * Promise.fromCallback() method. + *

+ *

+ * By default, the driver will use the + * [Promise constructor]{@link https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise}. + *

+ */ +interface ClientOptions { + //TODO: remove when bundled. Remove insight-clients relevant doc too. + /** @internal */ + applicationName?: string; + /** @internal */ + applicationVersion?: string; + authProvider?: AuthProvider; + contactPoints?: string[]; + localDataCenter?: string; //TODO: not exposed. Should we? + /** @internal */ + logEmitter?: any; //TODO: not exposed. Should we? + keyspace?: string; + credentials?: { + username: string; + password: string; + } + + cloud?: { + secureConnectBundle: string | URL; + }; + + encoding?: { + map?: Function; + set?: Function; + copyBuffer?: boolean; + useUndefinedAsUnset?: boolean; + useBigIntAsLong?: boolean; + useBigIntAsVarint?: boolean; + }; + /** @internal */ + id?: Uuid; + isMetadataSyncEnabled?: boolean; + maxPrepared?: number; + metrics?: ClientMetrics; + /** @internal */ + monitorReporting?: { + enabled?: boolean; + }; + policies?: { + addressResolution?: AddressTranslator; + loadBalancing?: LoadBalancingPolicy; + reconnection?: ReconnectionPolicy; + retry?: RetryPolicy; + speculativeExecution?: SpeculativeExecutionPolicy; + timestampGeneration?: TimestampGenerator; + }; + pooling?: { + coreConnectionsPerHost?: { [key: number]: number; }; + heartBeatInterval?: number; + maxRequestsPerConnection?: number; + warmup?: boolean; + }; + prepareOnAllHosts?: boolean; + profiles?: ExecutionProfile[]; + protocolOptions?: { + maxSchemaAgreementWaitSeconds?: number; + maxVersion?: number; + noCompact?: boolean; + port?: number; + }; + promiseFactory?: (handler: (callback: (err: Error, result?: any) => void) => void) => Promise; + queryOptions?: QueryOptions; + refreshSchemaDelay?: number; + rePrepareOnUp?: boolean; + requestTracker?: RequestTracker; + /** @internal */ + sni?: { + address?: string; + port?: string; + addressResolver?: AddressResolver; + } + socketOptions?: { + coalescingThreshold?: number; + connectTimeout?: number; + defunctReadTimeoutThreshold?: number; + keepAlive?: boolean; + keepAliveDelay?: number; + readTimeout?: number; + tcpNoDelay?: boolean; + }; + sslOptions?: ConnectionOptions; +} + +//TODO: are they all insight-clients related? Should we remove all of them? +interface DseClientOptions extends ClientOptions { + id?: Uuid; + applicationName?: string; + applicationVersion?: string; + monitorReporting?: { enabled?: boolean }; + graphOptions?: GraphOptions; +} + +interface GraphQueryOptions extends QueryOptions { + graphLanguage?: string; + graphName?: string; + graphReadConsistency?: consistencies; + graphSource?: string; + graphWriteConsistency?: consistencies; + graphResults?: string; +} + +type GraphOptions = { + language?: string; + name?: string; + readConsistency?: consistencies; + readTimeout?: number; + source?: string; + writeConsistency?: consistencies; +}; + +/** + * Query options + * @typedef {Object} QueryOptions + * @property {Boolean} [autoPage] Determines if the driver must retrieve the following result pages automatically. + *

+ * This setting is only considered by the [Client#eachRow()]{@link Client#eachRow} method. For more information, + * check the + * [paging results documentation]{@link https://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. + *

+ * @property {Boolean} [captureStackTrace] Determines if the stack trace before the query execution should be + * maintained. + *

+ * Useful for debugging purposes; it should be set to false in production environments as it adds
+ * unnecessary overhead to each execution.
+ *

+ * Default: false. + * @property {Number} [consistency] [Consistency level]{@link module:types~consistencies}. + *

+ * Defaults to localOne for Apache Cassandra and DSE deployments. + * For DataStax Astra, it defaults to localQuorum. + *

+ * @property {Object} [customPayload] Key-value payload to be passed to the server. On the Cassandra side, + * implementations of QueryHandler can use this data. + * @property {String} [executeAs] The user or role name to act as when executing this statement. + *

When set, it executes as a different user/role than the one currently authenticated (a.k.a. proxy execution).

+ *

This feature is only available in DSE 5.1+.
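A sketch of proxy execution via the executeAs query option; the role name, keyspace and query are placeholders, and a connected Client instance named client with the proper proxy permissions (DSE 5.1+) is assumed:

    // Execute the statement as 'analytics_role' instead of the authenticated role.
    const result = await client.execute(
      'SELECT * FROM ks1.sales', [],
      { prepare: true, executeAs: 'analytics_role' });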

+ * @property {String|ExecutionProfile} [executionProfile] Name or instance of the [profile]{@link ExecutionProfile} to
+ * be used for this execution. If not set, it will use the "default" execution profile.
+ * @property {Number} [fetchSize] Amount of rows to retrieve per page.
+ * @property {Array|Array} [hints] Type hints for parameters given in the query, ordered as for the parameters.
+ *

For batch queries, an array of such arrays, ordered as with the queries in the batch.

+ * @property {Host} [host] The host that should handle the query. + *

+ * Use of this option is heavily discouraged; it should only be used in the following cases:
+ *

+ *
    + *
+ * 1. Querying node-local tables, such as tables in the system and system_views
+ *    keyspaces.
+ * 2. Applying a series of schema changes, where it may be advantageous to execute schema changes in sequence on the
+ *    same node.
+ *

+ * Configuring a specific host causes the configured + * [LoadBalancingPolicy]{@link module:policies/loadBalancing~LoadBalancingPolicy} to be completely bypassed. + * However, if the load balancing policy dictates that the host is at a + * [distance of ignored]{@link module:types~distance} or there is no active connectivity to the host, the request will + * fail with a [NoHostAvailableError]{@link module:errors~NoHostAvailableError}. + *

+ * @property {Boolean} [isIdempotent] Defines whether the query can be applied multiple times without changing the result + * beyond the initial application. + *

+ * The query execution idempotence can be used at [RetryPolicy]{@link module:policies/retry~RetryPolicy} level to
+ * determine if a statement can be retried in case of a request error or write timeout.
+ *

+ *

Default: false.

+ * @property {String} [keyspace] Specifies the keyspace for the query. It is used for the following: + *
    + *
+ * 1. To indicate what keyspace the statement is applicable to (protocol V5+ only). This is useful when the
+ *    query does not provide an explicit keyspace and you want to override the current {@link Client#keyspace}.
+ * 2. For query routing when the query operates on a different keyspace than the current {@link Client#keyspace}.
+ * @property {Boolean} [logged] Determines if the batch should be written to the batchlog. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: true. + * @property {Boolean} [counter] Determines if its a counter batch. Only valid for + * [Client#batch()]{@link Client#batch}, it will be ignored by other methods. Default: false. + * @property {Buffer|String} [pageState] Buffer or string token representing the paging state. + *

+ * Useful for manual paging; if provided, the query will be executed starting from the given paging state.
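A rough sketch of manual paging with pageState, assuming a connected Client instance named client, a placeholder table, and that the ResultSet exposes the resulting page state as result.pageState:

    const options = { prepare: true, fetchSize: 100 };

    // First page
    const first = await client.execute('SELECT * FROM ks1.events', [], options);

    // Next page: pass the page state returned by the previous execution
    const second = await client.execute('SELECT * FROM ks1.events', [], {
      ...options,
      pageState: first.pageState
    });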

+ * @property {Boolean} [prepare] Determines if the query must be executed as a prepared statement. + * @property {Number} [readTimeout] When defined, it overrides the default read timeout + * (socketOptions.readTimeout) in milliseconds for this execution per coordinator. + *

+ * Suitable for statements for which the coordinator may allow a longer server-side timeout, for example aggregation + * queries. + *

+ *

+ * A value of 0 disables client side read timeout for the execution. Default: undefined. + *

+ * @property {RetryPolicy} [retry] Retry policy for the query. + *

+ * This property can be used to specify a different [retry policy]{@link module:policies/retry} to the one specified + * in the {@link ClientOptions}.policies. + *

+ * @property {Array} [routingIndexes] Index of the parameters that are part of the partition key to determine
+ * the routing.
+ * @property {Buffer|Array} [routingKey] Partition key(s) to determine which coordinator should be used for the query.
+ * @property {Array} [routingNames] Array of the parameter names that are part of the partition key to determine the
+ * routing. Only valid for non-prepared requests; it's recommended that you use the prepare flag instead.
+ * @property {Number} [serialConsistency] Serial consistency is the consistency level for the serial phase of
+ * conditional updates.
+ * This option will be ignored for anything other than a conditional update/insert.
+ * @property {Number|Long} [timestamp] The default timestamp for the query in microseconds from the unix epoch
+ * (00:00:00, January 1st, 1970).

+ * If provided, this will replace the server-side assigned timestamp as the default timestamp.

+ *

+ * Use the [generateTimestamp()]{@link module:types~generateTimestamp} utility method to generate a valid timestamp
+ * based on a Date and a microseconds part.
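For instance, a sketch of supplying a client-side default timestamp with the generateTimestamp() utility mentioned above; it assumes a connected Client instance named client, a placeholder table, and that generateTimestamp() uses the current time when called without arguments:

    import { types } from 'cassandra-driver';

    const userId = types.Uuid.random(); // placeholder id
    await client.execute(
      'UPDATE ks1.users SET name = ? WHERE id = ?',
      ['jane', userId],
      { prepare: true, timestamp: types.generateTimestamp() });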

+ * @property {Boolean} [traceQuery] Enable query tracing for the execution. Use query tracing to diagnose performance + * problems related to query executions. Default: false. + *

+ * To retrieve the trace, you can call the [Metadata.getTrace()]{@link module:metadata~Metadata#getTrace} method.
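A hedged sketch of enabling tracing and fetching the trace afterwards; it assumes a connected Client instance named client, that the trace id is exposed on result.info.traceId, and that getTrace() returns a promise when no callback is provided (these details may differ):

    const result = await client.execute(
      'SELECT * FROM system.local', [], { traceQuery: true });

    // Retrieve the server-side trace for this execution.
    const trace = await client.metadata.getTrace(result.info.traceId);
    console.log(trace);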

+ * @property {Object} [graphOptions] Default options for graph query executions. + *

+ * These options are meant to provide defaults for all graph query executions. Consider using + * [execution profiles]{@link ExecutionProfile} if you plan to reuse different set of options across different + * query executions. + *

+ * @property {String} [graphOptions.language] The graph language to use in graph queries. Default: + * 'gremlin-groovy'. + * @property {String} [graphOptions.name] The graph name to be used in all graph queries. + *

+ * This property is required but there is no default value for it. This value can be overridden at query level. + *

+ * @property {Number} [graphOptions.readConsistency] Overrides the + * [consistency level]{@link module:types~consistencies} + * defined in the query options for graph read queries. + * @property {Number} [graphOptions.readTimeout] Overrides the default per-host read timeout (in milliseconds) for all + * graph queries. Default: 0. + *

+ * Use null to reset the value and use the default on socketOptions.readTimeout.
+ *

+ * @property {String} [graphOptions.source] The graph traversal source name to use in graph queries. Default: + * 'g'. + * @property {Number} [graphOptions.writeConsistency] Overrides the [consistency + * level]{@link module:types~consistencies} defined in the query options for graph write queries. + */ +interface QueryOptions { + autoPage?: boolean; + captureStackTrace?: boolean; + consistency?: consistencies; + customPayload?: object; + executeAs?: string; + executionProfile?: string | ExecutionProfile; + fetchSize?: number; + hints?: Array | Array>; + host?: Host; + isIdempotent?: boolean; + keyspace?: string; + logged?: boolean; + counter?: boolean; + pageState?: Buffer | string; + prepare?: boolean; + readTimeout?: number; + retry?: RetryPolicy; + routingIndexes?: number[]; + routingKey?: Buffer | Buffer[]; + routingNames?: string[]; + serialConsistency?: number; + timestamp?: number | Long; + traceQuery?: boolean; + //TODO: graphOptions was not exposed. Should we? + graphOptions?: { + language?: string; + name?: string; + readConsistency?: number; + readTimeout?: number; + source?: string; + writeConsistency?: number; + }; +} + +/** + * Creates a new instance of {@link Client}. + * @classdesc + * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to + * execute CQL statements. + *

+ * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node + * to use per each query execution, when it should retry failed or timed-out executions and how reconnection to down + * nodes should be made. + *

+ * @extends EventEmitter + * @param {ClientOptions} options The options for this instance. + * @example Creating a new client instance + * const client = new Client({ + * contactPoints: ['10.0.1.101', '10.0.1.102'], + * localDataCenter: 'datacenter1' + * }); + * @example Executing a query + * const result = await client.connect(); + * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); + * @example Executing a query + * const result = await client.execute('SELECT key FROM system.local'); + * const row = result.first(); + * console.log(row['key']); + */ +class Client extends events.EventEmitter{ + /** @internal */ + options: ClientOptions; + /** @internal */ + profileManager: ProfileManager; + private connected: boolean; + private isShuttingDown: boolean; + /** + * Gets the name of the active keyspace. + * @type {String} + */ + keyspace: string; + /** + * Gets the schema and cluster metadata information. + * @type {Metadata} + */ + metadata: Metadata; + /** @internal */ + controlConnection: ControlConnection; + /** + * Gets an associative array of cluster hosts. + * @type {HostMap} + */ + hosts: HostMap; + /** + * The [ClientMetrics]{@link module:metrics~ClientMetrics} instance used to expose measurements of its internal + * behavior and of the server as seen from the driver side. + *

By default, a [DefaultMetrics]{@link module:metrics~DefaultMetrics} instance is used.

+ * @type {ClientMetrics} + */ + metrics: ClientMetrics; + private _graphExecutor: GraphExecutor; + private connecting: boolean; + private insightsClient: InsightsClient; + /** + * Creates a new instance of {@link Client}. + * Represents a database client that maintains multiple connections to the cluster nodes, providing methods to + * execute CQL statements. + *

+ * The Client uses [policies]{@link module:policies} to decide which nodes to connect to, which node + * to use per each query execution, when it should retry failed or timed-out executions and how reconnection to down + * nodes should be made. + *

+ * @param {DseClientOptions} options The options for this instance. + * @example Creating a new client instance + * const client = new Client({ + * contactPoints: ['10.0.1.101', '10.0.1.102'], + * localDataCenter: 'datacenter1' + * }); + * @example Executing a query + * const result = await client.connect(); + * console.log(`Connected to ${client.hosts.length} nodes in the cluster: ${client.hosts.keys().join(', ')}`); + * @example Executing a query + * const result = await client.execute('SELECT key FROM system.local'); + * const row = result.first(); + * console.log(row['key']); + * @constructor + */ + constructor(options: DseClientOptions) { + super(); + this.options = clientOptions.extend({ logEmitter: this.emit.bind(this), id: types.Uuid.random() }, options); + Object.defineProperty(this, 'profileManager', { value: new ProfileManager(this.options) }); + Object.defineProperty(this, 'controlConnection', { + value: new ControlConnection(this.options, this.profileManager), writable: true, configurable: true + } + ); + Object.defineProperty(this, 'insightsClient', { value: new InsightsClient(this) }); + + //Unlimited amount of listeners for internal event queues by default + this.setMaxListeners(0); + this.connected = false; + this.isShuttingDown = false; + /** + * Gets the name of the active keyspace. + * @type {String} + */ + this.keyspace = options.keyspace; + /** + * Gets the schema and cluster metadata information. + * @type {Metadata} + */ + this.metadata = this.controlConnection.metadata; + /** + * Gets an associative array of cluster hosts. + * @type {HostMap} + */ + this.hosts = this.controlConnection.hosts; + + /** + * The [ClientMetrics]{@link module:metrics~ClientMetrics} instance used to expose measurements of its internal + * behavior and of the server as seen from the driver side. + *

By default, a [DefaultMetrics]{@link module:metrics~DefaultMetrics} instance is used.

+ * @type {ClientMetrics} + */ + this.metrics = this.options.metrics; + + this._graphExecutor = new GraphExecutor(this, options, this._execute); + } + /** + * Emitted when a new host is added to the cluster. + *
    + *
+ * - {@link Host} The host being added.
+ * @event Client#hostAdd + */ + /** + * Emitted when a host is removed from the cluster + *
    + *
+ * - {@link Host} The host being removed.
+ * @event Client#hostRemove + */ + /** + * Emitted when a host in the cluster changed status from down to up. + *
    + *
+ * - {@link Host host} The host that changed the status.
+ * @event Client#hostUp + */ + /** + * Emitted when a host in the cluster changed status from up to down. + *
    + *
+ * - {@link Host host} The host that changed the status.
+ * @event Client#hostDown + */ + /** + * Attempts to connect to one of the [contactPoints]{@link ClientOptions} and discovers the rest the nodes of the + * cluster. + *

When the {@link Client} is already connected, it resolves immediately.

+ *

It returns a Promise when a callback is not provided.

+ * @param {function} [callback] The optional callback that is invoked when the pool is connected or it failed to + * connect. + * @example Usage example + * await client.connect(); + */ + connect(): Promise; + connect(callback: EmptyCallback): void; + connect(callback?: EmptyCallback) { + if (this.connected && callback) { + // Avoid creating Promise to immediately resolve them + return callback(null); + } + return promiseUtils.optionalCallback(this._connect(), callback); + } + /** + * Async-only version of {@link Client#connect()}. + * @private + */ + private async _connect() { + if (this.connected) { + return; + } + + if (this.isShuttingDown) { + //it is being shutdown, don't allow further calls to connect() + throw new errors.NoHostAvailableError(null, 'Connecting after shutdown is not supported'); + } + + if (this.connecting) { + return promiseUtils.fromEvent(this, 'connected'); + } + + this.connecting = true; + this.log('info', util.format("Connecting to cluster using '%s' version %s", description, version)); + + try { + await cloud.init(this.options); + await this.controlConnection.init(); + this.hosts = this.controlConnection.hosts; + await this.profileManager.init(this, this.hosts); + + if (this.keyspace) { + await RequestHandler.setKeyspace(this); + } + + clientOptions.setMetadataDependent(this); + + await this._warmup(); + + } catch (err) { + // We should close the pools (if any) and reset the state to allow successive calls to connect() + await this.controlConnection.reset(); + this.connected = false; + this.connecting = false; + this.emit('connected', err); + throw err; + } + + this._setHostListeners(); + + // Set the distance of the control connection host relatively to this instance + this.profileManager.getDistance(this.controlConnection.host); + this.insightsClient.init(); + this.connected = true; + this.connecting = false; + this.emit('connected'); + } + /** @internal */ + log = utils.log.bind(this); + + /** + * Executes a query on an available connection. + *

The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag.

+ *

+ * Some execution failures can be handled transparently by the driver, according to the + * [RetryPolicy]{@linkcode module:policies/retry~RetryPolicy} or the + * [SpeculativeExecutionPolicy]{@linkcode module:policies/speculativeExecution} used. + *

+ *

It returns a Promise when a callback is not provided.

+ * @param {String} query The query to execute. + * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value. + * @param {QueryOptions} [options] The query options for the execution. + * @param {ResultCallback} [callback] Executes callback(err, result) when execution completed. When not defined, the + * method will return a promise. + * @example Promise-based API, using async/await + * const query = 'SELECT name, email FROM users WHERE id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * const row = result.first(); + * console.log('%s: %s', row['name'], row['email']); + * @example Callback-based API + * const query = 'SELECT name, email FROM users WHERE id = ?'; + * client.execute(query, [ id ], { prepare: true }, function (err, result) { + * assert.ifError(err); + * const row = result.first(); + * console.log('%s: %s', row['name'], row['email']); + * }); + * @see {@link ExecutionProfile} to reuse a set of options across different query executions. + */ + execute(query: string, params?: ArrayOrObject, options?: QueryOptions): Promise; + execute(query: string, params: ArrayOrObject, options: QueryOptions, callback: ValueCallback): void; + execute(query: string, params: ArrayOrObject, callback: ValueCallback): void; + execute(query: string, callback: ValueCallback): void; + execute(query: string, params?: Array | ValueCallback, options?: QueryOptions | ValueCallback, callback?: ValueCallback) { + // This method acts as a wrapper for the async method _execute() + if (!callback) { + // Set default argument values for optional parameters + if (typeof options === 'function') { + callback = options; + options = null; + } else if (typeof params === 'function') { + callback = params; + params = null; + } + } + + try { + const execOptions = DefaultExecutionOptions.create(options as QueryOptions, this); + return promiseUtils.optionalCallback(this._execute(query, params as any[], execOptions), callback); + } + catch (err) { + // There was an error when parsing the user options + if (callback) { + return callback(err,null); + } + + return Promise.reject(err); + } + } + /** + * Executes a graph query. + *

It returns a Promise when a callback is not provided.

+ * @param {String} query The gremlin query. + * @param {Object|null} [parameters] An associative array containing the key and values of the parameters. + * @param {GraphQueryOptions|null} [options] The graph query options. + * @param {Function} [callback] Function to execute when the response is retrieved, taking two arguments: + * err and result. When not defined, the method will return a promise. + * @example Promise-based API, using async/await + * const result = await client.executeGraph('g.V()'); + * // Get the first item (vertex, edge, scalar value, ...) + * const vertex = result.first(); + * console.log(vertex.label); + * @example Callback-based API + * client.executeGraph('g.V()', (err, result) => { + * const vertex = result.first(); + * console.log(vertex.label); + * }); + * @example Iterating through the results + * const result = await client.executeGraph('g.E()'); + * for (let edge of result) { + * console.log(edge.label); // created + * }); + * @example Using result.forEach() + * const result = await client.executeGraph('g.V().hasLabel("person")'); + * result.forEach(function(vertex) { + * console.log(vertex.type); // vertex + * console.log(vertex.label); // person + * }); + * @see {@link ExecutionProfile} to reuse a set of options across different query executions. + */ + executeGraph( + traversal: string, + parameters: { [name: string]: any } | undefined, + options: GraphQueryOptions, + callback: ValueCallback): void; + + executeGraph( + traversal: string, + parameters: { [name: string]: any } | undefined, + callback: ValueCallback): void; + + executeGraph(traversal: string, callback: ValueCallback): void; + + executeGraph( + traversal: string, + parameters?: { [name: string]: any }, + options?: GraphQueryOptions): Promise; + + executeGraph(query: string, parameters: { [name: string]: any } | ValueCallback, options?: GraphQueryOptions | ValueCallback, callback?: ValueCallback) { + callback = callback || (options ? options : parameters) as ValueCallback; + + if (typeof callback === 'function') { + parameters = typeof parameters !== 'function' ? parameters : null; + return promiseUtils.toCallback(this._graphExecutor.send(query, parameters, options as GraphQueryOptions), callback); + } + + return this._graphExecutor.send(query, parameters, options as GraphQueryOptions); + } + /** + * Executes the query and calls rowCallback for each row as soon as they are received. Calls the final + * callback after all rows have been sent, or when there is an error. + *

+ * The query can be prepared (recommended) or not depending on the [prepare]{@linkcode QueryOptions} flag. + *

+ * @param {String} query The query to execute + * @param {Array|Object} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value. + * @param {QueryOptions} [options] The query options. + * @param {function} rowCallback Executes rowCallback(n, row) per each row received, where n is the row + * index and row is the current Row. + * @param {function} [callback] Executes callback(err, result) after all rows have been received. + *

+ * When dealing with paged results, the [ResultSet#nextPage()]{@link module:types~ResultSet#nextPage} method can be used
+ * to retrieve the following page. In that case, rowCallback() will be called again for each row and
+ * the final callback will be invoked when all rows in the following page have been retrieved.
+ *
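For illustration, a sketch of explicit paging with eachRow() and result.nextPage(); the query, keyspace and options are placeholders and a connected Client instance named client is assumed:

    client.eachRow(
      'SELECT * FROM ks1.events',
      [],
      { prepare: true, fetchSize: 500 },
      (n, row) => console.log(n, row),
      (err, result) => {
        if (err) { return console.error(err); }
        if (result.nextPage) {
          // Fetch the following page; the row callback is invoked again for each row.
          result.nextPage();
        }
      });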

+ * @example Using per-row callback and arrow functions + * client.eachRow(query, params, { prepare: true }, (n, row) => console.log(n, row), err => console.error(err)); + * @example Overloads + * client.eachRow(query, rowCallback); + * client.eachRow(query, params, rowCallback); + * client.eachRow(query, params, options, rowCallback); + * client.eachRow(query, params, rowCallback, callback); + * client.eachRow(query, params, options, rowCallback, callback); + */ + eachRow(query: string, + params: ArrayOrObject, + options: QueryOptions, + rowCallback: (n: number, row: Row) => void, + callback?: ValueCallback): void; + + eachRow(query: string, + params: ArrayOrObject, + rowCallback: (n: number, row: Row) => void, + callback?: ValueCallback): void; + + eachRow(query: string, + rowCallback: (n: number, row: Row) => void): void; + + eachRow(query: string, params: Array | ((n: number, row: Row) => void), options?: QueryOptions | ((n: number, row: Row) => void), rowCallback?: ((n: number, row: Row) => void) | ValueCallback, callback?: ValueCallback) { + if (!callback && rowCallback && typeof options === 'function') { + callback = utils.validateFn(rowCallback as ValueCallback, 'rowCallback'); + rowCallback = options; + } else { + callback = callback || utils.noop; + rowCallback = utils.validateFn((rowCallback || options || params) as ((n: number, row: Row) => void), 'rowCallback'); + } + + params = typeof params !== 'function' ? params : null; + + let execOptions; + try { + execOptions = DefaultExecutionOptions.create(options as QueryOptions, this, rowCallback); + } + catch (e) { + return callback(e, null); + } + + let rowLength = 0; + + const nextPage = () => promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); + + function pageCallback(err, result) { + if (err) { + return callback(err, null); + } + // Next requests in case paging (auto or explicit) is used + rowLength += result.rowLength; + + if (result.rawPageState !== undefined) { + // Use new page state as next request page state + execOptions.setPageState(result.rawPageState); + if (execOptions.isAutoPage()) { + // Issue next request for the next page + return nextPage(); + } + // Allows for explicit (manual) paging, in case the caller needs it + result.nextPage = nextPage; + } + + // Finished auto-paging + result.rowLength = rowLength; + callback(null, result); + } + + promiseUtils.toCallback(this._execute(query, params, execOptions), pageCallback); + } + /** + * Executes the query and pushes the rows to the result stream as soon as they received. + *

+ * The stream is a [ReadableStream]{@linkcode https://nodejs.org/api/stream.html#stream_class_stream_readable} object + * that emits rows. + * It can be piped downstream and provides automatic pause/resume logic (it buffers when not read). + *

+ *

+ * The query can be prepared (recommended) or not depending on {@link QueryOptions}.prepare flag. Retries on multiple + * hosts if needed. + *
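A minimal sketch of consuming the result stream; the event handling follows the standard Node.js Readable contract, and the query, keyspace and column names are placeholders (a connected Client instance named client is assumed):

    const stream = client.stream('SELECT name, email FROM ks1.users', [], { prepare: true });

    stream.on('readable', () => {
      // Rows are buffered by the stream until read
      let row;
      while ((row = stream.read()) !== null) {
        console.log(row.name, row.email);
      }
    });
    stream.on('end', () => console.log('All rows retrieved'));
    stream.on('error', (err) => console.error(err));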

+ * @param {String} query The query to prepare and execute. + * @param {ArrayOrObject} [params] Array of parameter values or an associative array (object) containing parameter names + * as keys and its value + * @param {QueryOptions} [options] The query options. + * @param {function} [callback] executes callback(err) after all rows have been received or if there is an error + * @returns {ResultStream} + */ + stream(query: string, params?: ArrayOrObject, options?: QueryOptions, callback?: EmptyCallback): ResultStream { + callback = callback || utils.noop; + // NOTE: the nodejs stream maintains yet another internal buffer + // we rely on the default stream implementation to keep memory + // usage reasonable. + const resultStream = new types.ResultStream({ objectMode: 1 }); + function onFinish(err, result) { + if (err) { + resultStream.emit('error', err); + } + if (result && result.nextPage) { + // allows for throttling as per the + // default nodejs stream implementation + resultStream._valve(function pageValve() { + try { + result.nextPage(); + } + catch (ex) { + resultStream.emit('error', ex); + } + }); + return; + } + // Explicitly dropping the valve (closure) + resultStream._valve(null); + resultStream.add(null); + callback(err); + } + let sync = true; + this.eachRow(query, params, options, function rowCallback(n, row) { + resultStream.add(row); + }, function eachRowFinished(err, result) { + if (sync) { + // Prevent sync callback + return setImmediate(function eachRowFinishedImmediate() { + onFinish(err, result); + }); + } + onFinish(err, result); + }); + sync = false; + return resultStream; + } + /** + * Executes batch of queries on an available connection to a host. + *

It returns a Promise when a callback is not provided.
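A short sketch of a logged batch of prepared statements, as documented below; the keyspace, tables and values are placeholders and a connected Client instance named client is assumed:

    import { types } from 'cassandra-driver';

    const id = types.Uuid.random();
    const queries = [
      { query: 'INSERT INTO ks1.events (id, name) VALUES (?, ?)', params: [id, 'created'] },
      { query: 'UPDATE ks1.events_by_name SET id = ? WHERE name = ?', params: [id, 'created'] }
    ];

    await client.batch(queries, { prepare: true });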

+ * @param {Array.|Array.<{query, params}>} queries The queries to execute as an Array of strings or as an array + * of object containing the query and params + * @param {QueryOptions} [options] The query options. + * @param {ResultCallback} [callback] Executes callback(err, result) when the batch was executed + */ + batch( + queries: Array, + options?: QueryOptions): Promise; + + batch( + queries: Array, + options: QueryOptions, + callback: ValueCallback): void; + + batch( + queries: Array, + callback: ValueCallback): void; + + batch(queries: Array, options?: QueryOptions | ValueCallback, callback?: ValueCallback) { + if (!callback && typeof options === 'function') { + callback = options; + options = null; + } + + return promiseUtils.optionalCallback(this._batch(queries, options as GraphQueryOptions), callback); + } + /** + * Async-only version of {@link Client#batch()} . + * @param {Array.|Array.<{query, params}>}queries + * @param {QueryOptions} options + * @returns {Promise} + * @private + */ + private async _batch(queries: Array, options: QueryOptions): Promise { + if (!Array.isArray(queries)) { + throw new errors.ArgumentError('Queries should be an Array'); + } + + if (queries.length === 0) { + throw new errors.ArgumentError('Queries array should not be empty'); + } + + await this._connect(); + + const execOptions = DefaultExecutionOptions.create(options, this); + let queryItems; + + if (execOptions.isPrepared()) { + // use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. + const version = this.controlConnection.protocolVersion; + const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && options.keyspace || this.keyspace; + queryItems = await PrepareHandler.getPreparedMultiple( + this, execOptions.getLoadBalancingPolicy(), queries, queryKeyspace); + } else { + queryItems = new Array(queries.length); + + for (let i = 0; i < queries.length; i++) { + const item = queries[i]; + if (!item) { + throw new errors.ArgumentError(`Invalid query at index ${i}`); + } + + const query = typeof item === 'string' ? item : item.query; + if (!query) { + throw new errors.ArgumentError(`Invalid query at index ${i}`); + } + + queryItems[i] = { query, params: item["params"] }; + } + } + + const request = await this._createBatchRequest(queryItems, execOptions); + return await RequestHandler.send(request, execOptions, this); + } + /** + * Gets the host that are replicas of a given token. + * @param {String} keyspace + * @param {Buffer} token + * @returns {Array} + */ + getReplicas(keyspace: string, token: Buffer): Array { + return this.metadata.getReplicas(keyspace, token); + } + /** + * Gets a snapshot containing information on the connections pools held by this Client at the current time. + *

+ * The information provided in the returned object only represents the state at the moment this method was called and + * it's not maintained in sync with the driver metadata. + *
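A hedged sketch of inspecting the snapshot; it assumes a connected Client instance named client and that ClientState exposes getConnectedHosts() and getOpenConnections(), which may differ:

    const state = client.getState();
    for (const host of state.getConnectedHosts()) {
      console.log(`${host.address}: ${state.getOpenConnections(host)} open connections`);
    }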

+ * @returns {ClientState} A [ClientState]{@linkcode module:metadata~ClientState} instance. + */ + getState(): ClientState { + return ClientState.from(this); + } + /** + * Closes all connections to all hosts. + *

It returns a Promise when a callback is not provided.

+ * @param {Function} [callback] Optional callback to be invoked when finished closing all connections. + */ + shutdown(): Promise; + shutdown(callback: EmptyCallback): void; + shutdown(callback?: Function): Promise | void { + return promiseUtils.optionalCallback(this._shutdown(), callback); + } + /** @private */ + private async _shutdown() { + this.log('info', 'Shutting down'); + + if (!this.hosts || !this.connected) { + // not initialized + this.connected = false; + return; + } + + if (this.connecting) { + this.log('warning', 'Shutting down while connecting'); + // wait until finish connecting for easier troubleshooting + await promiseUtils.fromEvent(this, 'connected'); + } + + this.connected = false; + this.isShuttingDown = true; + const hosts = this.hosts.values(); + + this.insightsClient.shutdown(); + + // Shutdown the ControlConnection before shutting down the pools + this.controlConnection.shutdown(); + this.options.policies.speculativeExecution.shutdown(); + + if (this.options.requestTracker) { + this.options.requestTracker.shutdown(); + } + + // go through all the host and shut down their pools + await Promise.all(hosts.map(h => h.shutdown(false))); + } + /** + * Waits until that the schema version in all nodes is the same or the waiting time passed. + * @param {Connection} connection + * @returns {Promise} + * @ignore @internal + */ + private async _waitForSchemaAgreement(connection: Connection): Promise { + if (this.hosts.length === 1) { + return true; + } + + const start = process.hrtime(); + const maxWaitSeconds = this.options.protocolOptions.maxSchemaAgreementWaitSeconds; + + this.log('info', 'Waiting for schema agreement'); + + let versionsMatch; + + while (!versionsMatch && process.hrtime(start)[0] < maxWaitSeconds) { + versionsMatch = await this.metadata.compareSchemaVersions(connection); + + if (versionsMatch) { + this.log('info', 'Schema versions match'); + break; + } + + // Let some time pass before the next check + await promiseUtils.delay(500); + } + + return versionsMatch; + } + /** + * Waits for schema agreements and schedules schema metadata refresh. + * @param {Connection} connection + * @param event + * @returns {Promise} + * @ignore + * @internal + */ + async handleSchemaAgreementAndRefresh(connection: Connection, event): Promise { + let agreement = false; + + try { + agreement = await this._waitForSchemaAgreement(connection); + } catch (err) { + //we issue a warning but we continue with the normal flow + this.log('warning', 'There was an error while waiting for the schema agreement between nodes', err); + } + + if (!this.options.isMetadataSyncEnabled) { + return agreement; + } + + // Refresh metadata immediately + try { + await this.controlConnection.handleSchemaChange(event, true); + } catch (err) { + this.log('warning', 'There was an error while handling schema change', err); + } + + return agreement; + } + /** + * Connects and handles the execution of prepared and simple statements. 
+ * @param {string} query + * @param {Array} params + * @param {ExecutionOptions} execOptions + * @returns {Promise} + * @private + */ + private async _execute(query: string, params: Array, execOptions: ExecutionOptions): Promise { + const version = this.controlConnection.protocolVersion; + + if (!execOptions.isPrepared() && params && !Array.isArray(params) && + !types.protocolVersion.supportsNamedParameters(version)) { + // Only Cassandra 2.1 and above supports named parameters + throw new errors.ArgumentError('Named parameters for simple statements are not supported, use prepare flag'); + } + + let request; + + if (!this.connected) { + // Micro optimization to avoid an async execution for a simple check + await this._connect(); + } + + if (!execOptions.isPrepared()) { + request = await this._createQueryRequest(query, execOptions, params); + } else { + const lbp = execOptions.getLoadBalancingPolicy(); + + // Use keyspace from query options if protocol supports per-query keyspace, otherwise use connection keyspace. + const queryKeyspace = types.protocolVersion.supportsKeyspaceInRequest(version) && + execOptions.getKeyspace() || this.keyspace; + + const { queryId, meta } = await PrepareHandler.getPrepared(this, lbp, query, queryKeyspace); + request = await this._createExecuteRequest(query, queryId, execOptions, params, meta); + } + + return await RequestHandler.send(request, execOptions, this); + } + /** + * Sets the listeners for the nodes. + * @private + */ + private _setHostListeners() { + function getHostUpListener(emitter, h) { + return () => emitter.emit('hostUp', h); + } + + function getHostDownListener(emitter, h) { + return () => emitter.emit('hostDown', h); + } + + const self = this; + + // Add status listeners when new nodes are added and emit hostAdd + this.hosts.on('add', function hostAddedListener(h) { + h.on('up', getHostUpListener(self, h)); + h.on('down', getHostDownListener(self, h)); + self.emit('hostAdd', h); + }); + + // Remove all listeners and emit hostRemove + this.hosts.on('remove', function hostRemovedListener(h) { + h.removeAllListeners(); + self.emit('hostRemove', h); + }); + + // Add status listeners for existing hosts + this.hosts.forEach(function (h) { + h.on('up', getHostUpListener(self, h)); + h.on('down', getHostDownListener(self, h)); + }); + } + /** + * Sets the distance to each host and when warmup is true, creates all connections to local hosts. + * @returns {Promise} + * @private + */ + private _warmup(): Promise { + const hosts = this.hosts.values(); + + return promiseUtils.times(hosts.length, warmupLimit, async (index) => { + const h = hosts[index]; + const distance = this.profileManager.getDistance(h); + + if (distance === types.distance.ignored) { + return; + } + + if (this.options.pooling.warmup && distance === types.distance.local) { + try { + await h.warmupPool(this.keyspace); + } catch (err) { + // An error while trying to create a connection to one of the hosts. + // Warn the user and move on. + this.log('warning', `Connection pool to host ${h.address} could not be created: ${err}`, err); + } + } else { + h.initializePool(); + } + }); + } + /** + * @returns {Encoder} + * @private + */ + private _getEncoder(): Encoder { + const encoder = this.controlConnection.getEncoder(); + if (!encoder) { + throw new errors.DriverInternalError('Encoder is not defined'); + } + return encoder; + } + /** + * Returns a BatchRequest instance and fills the routing key information in the provided options. 
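The named-parameter check in _execute() above is easiest to see from calling code; a sketch (table and column names are illustrative):

    // Named parameters on a simple (non-prepared) statement need protocol v3+;
    // with { prepare: true } the driver resolves them against the prepared metadata
    // and can also compute the routing key.
    const query = 'SELECT name FROM users WHERE id = :id';
    const rs = await client.execute(query, { id: 'user-1' }, { prepare: true });
    console.log(rs.first());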
+ * @private + */ + private async _createBatchRequest(queryItems: {query; params; info?}[], info) { + const firstQuery = queryItems[0]; + if (!firstQuery["meta"]) { + return new requests.BatchRequest(queryItems, info); + } + + await this._setRoutingInfo(info, firstQuery.params, firstQuery["meta"]); + return new requests.BatchRequest(queryItems, info); + } + /** + * Returns an ExecuteRequest instance and fills the routing key information in the provided options. + * @private + */ + private async _createExecuteRequest(query, queryId, info, params, meta) { + params = utils.adaptNamedParamsPrepared(params, meta.columns); + await this._setRoutingInfo(info, params, meta); + return new requests.ExecuteRequest(query, queryId, params, info, meta); + } + /** + * Returns a QueryRequest instance and fills the routing key information in the provided options. + * @private + */ + private async _createQueryRequest(query, execOptions, params) { + await this.metadata.adaptUserHints(this.keyspace, execOptions.getHints()); + const paramsInfo = utils.adaptNamedParamsWithHints(params, execOptions); + this._getEncoder().setRoutingKeyFromUser(paramsInfo.params, execOptions, paramsInfo.keyIndexes); + + return new requests.QueryRequest(query, paramsInfo.params, execOptions, paramsInfo.namedParameters); + } + /** + * Sets the routing key based on the parameter values or the provided routing key components. + * @param {ExecutionOptions} execOptions + * @param {Array} params + * @param meta + * @private + */ + private async _setRoutingInfo(execOptions: ExecutionOptions, params: Array, meta) { + const encoder = this._getEncoder(); + + if (!execOptions.getKeyspace() && meta.keyspace) { + execOptions.setKeyspace(meta.keyspace); + } + if (execOptions.getRoutingKey()) { + // Routing information provided by the user + return encoder.setRoutingKeyFromUser(params, execOptions); + } + if (Array.isArray(meta.partitionKeys)) { + // The partition keys are provided as part of the metadata for modern protocol versions + execOptions.setRoutingIndexes(meta.partitionKeys); + return encoder.setRoutingKeyFromMeta(meta, params, execOptions); + } + + // Older versions of the protocol (v3 and below) don't provide routing information + try { + const tableInfo = await this.metadata.getTable(meta.keyspace, meta.table); + + if (!tableInfo) { + // The schema data is not there, maybe it is being recreated, avoid setting the routing information + return; + } + + execOptions.setRoutingIndexes(tableInfo.partitionKeys.map(c => meta.columnsByName[c.name])); + // Skip parsing metadata next time + meta.partitionKeys = execOptions.getRoutingIndexes(); + encoder.setRoutingKeyFromMeta(meta, params, execOptions); + } catch (_err) { + this.log('warning', util.format('Table %s.%s metadata could not be retrieved', meta.keyspace, meta.table)); + } + } +} + + +export default Client; + +export { + type ClientOptions, type GraphQueryOptions, type QueryOptions +}; diff --git a/lib/concurrent/index.d.ts b/lib/concurrent/index.d.ts deleted file mode 100644 index 91ab56f7d..000000000 --- a/lib/concurrent/index.d.ts +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
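Most public methods above bridge the promise and callback styles through promiseUtils.optionalCallback; that helper is not part of this diff, so the following is only a hedged sketch of the shape such a function usually has:

    function optionalCallback<T>(
      promise: Promise<T>,
      callback?: (err: Error | null, result?: T) => void): Promise<T> | void {
      if (!callback) {
        // No callback: hand the promise back to the caller.
        return promise;
      }
      // Callback provided: route the outcome into it and return void.
      promise.then(result => callback(null, result), err => callback(err));
    }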
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Client } from '../../'; -import { Readable } from 'stream'; - -export namespace concurrent { - interface ResultSetGroup { - errors: Error[]; - resultItems: any[]; - totalExecuted: number; - } - - type Options = { - collectResults?: boolean; - concurrencyLevel?: number; - executionProfile?: string; - maxErrors?: number; - raiseOnFirstError?: boolean; - } - - function executeConcurrent( - client: Client, - query: string, - parameters: any[][]|Readable, - options?: Options): Promise; - - function executeConcurrent( - client: Client, - queries: Array<{query: string, params: any[]}>, - options?: Options): Promise; -} \ No newline at end of file diff --git a/lib/concurrent/index.js b/lib/concurrent/index.ts similarity index 80% rename from lib/concurrent/index.js rename to lib/concurrent/index.ts index c618693bb..64fec2c33 100644 --- a/lib/concurrent/index.js +++ b/lib/concurrent/index.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { Readable, Stream } from "stream"; +import Client from "../client"; +import utils from "../utils"; -'use strict'; -const { Stream } = require('stream'); -const utils = require('../utils'); /** * Utilities for concurrent query execution with the DataStax Node.js Driver. @@ -64,7 +64,16 @@ const utils = require('../utils'); * * const result = await executeConcurrent(client, queryAndParameters); */ -function executeConcurrent(client, query, parameters, options) { +function executeConcurrent( + client: Client, + query: string, + parameters: any[][]|Readable, + options?: Options): Promise; +function executeConcurrent( + client: Client, + queries: Array<{query: string, params: any[]}>, + options?: Options): Promise; +function executeConcurrent(client: Client, query: string | Array<{query: string, params: any[]}>, parameters?: any[][]|Readable|Options, options?: Options): Promise { if (!client) { throw new TypeError('Client instance is not defined'); } @@ -82,18 +91,34 @@ function executeConcurrent(client, query, parameters, options) { } if (Array.isArray(query)) { - options = parameters; + options = parameters as object; return new ArrayBasedExecutor(client, null, query, options).execute(); } throw new TypeError('A string query or query and parameters array should be provided'); } +type Options = { + collectResults?: boolean; + concurrencyLevel?: number; + executionProfile?: string; + maxErrors?: number; + raiseOnFirstError?: boolean; +} + /** * Wraps the functionality to execute given an Array. - * @ignore + * @ignore @internal */ class ArrayBasedExecutor { + private _client: Client; + private _query: string; + private _parameters: any[][] | { query: any; params: any; }[]; + private _raiseOnFirstError: boolean; + private _concurrencyLevel: number; + private _queryOptions: { prepare: boolean; executionProfile: any; }; + private _result: ResultSetGroup; + private _stop: boolean; /** * @param {Client} client @@ -102,7 +127,7 @@ class ArrayBasedExecutor { * @param {Object} [options] The execution options. 
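Usage sketch for the executeConcurrent() overloads above, using a fixed query plus a parameter array (the concurrency level and the INSERT are illustrative, and the package-level concurrent export is assumed):

    import { Client, concurrent } from 'cassandra-driver';

    async function load(client: Client, rows: Array<[string, string]>): Promise<void> {
      const query = 'INSERT INTO users (id, name) VALUES (?, ?)';
      const result = await concurrent.executeConcurrent(client, query, rows, { concurrencyLevel: 32 });
      console.log(`Executed ${result.totalExecuted} statements, ${result.errors.length} errors`);
    }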
* @private */ - constructor(client, query, parameters, options) { + constructor(client: Client, query: string, parameters: Array> | Array<{ query; params; }>, options: Options) { this._client = client; this._query = query; this._parameters = parameters; @@ -131,7 +156,7 @@ class ArrayBasedExecutor { return Promise.resolve(); } - const item = this._parameters[index]; + const item = this._parameters[index] as { query; params; }; let query; let params; @@ -161,9 +186,21 @@ class ArrayBasedExecutor { /** * Wraps the functionality to execute given a Stream. - * @ignore + * @ignore @internal */ class StreamBasedExecutor { + private _client: Client; + private _query: string; + private _stream: Stream; + private _raiseOnFirstError: boolean; + private _concurrencyLevel: any; + private _queryOptions: { prepare: boolean; executionProfile: any; }; + private _inFlight: number; + private _index: number; + private _result: ResultSetGroup; + private _resolveCallback: Function; + private _rejectCallback: Function; + private _readEnded: boolean; /** * @param {Client} client @@ -172,7 +209,7 @@ class StreamBasedExecutor { * @param {Object} [options] The execution options. * @private */ - constructor(client, query, stream, options) { + constructor(client: Client, query: string, stream: Stream, options: Options) { this._client = client; this._query = query; this._stream = stream; @@ -189,7 +226,7 @@ class StreamBasedExecutor { } execute() { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { this._resolveCallback = resolve; this._rejectCallback = reject; @@ -224,8 +261,8 @@ class StreamBasedExecutor { this._setError(index, err); }) .then(() => { - if (this._stream.isPaused()) { - this._stream.resume(); + if ((this._stream as Readable).isPaused()) { + (this._stream as Readable).resume(); } if (this._readEnded && this._inFlight === 0) { @@ -239,7 +276,7 @@ class StreamBasedExecutor { }); if (this._inFlight >= this._concurrencyLevel) { - this._stream.pause(); + (this._stream as Readable).pause(); } } @@ -248,7 +285,7 @@ class StreamBasedExecutor { * @param {Error} [err] The stream read error. * @private */ - _setReadEnded(err) { + _setReadEnded(err?: Error) { if (!this._readEnded) { this._readEnded = true; @@ -277,10 +314,15 @@ class StreamBasedExecutor { * Represents results from different related executions. */ class ResultSetGroup { + private _collectResults: any; + private _maxErrors: any; + totalExecuted: number; + errors: Error[]; + resultItems: any[]; /** * Creates a new instance of {@link ResultSetGroup}. - * @ignore + * @ignore @internal */ constructor(options) { this._collectResults = options.collectResults; @@ -305,7 +347,7 @@ class ResultSetGroup { } } - /** @ignore */ + /** @ignore @internal */ setResultItem(index, rs) { this.totalExecuted++; @@ -316,7 +358,7 @@ class ResultSetGroup { /** * Internal method to set the error of an execution. 
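The StreamBasedExecutor above consumes an object-mode Readable whose chunks are parameter arrays; continuing the previous sketch (reusing `client` and the concurrent import):

    import { Readable } from 'stream';

    // Each chunk is one parameter array for the fixed query below.
    const paramStream = Readable.from([
      ['user-1', 'Alice'],
      ['user-2', 'Bob']
    ]);

    const result = await concurrent.executeConcurrent(
      client, 'INSERT INTO users (id, name) VALUES (?, ?)', paramStream, { concurrencyLevel: 16 });
    console.log(result.totalExecuted);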
- * @ignore + * @ignore @internal */ setError(index, err) { this.totalExecuted++; @@ -331,5 +373,13 @@ class ResultSetGroup { } } -exports.executeConcurrent = executeConcurrent; -exports.ResultSetGroup = ResultSetGroup; \ No newline at end of file +export { + executeConcurrent, + ResultSetGroup, + type Options +}; + +export default { + executeConcurrent, + ResultSetGroup +}; \ No newline at end of file diff --git a/lib/connection.js b/lib/connection.ts similarity index 89% rename from lib/connection.js rename to lib/connection.ts index 843828729..a6c1b537b 100644 --- a/lib/connection.js +++ b/lib/connection.ts @@ -13,30 +13,63 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import events from "events"; +import net from "net"; +import tls from "tls"; +import util from "util"; +import { Authenticator } from "./auth"; +import type { ClientOptions } from "./client"; +import Encoder from "./encoder"; +import errors from "./errors"; +import { ExecutionOptions } from "./execution-options"; +import type { PreparedQueryInfo } from "./metadata"; +import OperationState from "./operation-state"; +import promiseUtils from "./promise-utils"; +import requests, { Request } from "./requests"; +import StreamIdStack from "./stream-id-stack"; +import streams from "./streams"; +import types, { ResultSet } from "./types/index"; +import utils from "./utils"; +import { WriteQueue } from "./writers"; + -'use strict'; - -const events = require('events'); -const util = require('util'); -const tls = require('tls'); -const net = require('net'); - -const Encoder = require('./encoder.js'); -const { WriteQueue } = require('./writers'); -const requests = require('./requests'); -const streams = require('./streams'); -const utils = require('./utils'); -const types = require('./types'); -const errors = require('./errors'); -const StreamIdStack = require('./stream-id-stack'); -const OperationState = require('./operation-state'); -const promiseUtils = require('./promise-utils'); -const { ExecutionOptions } = require('./execution-options'); /** * Represents a connection to a Cassandra node + * @ignore @internal */ class Connection extends events.EventEmitter { + endpoint: string; + endpointFriendlyName: any; + _serverName: string; + address: any; + port: any; + _checkingVersion: boolean; + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; + protocolVersion: number; + _operations: Map; + _pendingWrites: any[]; + _preparing: Map; + _idleTimeout: NodeJS.Timeout; + timedOutOperations: number; + _streamIds: StreamIdStack; + _metrics: any; + encoder: Encoder; + keyspace: string; + emitDrain: boolean; + connected: boolean; + isSocketOpen: boolean; + send: (arg1: Request, arg2: ExecutionOptions) => Promise; + closeAsync: () => Promise; + openAsync: () => Promise; + prepareOnceAsync: (arg1: string, arg2: string) => Promise<{id; meta}>; + netClient: any; + protocol: any; + parser: any; + writeQueue: WriteQueue; + options: ClientOptions; + toBeKeyspace: string; + sendingIdleQuery: any; /** * Creates a new instance of Connection. 
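The block of field declarations added to Connection above follows a pattern repeated throughout this migration: properties that plain JavaScript created implicitly in the constructor must now be declared on the class so the TypeScript compiler knows about them. A minimal, generic before/after sketch (not a driver class):

    // Before (JS): the property only exists because the constructor assigns it.
    // class Pool { constructor() { this.size = 0; } }

    // After (TS): declare the field with its type, then assign it.
    class Pool {
      size: number;

      constructor() {
        this.size = 0;
      }
    }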
@@ -44,7 +77,7 @@ class Connection extends events.EventEmitter { * @param {Number|null} protocolVersion * @param {ClientOptions} options */ - constructor(endpoint, protocolVersion, options) { + constructor(endpoint: string, protocolVersion: number | null, options: ClientOptions) { super(); this.setMaxListeners(0); @@ -94,7 +127,7 @@ class Connection extends events.EventEmitter { this._checkingVersion = true; } - this.log = utils.log; + this.log = utils.log.bind(this); this.protocolVersion = protocolVersion; this._operations = new Map(); this._pendingWrites = []; @@ -172,6 +205,10 @@ class Connection extends events.EventEmitter { this.log('info', `Connecting to ${this.endpointFriendlyName}`); if (!this.options.sslOptions) { + //TODO: this highWaterMark is not doing what it's supposed to do + // because Socket does not even accept such option + // How should we fix this?? + // @ts-ignore this.netClient = new net.Socket({ highWaterMark: this.options.socketOptions.coalescingThreshold }); this.netClient.connect(this.port, this.address, function connectCallback() { self.log('verbose', `Socket connected to ${self.endpointFriendlyName}`); @@ -220,7 +257,7 @@ class Connection extends events.EventEmitter { * Determines the protocol version to use and sends the STARTUP request * @param {Function} callback */ - startup(callback) { + startup(callback: Function) { if (this._checkingVersion) { this.log('info', 'Trying to use protocol version 0x' + this.protocolVersion.toString(16)); } @@ -324,7 +361,7 @@ class Connection extends events.EventEmitter { } /** @param {Number} lowerVersion */ - decreaseVersion(lowerVersion) { + decreaseVersion(lowerVersion: number) { // The response already has the max protocol version supported by the Cassandra host. this.protocolVersion = lowerVersion; this.encoder.setProtocolVersion(lowerVersion); @@ -342,7 +379,7 @@ class Connection extends events.EventEmitter { /** * Cleans all internal state and invokes all pending callbacks of sent streams */ - clearAndInvokePending(innerError) { + clearAndInvokePending(innerError?) 
{ if (this._idleTimeout) { //Remove the idle request clearTimeout(this._idleTimeout); @@ -386,7 +423,7 @@ class Connection extends events.EventEmitter { * @param {String} authenticatorName * @param {Function} callback */ - startAuthenticating(authenticatorName, callback) { + startAuthenticating(authenticatorName: string, callback: Function) { if (!this.options.authProvider) { return callback(new errors.AuthenticationError('Authentication provider not set')); } @@ -407,17 +444,17 @@ class Connection extends events.EventEmitter { * @param {Buffer} token * @param {Function} callback */ - authenticate(authenticator, token, callback) { + authenticate(authenticator: Authenticator, token: Buffer, callback: Function) { const self = this; - let request = new requests.AuthResponseRequest(token); + let request : Request = new requests.AuthResponseRequest(token); if (this.protocolVersion === 1) { //No Sasl support, use CREDENTIALS - if (!authenticator.username) { + if (!authenticator["username"]) { return self.onAuthenticationError( callback, new errors.AuthenticationError('Only plain text authenticator providers allowed under protocol v1')); } - request = new requests.CredentialsRequest(authenticator.username, authenticator.password); + request = new requests.CredentialsRequest(authenticator["username"], authenticator["password"]); } this.sendStream(request, null, function authResponseCallback(err, result) { @@ -458,7 +495,7 @@ class Connection extends events.EventEmitter { * Executes a 'USE ' query, if keyspace is provided and it is different from the current keyspace * @param {?String} keyspace */ - async changeKeyspace(keyspace) { + async changeKeyspace(keyspace: string | null) { if (!keyspace || this.keyspace === keyspace) { return; } @@ -491,15 +528,18 @@ class Connection extends events.EventEmitter { * @param {String} keyspace * @param {function} callback */ - prepareOnce(query, keyspace, callback) { + prepareOnce(query: string, keyspace: string, callback: (...args: any[]) => void): void{ const name = ( keyspace || '' ) + query; let info = this._preparing.get(name); if (info) { // Its being already prepared + //TODO: I believe you actually don't need to return this + // @ts-ignore return info.once('prepared', callback); } + // @ts-ignore info = new events.EventEmitter(); info.setMaxListeners(0); info.once('prepared', callback); @@ -519,7 +559,7 @@ class Connection extends events.EventEmitter { * @param {function} callback Function to be called once the response has been received * @return {OperationState} */ - sendStream(request, execOptions, callback) { + sendStream(request: Request, execOptions: ExecutionOptions | null, callback: Function): OperationState { execOptions = execOptions || ExecutionOptions.empty(); // Create a new operation that will contain the request, callback and timeouts @@ -557,7 +597,7 @@ class Connection extends events.EventEmitter { * @param {Number} streamId * @private */ - _write(operation, streamId) { + _write(operation: OperationState, streamId: number) { operation.streamId = streamId; const self = this; this.writeQueue.push(operation, function writeCallback (err) { @@ -620,7 +660,7 @@ class Connection extends events.EventEmitter { * Returns an available streamId or null if there isn't any available * @returns {Number} */ - _getStreamId() { + _getStreamId(): number { return this._streamIds.pop(); } @@ -673,7 +713,7 @@ class Connection extends events.EventEmitter { * Returns the number of requests waiting for response * @returns {Number} */ - getInFlight() { + 
getInFlight(): number { return this._streamIds.inUse; } @@ -727,7 +767,7 @@ class Connection extends events.EventEmitter { * Multiple calls to this method have no additional side-effects. * @param {Function} [callback] */ - close(callback) { + close(callback?: () => void) { callback = callback || utils.noop; if (!this.connected && !this.isSocketOpen) { @@ -778,7 +818,7 @@ class Connection extends events.EventEmitter { * Gets the local IP address to which this connection socket is bound to. * @returns {String|undefined} */ - getLocalAddress() { + getLocalAddress(): string | undefined { if (!this.netClient) { return undefined; } @@ -787,4 +827,4 @@ class Connection extends events.EventEmitter { } } -module.exports = Connection; +export default Connection; diff --git a/lib/control-connection.js b/lib/control-connection.ts similarity index 91% rename from lib/control-connection.js rename to lib/control-connection.ts index 54b3e6171..aa50fadb3 100644 --- a/lib/control-connection.js +++ b/lib/control-connection.ts @@ -13,21 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const events = require('events'); -const util = require('util'); -const net = require('net'); -const dns = require('dns'); - -const errors = require('./errors'); -const { Host, HostMap } = require('./host'); -const Metadata = require('./metadata'); -const EventDebouncer = require('./metadata/event-debouncer'); -const Connection = require('./connection'); -const requests = require('./requests'); -const utils = require('./utils'); -const types = require('./types'); -const promiseUtils = require('./promise-utils'); +import dns from "dns"; +import events from "events"; +import net from "net"; +import util from "util"; +import type { ClientOptions } from "./client"; +import Connection from "./connection"; +import Encoder from "./encoder"; +import errors from "./errors"; +import { ProfileManager } from "./execution-profile"; +import { Host, HostMap } from "./host"; +import EventDebouncer from "./metadata/event-debouncer"; +import Metadata from "./metadata/index"; +import promiseUtils from "./promise-utils"; +import requests, { Request } from "./requests"; +import types, { ResultSet, Row } from "./types/index"; +import utils from "./utils"; + + const f = util.format; const selectPeers = "SELECT * FROM system.peers"; @@ -45,8 +48,29 @@ const supportedDbaas = 'DATASTAX_APOLLO'; /** * Represents a connection used by the driver to receive events and to check the status of the cluster. *

 * It uses an existing connection from the hosts' connection pool to maintain the driver metadata up-to-date.

+ * @ignore @internal */ class ControlConnection extends events.EventEmitter { + protocolVersion: number; + hosts: HostMap; + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; + metadata: Metadata; + private options: ClientOptions; + initialized: boolean; + host: Host; + connection: Connection; + private _addressTranslator: any; + private _reconnectionPolicy: any; + private _reconnectionSchedule: any; + private _isShuttingDown: boolean; + private _encoder: null; + private _debouncer: EventDebouncer; + private _profileManager: ProfileManager; + private _triedHosts: Map; + private _resolvedContactPoints: Map; + private _contactPoints: Set; + private _topologyChangeTimeout: NodeJS.Timeout; + private _nodeStatusChangeTimeout: NodeJS.Timeout; /** * Creates a new instance of ControlConnection. @@ -55,13 +79,13 @@ class ControlConnection extends events.EventEmitter { * @param {{borrowHostConnection: function, createConnection: function}} [context] An object containing methods to * allow dependency injection. */ - constructor(options, profileManager, context) { + constructor(options: Partial, profileManager: ProfileManager, context?: { borrowHostConnection: (host: Host) => Connection; createConnection: (contactPoint: string) => Promise; }) { super(); this.protocolVersion = null; this.hosts = new HostMap(); this.setMaxListeners(0); - this.log = utils.log; + this.log = utils.log.bind(this); Object.defineProperty(this, "options", { value: options, enumerable: false, writable: false}); /** @@ -116,7 +140,7 @@ class ControlConnection extends events.EventEmitter { * @param {String} name * @param {Boolean} isIPv6 */ - _addContactPoint(address, port, name, isIPv6) { + _addContactPoint(address: string | null, port: string, name: string, isIPv6: boolean) { if (address === null) { // Contact point could not be resolved, store that the resolution came back empty this._resolvedContactPoints.set(name, utils.emptyArray); @@ -196,7 +220,7 @@ class ControlConnection extends events.EventEmitter { const separatorIndex = address.lastIndexOf(':'); if (separatorIndex === -1) { - throw new new errors.DriverInternalError('The SNI endpoint address should contain ip/name and port'); + throw new errors.DriverInternalError('The SNI endpoint address should contain ip/name and port'); } const nameOrIp = address.substr(0, separatorIndex); @@ -264,7 +288,7 @@ class ControlConnection extends events.EventEmitter { * @param {Iterator} hostIterator * @returns {Connection!} */ - _borrowAConnection(hostIterator) { + _borrowAConnection(hostIterator: Iterator): Connection { let connection = null; while (!connection) { @@ -296,7 +320,7 @@ class ControlConnection extends events.EventEmitter { * @param {Iterator} contactPointsIterator * @returns {Promise} */ - async _borrowFirstConnection(contactPointsIterator) { + async _borrowFirstConnection(contactPointsIterator: Iterator): Promise { let connection = null; while (!connection) { @@ -326,7 +350,7 @@ class ControlConnection extends events.EventEmitter { } /** Default implementation for borrowing connections, that can be injected at constructor level */ - _borrowHostConnection(host) { + private _borrowHostConnection(host: Host) { // Borrow any open connection, regardless of the keyspace return host.borrowConnection(); } @@ -335,7 +359,7 @@ class ControlConnection extends events.EventEmitter { * Default implementation for creating initial connections, that can be injected at constructor level * @param {String} contactPoint */ - async _createConnection(contactPoint) 
{ + async _createConnection(contactPoint: string) { const c = new Connection(contactPoint, null, this.options); try { @@ -356,7 +380,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} isReconnecting Determines whether the refresh is being done because the ControlConnection is * switching to use this connection to this host. */ - async _refreshHosts(initializing, isReconnecting) { + async _refreshHosts(initializing: boolean, isReconnecting: boolean) { // Get a reference to the current connection as it might change from external events const c = this.connection; @@ -428,7 +452,7 @@ class ControlConnection extends events.EventEmitter { } else { try { this.connection = this._borrowAConnection(hostIterator); } - catch(err) { + catch(_err) { /* NODEJS-632: refresh nodes before getting hosts for reconnect since some hostnames may have * shifted during the flight. */ @@ -450,7 +474,7 @@ class ControlConnection extends events.EventEmitter { * iterator from query plan / host list

* @param {Iterator} [hostIterator] */ - async _refresh(hostIterator) { + async _refresh(hostIterator?: Iterator) { if (this._isShuttingDown) { this.log('info', 'The ControlConnection will not be refreshed as the Client is being shutdown'); return; @@ -463,7 +487,7 @@ class ControlConnection extends events.EventEmitter { try { if (!hostIterator) { this.log('info', 'Trying to acquire a connection to a new host'); - this._triedHosts = {}; + this._triedHosts = new Map(); hostIterator = await promiseUtils.newQueryPlan(this._profileManager.getDefaultLoadBalancing(), null, null); } @@ -511,13 +535,13 @@ class ControlConnection extends events.EventEmitter { * Acquires a connection and refreshes topology and keyspace metadata for the first time. * @returns {Promise} */ - async _initializeConnection() { + async _initializeConnection(): Promise { this.log('info', 'Getting first connection'); // Reset host and connection this.host = null; this.connection = null; - this._triedHosts = {}; + this._triedHosts = new Map(); // Randomize order of contact points resolved. const contactPointsIterator = utils.shuffleArray(Array.from(this._contactPoints))[Symbol.iterator](); @@ -554,6 +578,7 @@ class ControlConnection extends events.EventEmitter { const response = await this.connection.send(requests.options, null); // response.supported is a string multi map, decoded as an Object. + // @ts-expect-error const productType = response.supported && response.supported[supportedProductTypeKey]; if (Array.isArray(productType) && productType[0] === supportedDbaas) { this.metadata.setProductTypeAsDbaas(); @@ -632,7 +657,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} processNow * @returns {Promise} */ - handleSchemaChange(event, processNow) { + handleSchemaChange(event: { keyspace: string; isKeyspace: boolean; schemaChangeType; table; udt; functionName; aggregate; }, processNow: boolean): Promise { const self = this; let handler, cqlObject; @@ -698,7 +723,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} processNow * @returns {Promise} */ - _scheduleObjectRefresh(handler, keyspace, cqlObject, processNow) { + _scheduleObjectRefresh(handler: Function, keyspace: string, cqlObject: string, processNow: boolean): Promise { return this._debouncer.eventReceived({ handler, keyspace, cqlObject }, processNow); } @@ -707,7 +732,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} processNow * @returns {Promise} */ - _scheduleKeyspaceRefresh(keyspace, processNow) { + _scheduleKeyspaceRefresh(keyspace: string, processNow: boolean): Promise { return this._debouncer.eventReceived({ handler: () => this.metadata.refreshKeyspace(keyspace), keyspace @@ -715,7 +740,7 @@ class ControlConnection extends events.EventEmitter { } /** @returns {Promise} */ - _scheduleRefreshHosts() { + _scheduleRefreshHosts(): Promise { return this._debouncer.eventReceived({ handler: () => this._refreshHosts(false, false), all: true @@ -729,7 +754,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} setCurrentHost Determines if the host retrieved must be set as the current host * @param result */ - _setLocalInfo(initializing, setCurrentHost, c, result) { + _setLocalInfo(initializing: boolean, setCurrentHost: boolean, c: Connection, result) { if (!result || !result.rows || !result.rows.length) { this.log('warning', 'No local info could be obtained'); return; @@ -777,7 +802,7 @@ class ControlConnection extends events.EventEmitter { * connection the first time. 
* @param {ResultSet} result */ - async setPeersInfo(initializing, result) { + async setPeersInfo(initializing: boolean, result: ResultSet) { if (!result || !result.rows) { return; } @@ -870,7 +895,7 @@ class ControlConnection extends events.EventEmitter { * @param {Number} defaultPort * @returns {Promise} */ - getAddressForPeerHost(row, defaultPort) { + getAddressForPeerHost(row: object | Row, defaultPort: number): Promise { return new Promise(resolve => { let address = row['rpc_address']; const peer = row['peer']; @@ -933,7 +958,7 @@ class ControlConnection extends events.EventEmitter { * Waits for a connection to be available. If timeout expires before getting a connection it callbacks in error. * @returns {Promise} */ - _waitForReconnection() { + _waitForReconnection(): Promise { return new Promise((resolve, reject) => { const callback = promiseUtils.getCallback(resolve, reject); @@ -960,7 +985,7 @@ class ControlConnection extends events.EventEmitter { * @param {Boolean} [waitReconnect] Determines if it should wait for reconnection in case the control connection is not * connected at the moment. Default: true. */ - async query(cqlQuery, waitReconnect = true) { + async query(cqlQuery: string | Request, waitReconnect: boolean = true) { const queryOnConnection = async () => { if (!this.connection || this._isShuttingDown) { throw new errors.NoHostAvailableError({}, 'ControlConnection is not connected at the time'); @@ -979,7 +1004,7 @@ class ControlConnection extends events.EventEmitter { } /** @returns {Encoder} The encoder used by the current connection */ - getEncoder() { + getEncoder(): Encoder { if (!this._encoder) { throw new errors.DriverInternalError('Encoder is not defined'); } @@ -1027,7 +1052,7 @@ class ControlConnection extends events.EventEmitter { * Gets the local IP address to which the control connection socket is bound to. * @returns {String|undefined} */ - getLocalAddress() { + getLocalAddress(): string | undefined { if (!this.connection) { return undefined; } @@ -1039,7 +1064,7 @@ class ControlConnection extends events.EventEmitter { * Gets the address and port of host the control connection is connected to. * @returns {String|undefined} */ - getEndpoint() { + getEndpoint(): string | undefined { if (!this.connection) { return undefined; } @@ -1054,7 +1079,7 @@ class ControlConnection extends events.EventEmitter { * @param {Row} row * @private */ -function setDseParameters(host, row) { +function setDseParameters(host: Host, row: Row) { if (row['workloads'] !== undefined) { host.workloads = row['workloads']; } @@ -1070,4 +1095,4 @@ function setDseParameters(host, row) { } } -module.exports = ControlConnection; +export default ControlConnection; diff --git a/lib/datastax/cloud/index.js b/lib/datastax/cloud/index.ts similarity index 83% rename from lib/datastax/cloud/index.js rename to lib/datastax/cloud/index.ts index e44a4c7b9..f6498b5c0 100644 --- a/lib/datastax/cloud/index.js +++ b/lib/datastax/cloud/index.ts @@ -14,17 +14,17 @@ * limitations under the License. */ -'use strict'; - -const https = require('https'); -const fs = require('fs'); -const util = require('util'); -const AdmZip = require('adm-zip'); -const { URL } = require('url'); - -const errors = require('../../errors'); -const utils = require('../../utils'); -const { DsePlainTextAuthProvider, NoAuthProvider } = require('../../auth'); +//TODO: we need to rethink how we expose and document the types of the cloud features. +// Do our users know they can configure in the following ways? 
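For the TODO above about how users configure the cloud features: the documented path is a secure connect bundle plus credentials on the Client options, roughly as below (the bundle path and credentials are placeholders):

    import { Client } from 'cassandra-driver';

    const client = new Client({
      cloud: { secureConnectBundle: '/path/to/secure-connect-database.zip' },
      credentials: { username: 'client_id', password: 'client_secret' }
    });

    await client.connect();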
+import AdmZip from "adm-zip"; +import fs from "fs"; +import https from "https"; +import { URL } from "url"; +import util from "util"; +import { DsePlainTextAuthProvider, NoAuthProvider } from '../../auth/index'; +import type { ClientOptions } from '../../client'; +import errors from "../../errors"; +import utils from "../../utils"; // Use the callback-based method fs.readFile() instead of fs.promises as we have to support Node.js 8+ const readFile = util.promisify(fs.readFile); @@ -34,8 +34,9 @@ const readFile = util.promisify(fs.readFile); * setting the connection options * @param {ClientOptions} options * @returns {Promise} + * @internal */ -async function init(options) { +async function init(options: ClientOptions): Promise { if (!options.cloud) { return; } @@ -44,27 +45,35 @@ async function init(options) { await parseZipFile(cloudOptions); await getMetadataServiceInfoAsync(cloudOptions); - if (!cloudOptions.clientOptions.sslOptions.checkServerIdentity) { + if (!cloudOptions.clientOptions.sslOptions["checkServerIdentity"]) { // With SNI enabled, hostname (uuid) and CN will not match // Use a custom validation function to validate against the proxy address. // Note: this function is only called if the certificate passed all other checks, like CA validation. - cloudOptions.clientOptions.sslOptions.checkServerIdentity = (_, cert) => - checkServerIdentity(cert, cloudOptions.clientOptions.sni.address); + cloudOptions.clientOptions.sslOptions["checkServerIdentity"] = (_, cert) => + checkServerIdentity(cert, cloudOptions.clientOptions["sni"].address); } } class CloudOptions { - constructor(clientOptions) { + clientOptions: ClientOptions; + secureConnectBundle: any; + serviceUrl: string; + logEmitter: any; + contactPoints: string[]; + localDataCenter: string; + constructor(clientOptions: ClientOptions) { this.clientOptions = clientOptions; if (clientOptions.cloud.secureConnectBundle) { this.secureConnectBundle = clientOptions.cloud.secureConnectBundle; this.serviceUrl = null; } else { + //TODO: where was this clientOptions.cloud.endpoint exposed or documented? + // @ts-ignore this.serviceUrl = clientOptions.cloud.endpoint; } // Include a log emitter to enable logging within the cloud connection logic - this.logEmitter = clientOptions.logEmitter; + this.logEmitter = (clientOptions as any).logEmitter; this.contactPoints = null; this.localDataCenter = null; @@ -74,7 +83,7 @@ class CloudOptions { * The sslOptions in the client options from a given map. 
* @param {Map} zipEntries */ - setSslOptions(zipEntries) { + setSslOptions(zipEntries: Map) { this.clientOptions.sslOptions = Object.assign({ ca: [zipEntries.get('ca.crt') ], cert: zipEntries.get('cert'), @@ -106,7 +115,7 @@ class CloudOptions { * @param {CloudOptions} cloudOptions * @returns {Promise} */ -async function parseZipFile(cloudOptions) { +async function parseZipFile(cloudOptions: CloudOptions): Promise { if (cloudOptions.serviceUrl) { // Service url already was provided return; @@ -118,7 +127,7 @@ async function parseZipFile(cloudOptions) { const data = await readFile(cloudOptions.secureConnectBundle); const zip = new AdmZip(data); - const zipEntries = new Map(zip.getEntries().map(e => [e.entryName, e.getData()])); + const zipEntries : Map = new Map(zip.getEntries().map(e => [e.entryName, e.getData()])); if (!zipEntries.get('config.json')) { throw new TypeError('Config file must be contained in secure bundle'); @@ -140,7 +149,7 @@ async function parseZipFile(cloudOptions) { * @param {CloudOptions} cloudOptions * @param {Function} callback */ -function getMetadataServiceInfo(cloudOptions, callback) { +function getMetadataServiceInfo(cloudOptions: CloudOptions, callback: Function) { const regex = /^(.+?):(\d+)(.*)$/; const matches = regex.exec(cloudOptions.serviceUrl); callback = utils.callbackOnce(callback); @@ -159,6 +168,7 @@ function getMetadataServiceInfo(cloudOptions, callback) { const req = https.get(requestOptions, res => { let data = ''; + // @ts-ignore utils.log('verbose', `Connected to metadata service with SSL/TLS protocol ${res.socket.getProtocol()}`, {}, cloudOptions); res @@ -186,7 +196,7 @@ function getMetadataServiceInfo(cloudOptions, callback) { // Set the connect options cloudOptions.clientOptions.contactPoints = contactInfo['contact_points']; cloudOptions.clientOptions.localDataCenter = contactInfo['local_dc']; - cloudOptions.clientOptions.sni = { address: contactInfo['sni_proxy_address'] }; + cloudOptions.clientOptions["sni"] = { address: contactInfo['sni_proxy_address'] }; callback(); }); @@ -206,7 +216,7 @@ const getMetadataServiceInfoAsync = util.promisify(getMetadataServiceInfo); * Returns an Error that wraps the inner error obtained while fetching metadata information. * @private */ -function getServiceRequestError(err, requestOptions, isParsingError) { +function getServiceRequestError(err, requestOptions, isParsingError?) { const message = isParsingError ? 
'There was an error while parsing the metadata service information' : 'There was an error fetching the metadata information'; @@ -224,7 +234,7 @@ function getServiceRequestError(err, requestOptions, isParsingError) { * @internal * @ignore */ -function checkServerIdentity(cert, sniAddress) { +function checkServerIdentity(cert: { subject: { CN: string; }; subjectaltname?: string; }, sniAddress: string): Error | undefined { // Based on logic defined by the Node.js Core module // https://github.com/nodejs/node/blob/ff48009fefcecedfee2c6ff1719e5be3f6969049/lib/tls.js#L212-L290 @@ -262,9 +272,9 @@ function checkServerIdentity(cert, sniAddress) { if (!valid) { const error = new Error(`Host: ${hostName} is not cert's CN/altnames: ${cn} / ${altNames}`); - error.reason = error.message; - error.host = hostName; - error.cert = cert; + error["reason"] = error.message; + error["host"] = hostName; + error["cert"] = cert; return error; } } @@ -275,7 +285,7 @@ function checkServerIdentity(cert, sniAddress) { * @private * @returns {boolean} */ -function checkParts(hostParts, pattern) { +function checkParts(hostParts, pattern): boolean { // Empty strings, null, undefined, etc. never match. if (!pattern) { return false; @@ -332,7 +342,12 @@ function checkParts(hostParts, pattern) { return true; } -module.exports = { +export { + checkServerIdentity, + init +}; + +export default { checkServerIdentity, init }; \ No newline at end of file diff --git a/lib/datastax/graph/complex-type-helper.js b/lib/datastax/graph/complex-type-helper.ts similarity index 90% rename from lib/datastax/graph/complex-type-helper.js rename to lib/datastax/graph/complex-type-helper.ts index 130bf0a1d..e50613517 100644 --- a/lib/datastax/graph/complex-type-helper.js +++ b/lib/datastax/graph/complex-type-helper.ts @@ -13,14 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import Encoder from "../../encoder"; +import types from "../../types/index"; +import { GraphTypeWrapper, UdtGraphWrapper } from "./wrappers"; -'use strict'; -const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers'); -const types = require('../../types'); -const Encoder = require('../../encoder'); const { dataTypes } = types; +/** + * @internal + */ function getTypeDefinitionByValue(value) { if (value instanceof types.Tuple) { return { @@ -92,8 +94,14 @@ function getUdtTypeDefinition(udtInfo) { }; } +/** + * @internal + */ function getUdtTypeDefinitionByValue(wrappedValue) { return getUdtTypeDefinition(wrappedValue.udtInfo); } -module.exports = { getTypeDefinitionByValue, getUdtTypeDefinitionByValue }; \ No newline at end of file +export { + getTypeDefinitionByValue, + getUdtTypeDefinitionByValue +}; diff --git a/lib/datastax/graph/custom-type-serializers.js b/lib/datastax/graph/custom-type-serializers.ts similarity index 90% rename from lib/datastax/graph/custom-type-serializers.js rename to lib/datastax/graph/custom-type-serializers.ts index e25ef3bbc..6de90d3a9 100644 --- a/lib/datastax/graph/custom-type-serializers.js +++ b/lib/datastax/graph/custom-type-serializers.ts @@ -13,20 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
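checkServerIdentity() above plugs into the standard Node.js TLS hostname-verification hook; a hedged sketch of how such a hook is wired (host and port are illustrative):

    import tls from 'tls';

    const socket = tls.connect({
      host: 'proxy.example.com',
      port: 9042,
      // Invoked only after the certificate chain has already been validated;
      // returning undefined accepts the peer, returning an Error rejects it.
      checkServerIdentity: (_servername, cert) => checkServerIdentity(cert as any, 'proxy.example.com')
    });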
*/ -'use strict'; +import { LineString, Point, Polygon } from "../../geometry/index"; +import types, { dataTypes, Duration, Tuple } from "../../types/index"; +import utils from "../../utils"; +import { getTypeDefinitionByValue, getUdtTypeDefinitionByValue } from "./complex-type-helper"; +import { Edge } from "./structure"; +import { GraphTypeWrapper, UdtGraphWrapper } from "./wrappers"; + -const types = require('../../types'); -const utils = require('../../utils'); -const { getTypeDefinitionByValue, getUdtTypeDefinitionByValue } = require('./complex-type-helper'); -const { Point, Polygon, LineString } = require('../../geometry'); -const { Edge } = require('./structure'); -const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers'); -const { Tuple, dataTypes } = types; const typeKey = '@type'; const valueKey = '@value'; class EdgeDeserializer { + key: string; + reader: any; constructor() { this.key = 'g:Edge'; } @@ -43,13 +44,15 @@ class EdgeDeserializer { * @private */ class StringBasedTypeSerializer { + key: any; + targetType: any; /** * Creates a new instance of the deserializer. * @param {String} key * @param {Function} targetType */ - constructor(key, targetType) { + constructor(key: string, targetType: Function) { if (!key) { throw new Error('Deserializer must provide a type key'); } @@ -175,6 +178,9 @@ class PolygonSerializer extends StringBasedTypeSerializer { } class TupleSerializer { + key: string; + reader: any; + writer: any; constructor() { this.key = 'dse:Tuple'; } @@ -197,7 +203,7 @@ class TupleSerializer { } /** @param {Tuple} tuple */ - serialize(tuple) { + serialize(tuple: Tuple) { const result = { 'cqlType': 'tuple', 'definition': tuple.elements.map(getTypeDefinitionByValue), @@ -216,6 +222,8 @@ class TupleSerializer { } class DurationSerializer { + key: string; + reader: any; constructor() { this.key = 'dse:Duration'; } @@ -229,7 +237,7 @@ class DurationSerializer { } /** @param {Duration} value */ - serialize(value) { + serialize(value: Duration) { return { [typeKey]: this.key, [valueKey]: { @@ -246,6 +254,9 @@ class DurationSerializer { } class UdtSerializer { + key: string; + reader: any; + writer: any; constructor() { this.key = 'dse:UDT'; } @@ -281,7 +292,9 @@ class UdtSerializer { } class InternalSerializer { - constructor(name, transformFn) { + _name: any; + _transformFn: any; + constructor(name, transformFn?) { this._name = name; this._transformFn = transformFn || (x => x); } @@ -304,6 +317,7 @@ const graphSONSerializerByCqlType = { }; class GraphTypeWrapperSerializer { + key: string; constructor() { // Use a fixed name that doesn't conflict with TinkerPop and DS Graph this.key = 'client:wrapper'; @@ -344,6 +358,9 @@ const serializersArray = [ DurationSerializer ]; +/** + * @ignore @internal + */ function getCustomSerializers() { const customSerializers = {}; @@ -359,4 +376,4 @@ function getCustomSerializers() { return customSerializers; } -module.exports = getCustomSerializers; \ No newline at end of file +export default getCustomSerializers; \ No newline at end of file diff --git a/lib/datastax/graph/graph-executor.js b/lib/datastax/graph/graph-executor.ts similarity index 84% rename from lib/datastax/graph/graph-executor.js rename to lib/datastax/graph/graph-executor.ts index 701582675..5ed224553 100644 --- a/lib/datastax/graph/graph-executor.js +++ b/lib/datastax/graph/graph-executor.ts @@ -13,15 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import type Client from "../../client"; +import type { GraphQueryOptions } from "../../client"; +import { type ClientOptions } from "../../client"; +import type { Host } from "../../host"; +import policies from "../../policies/index"; +import utils from "../../utils"; +import getCustomTypeSerializers from "./custom-type-serializers"; +import { GraphSON2Reader, GraphSON2Writer, GraphSON3Reader, GraphSON3Writer } from "./graph-serializer"; +import { GraphExecutionOptions, graphProtocol } from "./options"; +import GraphResultSet from "./result-set"; -'use strict'; - -const utils = require('../../utils'); -const policies = require('../../policies'); -const GraphResultSet = require('./result-set'); -const { GraphSON2Reader, GraphSON2Writer, GraphSON3Reader, GraphSON3Writer } = require('./graph-serializer'); -const getCustomTypeSerializers = require('./custom-type-serializers'); -const { GraphExecutionOptions, graphProtocol } = require('./options'); const graphLanguageGroovyString = 'gremlin-groovy'; const graphEngineCore = 'Core'; @@ -42,11 +44,16 @@ const defaultWriters = new Map([ [ graphProtocol.graphson3, getDefaultWriter(graphSON3Writer) ] ]); +type QueryObject = {graphLanguage: any, value: any, queryWriterFactory: any}; /** * Internal class that contains the logic for executing a graph traversal. - * @ignore + * @ignore @internal */ class GraphExecutor { + _client: Client; + _handler: Function; + _defaultProfileRetryPolicy: any; + _graphBaseOptions: any; /** * Creates a new instance of GraphExecutor. @@ -54,7 +61,7 @@ class GraphExecutor { * @param {ClientOptions} rawOptions * @param {Function} handler */ - constructor(client, rawOptions, handler) { + constructor(client: Client, rawOptions: ClientOptions, handler: Function) { this._client = client; this._handler = handler; @@ -70,6 +77,8 @@ class GraphExecutor { // As the default retry policy might retry non-idempotent queries // we should use default retry policy for all graph queries that does not retry retry: new policies.retry.FallthroughRetryPolicy() + //TODO: what are we trying to do here?? The last argument is alwasy ignored + // @ts-ignore }, rawOptions.graphOptions, client.profileManager.getDefault().graphOptions); if (this._graphBaseOptions.readTimeout === null) { @@ -79,11 +88,11 @@ class GraphExecutor { /** * Executes the graph traversal. - * @param {String|Object} query + * @param {String|QueryObject} query * @param {Object} parameters * @param {GraphQueryOptions} options */ - async send(query, parameters, options) { + async send(query: string | QueryObject, parameters: object, options: GraphQueryOptions) { if (Array.isArray(parameters)) { throw new TypeError('Parameters must be a Object instance as an associative array'); } @@ -105,11 +114,12 @@ class GraphExecutor { if (isQueryObject) { // Use the provided graph language to override the current - execOptions.setGraphLanguage(query.graphLanguage); + execOptions.setGraphLanguage((query as QueryObject).graphLanguage); } this._setGraphProtocol(execOptions); execOptions.setGraphPayload(); + // @ts-ignore parameters = GraphExecutor._buildGraphParameters(parameters, execOptions.getGraphSubProtocol()); if (typeof query !== 'string') { @@ -127,7 +137,7 @@ class GraphExecutor { query = queryWriter(!isQueryObject ? 
query : query.value); } - return await this._executeGraphQuery(query, parameters, execOptions); + return await this._executeGraphQuery(query as string, parameters, execOptions); } /** @@ -138,7 +148,7 @@ class GraphExecutor { * @returns {Promise} * @private */ - async _executeGraphQuery(query, parameters, execOptions) { + async _executeGraphQuery(query: string, parameters: object, execOptions: GraphExecutionOptions): Promise { const result = await this._handler.call(this._client, query, parameters, execOptions); // Instances of rowParser transform Row instances into Traverser instances. @@ -153,7 +163,7 @@ class GraphExecutor { * @returns {Promise} * @private */ - async _getAnalyticsMaster() { + async _getAnalyticsMaster(): Promise { try { const result = await this._client.execute('CALL DseClientTool.getAnalyticsGraphServer()', utils.emptyArray); @@ -197,7 +207,7 @@ class GraphExecutor { * * @param {GraphExecutionOptions} execOptions */ - _setGraphProtocol(execOptions) { + _setGraphProtocol(execOptions: GraphExecutionOptions) { let protocol = execOptions.getGraphSubProtocol(); if (protocol) { @@ -230,7 +240,7 @@ class GraphExecutor { * @returns {string[]|null} * @private */ - static _buildGraphParameters(parameters, protocol) { + static _buildGraphParameters(parameters: Array | Function | null, protocol: string): string[] | null { if (!parameters || typeof parameters !== 'object') { return null; } @@ -277,4 +287,4 @@ function getDefaultWriter(writer) { return value => writer.write(value); } -module.exports = GraphExecutor; \ No newline at end of file +export default GraphExecutor; \ No newline at end of file diff --git a/lib/datastax/graph/graph-serializer.js b/lib/datastax/graph/graph-serializer.ts similarity index 83% rename from lib/datastax/graph/graph-serializer.js rename to lib/datastax/graph/graph-serializer.ts index 4331161a5..ea6926c59 100644 --- a/lib/datastax/graph/graph-serializer.js +++ b/lib/datastax/graph/graph-serializer.ts @@ -1,39 +1,37 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Copyright DataStax, Inc. * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
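The GraphExecutor above ultimately backs Client#executeGraph; a usage sketch from the caller's side (graph name and traversal are illustrative):

    const result = await client.executeGraph(
      'g.V().hasLabel(vertexLabel).limit(n)',
      { vertexLabel: 'person', n: 10 },
      { graphName: 'my_graph' });

    // GraphResultSet is iterable and also exposes first() / toArray().
    for (const vertex of result) {
      console.log(vertex.label, vertex.id);
    }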
*/ +import typeSerializers, { type TypeSerializer } from "./type-serializers"; /** * @module datastax/graph/tinkerpop/graphSerializers - * @ignore + * @ignore @internal */ /** * @author Jorge Bay Gondra */ -'use strict'; -const typeSerializers = require('./type-serializers'); /** * GraphSON2 writer. */ class GraphSON2Writer { + _options: { serializers?: object; }; + _serializers: any[]; /** * @param {Object} [options] @@ -41,7 +39,7 @@ class GraphSON2Writer { * serializer instances as values, ie: { 'g:Int64': longSerializer }. * @constructor */ - constructor(options) { + constructor(options: { serializers?: object; }) { this._options = options || {}; // Create instance of the default serializers this._serializers = this.getDefaultSerializers().map(serializerConstructor => { @@ -67,7 +65,7 @@ class GraphSON2Writer { * Gets the default serializers to be used. * @returns {Array} */ - getDefaultSerializers() { + getDefaultSerializers(): Array { return graphSON2Serializers; } @@ -101,7 +99,7 @@ class GraphSON2Writer { * @param {Object} obj * @returns {String} */ - write(obj) { + write(obj: object): string { return JSON.stringify(this.adaptObject(obj)); } } @@ -119,6 +117,8 @@ class GraphSON3Writer extends GraphSON2Writer { * GraphSON2 reader. */ class GraphSON2Reader { + _options: { serializers?: object; }; + _deserializers: Record; /** * GraphSON Reader * @param {Object} [options] @@ -126,7 +126,7 @@ class GraphSON2Reader { * deserializer instances as values, ie: { 'g:Int64': longSerializer }. * @constructor */ - constructor(options) { + constructor(options: { serializers?: object; }) { this._options = options || {}; this._deserializers = {}; @@ -155,7 +155,7 @@ class GraphSON2Reader { * Gets the default deserializers as an associative array. * @returns {Object} */ - getDefaultDeserializers() { + getDefaultDeserializers(): object { return graphSON2Deserializers; } @@ -250,11 +250,6 @@ const graphSON3Serializers = graphSON2Serializers.concat([ typeSerializers.MapSerializer ]); -module.exports = { - GraphSON3Writer, - GraphSON3Reader, - GraphSON2Writer, - GraphSON2Reader, - GraphSONWriter: GraphSON3Writer, - GraphSONReader: GraphSON3Reader -}; \ No newline at end of file +export { + GraphSON2Reader, GraphSON2Writer, GraphSON3Reader, GraphSON3Writer, GraphSON3Reader as GraphSONReader, GraphSON3Writer as GraphSONWriter +}; diff --git a/lib/datastax/graph/index.d.ts b/lib/datastax/graph/index.d.ts deleted file mode 100644 index b6e860c42..000000000 --- a/lib/datastax/graph/index.d.ts +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { types } from '../../types'; - -export namespace graph { - interface Edge extends Element { - outV?: Vertex; - outVLabel?: string; - inV?: Vertex; - inVLabel?: string; - properties?: object; - } - - interface Element { - id: any; - label: string; - } - - class GraphResultSet implements Iterator { - constructor(rs: types.ResultSet); - - first(): any; - - toArray(): any[]; - - values(): Iterator; - - next(value?: any): IteratorResult; - } - - interface Path { - labels: any[]; - objects: any[]; - } - - interface Property { - value: any - key: any - } - - interface Vertex extends Element { - properties?: { [key: string]: any[] } - } - - interface VertexProperty extends Element { - value: any - key: string - properties?: any - } - - function asDouble(value: number): object; - - function asFloat(value: number): object; - - function asInt(value: number): object; - - function asTimestamp(value: Date): object; - - function asUdt(value: object): object; - - interface EnumValue { - toString(): string - } - - namespace t { - const id: EnumValue; - const key: EnumValue; - const label: EnumValue; - const value: EnumValue; - } - - namespace direction { - // `in` is a reserved word - const in_: EnumValue; - const out: EnumValue; - const both: EnumValue; - } -} \ No newline at end of file diff --git a/lib/datastax/graph/index.js b/lib/datastax/graph/index.ts similarity index 54% rename from lib/datastax/graph/index.js rename to lib/datastax/graph/index.ts index a0333a06e..932e62739 100644 --- a/lib/datastax/graph/index.js +++ b/lib/datastax/graph/index.ts @@ -13,19 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import getCustomTypeSerializers from "./custom-type-serializers"; +import GraphResultSet from "./result-set"; +import { Edge, Element, Path, Property, Vertex, VertexProperty } from "./structure"; +import { asDouble, asFloat, asInt, asTimestamp, asUdt, GraphTypeWrapper, UdtGraphWrapper } from "./wrappers"; -/** - * Graph module. - * @module datastax/graph - */ -const GraphResultSet = require('./result-set'); -const getCustomTypeSerializers = require('./custom-type-serializers'); -const { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper} = require('./wrappers'); -const { Edge, Element, Path, Property, Vertex, VertexProperty } = require('./structure'); class EnumValue { + /** @internal */ + typeName: any; + /** @internal */ + elementName: any; + /** @internal */ constructor(typeName, elementName) { this.typeName = typeName; this.elementName = elementName; @@ -46,37 +46,58 @@ const t = { value: new EnumValue('T', 'value'), }; +const directionIn = new EnumValue('Direction', 'IN'); /** * Represents the edge direction. */ const direction = { 'both': new EnumValue('Direction', 'BOTH'), - 'in': new EnumValue('Direction', 'IN'), - 'out': new EnumValue('Direction', 'OUT') + 'in': directionIn, + 'out': new EnumValue('Direction', 'OUT'), + // `in` is a reserved keyword depending on the context + // TinkerPop JavaScript GLV only exposes `in` but it can lead to issues for TypeScript users and others. + // Expose an extra property to represent `Direction.IN`. + 'in_': directionIn }; -// `in` is a reserved keyword depending on the context -// TinkerPop JavaScript GLV only exposes `in` but it can lead to issues for TypeScript users and others. -// Expose an extra property to represent `Direction.IN`. 
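// Illustrative usage sketch (assumes the published cassandra-driver entry point):
// thanks to the aliasing above, `direction.in_` can be used wherever `Direction.IN`
// is needed, avoiding the reserved word `in`.
import { datastax } from "cassandra-driver";

const { direction, t } = datastax.graph;
const inbound = direction.in_;          // same EnumValue instance as direction['in']
console.log(inbound === direction.out); // false: distinct Direction values
console.log(t.label);                   // T tokens (id, key, label, value) are exposed alongside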
-direction.in_ = direction.in; - -module.exports = { +export default { Edge, Element, Path, Property, Vertex, VertexProperty, - asInt, asDouble, asFloat, asTimestamp, asUdt, direction, + /** @internal */ getCustomTypeSerializers, GraphResultSet, + /** @internal */ GraphTypeWrapper, t, + /** @internal */ UdtGraphWrapper -}; \ No newline at end of file +}; + +export { + asDouble, + asFloat, asInt, asTimestamp, + asUdt, + direction, Edge, + Element, + /** @internal */ + getCustomTypeSerializers, + GraphResultSet, + /** @internal */ + GraphTypeWrapper, + Path, + Property, t, + /** @internal */ + UdtGraphWrapper, + Vertex, + VertexProperty +}; diff --git a/lib/datastax/graph/options.js b/lib/datastax/graph/options.ts similarity index 85% rename from lib/datastax/graph/options.js rename to lib/datastax/graph/options.ts index 2e0e7e711..8e8d23aa7 100644 --- a/lib/datastax/graph/options.js +++ b/lib/datastax/graph/options.ts @@ -13,11 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const util = require('util'); -const types = require('../../types'); -const utils = require('../../utils'); -const { DefaultExecutionOptions, proxyExecuteKey } = require('../../execution-options'); +import util from "util"; +import type Client from "../../client"; +import type { GraphQueryOptions, QueryOptions } from "../../client"; +import { DefaultExecutionOptions, proxyExecuteKey } from "../../execution-options"; +import type { ExecutionProfile, ProfileManager } from "../../execution-profile"; +import type { Host } from "../../host"; +import type { RetryPolicy } from "../../policies/retry"; +import types from "../../types/index"; +import utils from "../../utils"; + + const Long = types.Long; let consistencyNames; @@ -81,11 +87,11 @@ const payloadKeys = Object.freeze({ * @returns {DseClientOptions} * @private */ -function getDefaultGraphOptions(profileManager, baseOptions, defaultRetryPolicy, profile) { +function getDefaultGraphOptions(profileManager: ProfileManager, baseOptions, defaultRetryPolicy: RetryPolicy | null, profile: ExecutionProfile): GraphQueryOptions { return profileManager.getOrCreateGraphOptions(profile, function createDefaultOptions() { - const profileOptions = profile.graphOptions || utils.emptyObject; + const profileOptions : ExecutionProfile["graphOptions"] = profile.graphOptions || utils.emptyObject; const defaultProfile = profileManager.getDefault(); - const options = { + const options : GraphQueryOptions = { customPayload: { [payloadKeys.language]: utils.allocBufferFromString(profileOptions.language || baseOptions.language), [payloadKeys.source]: utils.allocBufferFromString(profileOptions.source || baseOptions.source) @@ -149,7 +155,7 @@ function getDefaultGraphOptions(profileManager, baseOptions, defaultRetryPolicy, * @param {Function} [converter] * @private */ -function setPayloadKey(payload, profileOptions, key, value, converter) { +function setPayloadKey(payload: object, profileOptions: QueryOptions, key: string, value: string | number | null | undefined, converter?: Function) { converter = converter || utils.allocBufferFromString; if (value === null) { // Use null to avoid set payload for a key @@ -168,7 +174,7 @@ function setPayloadKey(payload, profileOptions, key, value, converter) { function longBuffer(value) { value = Long.fromNumber(value); - return Long.toBuffer(value); + return Long["toBuffer"](value); } /** @@ -176,7 +182,7 @@ function longBuffer(value) { * @param {Number} consistency * @private */ 
-function getConsistencyName(consistency) { +function getConsistencyName(consistency: number) { // eslint-disable-next-line if (consistency == undefined) { //null or undefined => undefined @@ -215,6 +221,10 @@ function loadConsistencyNames() { * @ignore */ class GraphExecutionOptions extends DefaultExecutionOptions { + _defaultGraphOptions: GraphQueryOptions; + _preferredHost: Host; + _graphSubProtocol: any; + _graphLanguage: any; /** * Creates a new instance of GraphExecutionOptions. @@ -223,7 +233,7 @@ class GraphExecutionOptions extends DefaultExecutionOptions { * @param graphBaseOptions The default graph base options. * @param {RetryPolicy} defaultProfileRetryPolicy */ - constructor(queryOptions, client, graphBaseOptions, defaultProfileRetryPolicy) { + constructor(queryOptions: GraphQueryOptions, client: Client, graphBaseOptions, defaultProfileRetryPolicy: RetryPolicy) { queryOptions = queryOptions || utils.emptyObject; super(queryOptions, client, null); @@ -245,7 +255,7 @@ class GraphExecutionOptions extends DefaultExecutionOptions { } getGraphSource() { - return this.getRawQueryOptions().graphSource || this._defaultGraphOptions.graphSource; + return this.getRawQueryOptions()["graphSource"] || this._defaultGraphOptions.graphSource; } getGraphLanguage() { @@ -257,7 +267,7 @@ class GraphExecutionOptions extends DefaultExecutionOptions { } getGraphName() { - return utils.ifUndefined(this.getRawQueryOptions().graphName, this._defaultGraphOptions.graphName); + return utils.ifUndefined(this.getRawQueryOptions()["graphName"], this._defaultGraphOptions.graphName); } getGraphSubProtocol() { @@ -279,7 +289,7 @@ class GraphExecutionOptions extends DefaultExecutionOptions { } getRowParser() { - const factory = this.getRawQueryOptions().rowParserFactory; + const factory = this.getRawQueryOptions()["rowParserFactory"]; if (!factory) { return null; @@ -289,7 +299,7 @@ class GraphExecutionOptions extends DefaultExecutionOptions { } getQueryWriter() { - const factory = this.getRawQueryOptions().queryWriterFactory; + const factory = this.getRawQueryOptions()["queryWriterFactory"]; if (!factory) { return null; @@ -308,12 +318,12 @@ class GraphExecutionOptions extends DefaultExecutionOptions { // Override the payload for DSE Graph exclusive options setPayloadKey(payload, defaultOptions, payloadKeys.language, this.getGraphLanguage() !== this._defaultGraphOptions.graphLanguage ? 
this.getGraphLanguage() : undefined); - setPayloadKey(payload, defaultOptions, payloadKeys.source, options.graphSource); - setPayloadKey(payload, defaultOptions, payloadKeys.name, options.graphName); + setPayloadKey(payload, defaultOptions, payloadKeys.source, options["graphSource"]); + setPayloadKey(payload, defaultOptions, payloadKeys.name, options["graphName"]); setPayloadKey(payload, defaultOptions, payloadKeys.readConsistency, - getConsistencyName(options.graphReadConsistency)); + getConsistencyName(options["graphReadConsistency"])); setPayloadKey(payload, defaultOptions, payloadKeys.writeConsistency, - getConsistencyName(options.graphWriteConsistency)); + getConsistencyName(options["graphWriteConsistency"])); // Use the read timeout defined by the user or the one default to graph executions setPayloadKey(payload, defaultOptions, payloadKeys.timeout, @@ -327,8 +337,8 @@ class GraphExecutionOptions extends DefaultExecutionOptions { } } -module.exports = { +export { GraphExecutionOptions, graphProtocol, payloadKeys -}; \ No newline at end of file +}; diff --git a/lib/datastax/graph/result-set.js b/lib/datastax/graph/result-set.js deleted file mode 100644 index 8e8467065..000000000 --- a/lib/datastax/graph/result-set.js +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const utils = require('../../utils'); - -/** - * Creates a new instance of GraphResultSet. - * @class - * @classdesc - * Represents the result set of a [graph query execution]{@link Client#executeGraph} containing vertices, edges or - * scalar values depending on the query. - *

- * It allows iteration of the items using for..of statements under ES2015 and exposes
- * forEach(), first() and toArray() to access the underlying items.
- * @example - * for (let vertex of result} { ... } - * @example - * const arr = result.toArray(); - * @example - * const vertex = result.first(); - * @param {ResultSet} result - * @param {Function} [rowParser] - * @alias module:datastax/graph~GraphResultSet - * @constructor - */ -function GraphResultSet(result, rowParser) { - /** - * Information on the execution of a successful query: - * @member {Object} - * @property {Number} achievedConsistency The consistency level that has been actually achieved by the query. - * @property {String} queriedHost The Cassandra host that coordinated this query. - * @property {Object} triedHosts Gets the associative array of host that were queried before getting a valid response, - * being the last host the one that replied correctly. - * @property {Uuid} traceId Identifier of the trace session. - * @property {Array.} warnings Warning messages generated by the server when executing the query. - */ - this.info = result.info; - const rows = result.rows; - rowParser = rowParser || parsePlainJsonRow; - - /** - * This property has been deprecated because it may return a lower value than the actual length of the results. - * Use toArray() instead. - *

- * Gets the length of the result.
- * @deprecated Use toArray() instead. This property will be removed in the following major version. - * @member {Number} - */ - this.length = result.rowLength; - - /** - * A string token representing the current page state of query. It can be used in the following executions to - * continue paging and retrieve the remained of the result for the query. - * @member {String} - */ - this.pageState = result.pageState; - - /** - * Returns the first element of the result or null if the result is empty. - * @returns {Object} - */ - this.first = function first() { - const iterator = this.values(); - const item = iterator.next(); - if (item.done) { - return null; - } - - return item.value; - }; - - /** - * Executes a provided function once per result element. - * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. - * @param {Object} [thisArg] Value to use as this when executing callback. - */ - this.forEach = function forEach(callback, thisArg) { - if (!rows.length) { - return; - } - const iterator = this.values(); - let item = iterator.next(); - let index = 0; - while (!item.done) { - callback.call(thisArg || this, item.value, index++); - item = iterator.next(); - } - }; - - /** - * Results an Array of graph result elements (vertex, edge, scalar). - * @returns {Array} - */ - this.toArray = function toArray() { - if (!rows.length) { - return utils.emptyArray; - } - return utils.iteratorToArray(this.values()); - }; - - /** - * Returns a new Iterator object that contains the values for each index in the result. - * @returns {Iterator} - */ - this.values = function* values() { - for (const traverser of this.getTraversers()) { - const bulk = traverser.bulk || 1; - - for (let j = 0; j < bulk; j++) { - yield traverser.object; - } - } - }; - - /** - * Gets the traversers represented contained in the result set. - * @returns {Iterator} - */ - this.getTraversers = function* () { - for (const row of rows) { - yield rowParser(row); - } - }; -} - -if (typeof Symbol !== 'undefined' && typeof Symbol.iterator === 'symbol') { - // Make iterable - GraphResultSet.prototype[Symbol.iterator] = function getIterator() { - return this.values(); - }; -} - -/** - * @param {Row} row - * @private - */ -function parsePlainJsonRow(row) { - const parsed = JSON.parse(row['gremlin']); - return { object: parsed.result, bulk: parsed.bulk || 1 }; -} - -module.exports = GraphResultSet; \ No newline at end of file diff --git a/lib/datastax/graph/result-set.ts b/lib/datastax/graph/result-set.ts new file mode 100644 index 000000000..424236539 --- /dev/null +++ b/lib/datastax/graph/result-set.ts @@ -0,0 +1,155 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ResultSet } from "../../types"; +import utils from "../../utils"; + +/** + * Represents the result set of a [graph query execution]{@link Client#executeGraph} containing vertices, edges, or + * scalar values depending on the query. + *

+ * It allows iteration of the items using for..of statements under ES2015 and exposes
+ * forEach(), first(), and toArray() to access the underlying items.
+ * @example + * for (let vertex of result) { ... } + * @example + * const arr = result.toArray(); + * @example + * const vertex = result.first(); + * @alias module:datastax/graph~GraphResultSet + */ +class GraphResultSet implements Iterable { + //TODO: the following three were not exposed in .d.ts. I think we should. + info: typeof ResultSet.prototype.info; + length: number; + pageState: string; + private rows: any[]; + private rowParser: Function; + + /** + * @param {ResultSet} result The result set from the query execution. + * @param {Function} [rowParser] Optional row parser function. + * @constructor + */ + constructor(result: ResultSet, rowParser: Function = parsePlainJsonRow) { + this.info = result.info; + this.rows = result.rows; + this.rowParser = rowParser || parsePlainJsonRow; + + /** + * This property has been deprecated because it may return a lower value than the actual length of the results. + * Use toArray() instead. + *

+ * Gets the length of the result.
+ * @deprecated Use toArray() instead. This property will be removed in the following major version. + */ + this.length = result.rowLength; + + /** + * A string token representing the current page state of the query. It can be used in the following executions to + * continue paging and retrieve the remainder of the result for the query. + */ + this.pageState = result.pageState; + } + + /** + * Returns the first element of the result or null if the result is empty. + * @returns {Object} + */ + first(): object | null { + const iterator = this.values(); + const item = iterator.next(); + if (item.done) { + return null; + } + + return item.value; + } + + /** + * Executes a provided function once per result element. + * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. + * @param {Object} [thisArg] Value to use as this when executing callback. + */ + forEach(callback: Function, thisArg?: object): void { + if (!this.rows.length) { + return; + } + const iterator = this.values(); + let item = iterator.next(); + let index = 0; + while (!item.done) { + callback.call(thisArg || this, item.value, index++); + item = iterator.next(); + } + } + + /** + * Returns an Array of graph result elements (vertex, edge, scalar). + * @returns {Array} + */ + toArray(): Array { + if (!this.rows.length) { + return utils.emptyArray as any[]; + } + return utils.iteratorToArray(this.values()); + } + + /** + * Returns a new Iterator object that contains the values for each index in the result. + * @returns {Iterator} + */ + *values(): Iterator { + for (const traverser of this.getTraversers()) { + const bulk = traverser.bulk || 1; + + for (let j = 0; j < bulk; j++) { + yield traverser.object; + } + } + } + + //TODO: this was not exposed in .d.ts. I think we should. + /** + * Gets the traversers contained in the result set. + * @returns {IterableIterator} + */ + *getTraversers(): IterableIterator { + for (const row of this.rows) { + yield this.rowParser(row); + } + } + + /** + * Makes the result set iterable using `for..of`. + * @returns {Iterator} + */ + [Symbol.iterator]() { + return this.values(); + } +} + +/** + * Parses a row into a traverser object. + * @param {Row} row The row to parse. + * @returns {Object} The parsed traverser object. + * @private + */ +function parsePlainJsonRow(row: any): { object: any; bulk: number } { + const parsed = JSON.parse(row['gremlin']); + return { object: parsed.result, bulk: parsed.bulk || 1 }; +} + +export default GraphResultSet; \ No newline at end of file diff --git a/lib/datastax/graph/structure.js b/lib/datastax/graph/structure.js deleted file mode 100644 index deef3e134..000000000 --- a/lib/datastax/graph/structure.js +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -const util = require('util'); - -/** - * @classdesc - * Represents a graph Element. 
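// Rough usage sketch for the rewritten GraphResultSet above; the contact point,
// data center, graph name and traversal string are placeholder values.
import { Client } from "cassandra-driver";

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "dc1",
  graphOptions: { name: "demo_graph" }
});

async function printVertices(): Promise<void> {
  const result = await client.executeGraph("g.V().hasLabel('person')");
  for (const vertex of result) {          // iterable via Symbol.iterator, as defined above
    console.log(vertex.label, vertex.id);
  }
  console.log(result.first());            // first() and toArray() remain available
}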
- * @param id - * @param label - * @abstract - * @memberOf module:datastax/graph - * @constructor - */ -function Element(id, label) { - /** - * Gets the element id. - */ - this.id = id; - /** - * Gets the element label. - * @type {String} - */ - this.label = label; -} - -/** - * @classdesc - * Represents a graph Vertex. - * @param id - * @param {String} label - * @param {Object} properties - * @extends {Element} - * @memberOf module:datastax/graph - * @constructor - */ -function Vertex(id, label, properties) { - Element.call(this, id, label); - /** - * Gets the vertex properties. - * @type {Object} - */ - this.properties = properties; -} - -util.inherits(Vertex, Element); - -/** - * @classdesc - * Represents a graph Edge. - * @param id - * @param outV - * @param {outVLabel} outVLabel - * @param {String} label - * @param inV - * @param {String} inVLabel - * @param {Object} properties - * @extends {Element} - * @memberOf module:datastax/graph - * @constructor - */ -function Edge(id, outV, outVLabel, label, inV, inVLabel, properties) { - Element.call(this, id, label); - /** - * Gets the id of outgoing vertex of the edge. - */ - this.outV = outV; - /** - * Gets the label of the outgoing vertex. - */ - this.outVLabel = outVLabel; - /** - * Gets the id of the incoming vertex of the edge. - */ - this.inV = inV; - - /** - * Gets the label of the incoming vertex. - */ - this.inVLabel = inVLabel; - /** - * Gets the properties of the edge as an associative array. - * @type {Object} - */ - this.properties = {}; - (function adaptProperties(self) { - if (properties) { - const keys = Object.keys(properties); - for (let i = 0; i < keys.length; i++) { - const k = keys[i]; - self.properties[k] = properties[k].value; - } - } - })(this); -} - -util.inherits(Edge, Element); - -/** - * @classdesc - * Represents a graph vertex property. - * @param id - * @param {String} label - * @param value - * @param {Object} properties - * @extends {Element} - * @memberOf module:datastax/graph - * @constructor - */ -function VertexProperty(id, label, value, properties) { - Element.call(this, id, label); - this.value = value; - this.key = this.label; - this.properties = properties; -} - -util.inherits(VertexProperty, Element); - -/** - * @classdesc - * Represents a property. - * @param key - * @param value - * @memberOf module:datastax/graph - * @constructor - */ -function Property(key, value) { - this.key = key; - this.value = value; -} - -/** - * @classdesc - * Represents a walk through a graph as defined by a traversal. - * @param {Array} labels - * @param {Array} objects - * @memberOf module:datastax/graph - * @constructor - */ -function Path(labels, objects) { - this.labels = labels; - this.objects = objects; -} - -module.exports = { - Edge, - Element, - Path, - Property, - Vertex, - VertexProperty -}; \ No newline at end of file diff --git a/lib/datastax/graph/structure.ts b/lib/datastax/graph/structure.ts new file mode 100644 index 000000000..fd5377f60 --- /dev/null +++ b/lib/datastax/graph/structure.ts @@ -0,0 +1,199 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represents a graph Element. + * @abstract + * @memberOf module:datastax/graph + */ +abstract class Element { + id: any; + label: string; + + /** + * @param id + * @param label + */ + constructor(id: any, label: string) { + /** + * Gets the element id. + */ + this.id = id; + /** + * Gets the element label. + * @type {String} + */ + this.label = label; + } +} + +/** + * Represents a graph Vertex. + * @extends Element + * @memberOf module:datastax/graph + */ +class Vertex extends Element { + properties: { [key: string]: any[] }; + + /** + * @param id + * @param {String} label + * @param {{ [key: string]: any[] }} properties + */ + constructor(id: any, label: string, properties?: { [key: string]: any[] }) { + super(id, label); + /** + * Gets the vertex properties. + * @type {Object} + */ + this.properties = properties; + } +} + +/** + * Represents a graph Edge. + * @extends Element + * @memberOf module:datastax/graph + */ +class Edge extends Element { + outV: Vertex; + outVLabel: string; + inV: Vertex; + inVLabel: string; + properties: { [s: string]: any }; //TODO: it was object. But I think it should be { [s: string]: any } + + /** + * @param id + * @param outV + * @param {String} outVLabel + * @param {String} label + * @param inV + * @param {String} inVLabel + * @param {Object} properties + */ + constructor( + id: any, + outV: Vertex, + outVLabel: string, + label: string, + inV: Vertex, + inVLabel?: string, + properties?: { [s: string]: Property } + ) { + super(id, label); + /** + * Gets the id of outgoing vertex of the edge. + */ + this.outV = outV; + /** + * Gets the label of the outgoing vertex. + */ + this.outVLabel = outVLabel; + /** + * Gets the id of the incoming vertex of the edge. + */ + this.inV = inV; + + /** + * Gets the label of the incoming vertex. + */ + this.inVLabel = inVLabel; + /** + * Gets the properties of the edge as an associative array. + * @type {Object} + */ + this.properties = {}; + this.adaptProperties(properties); + } + + private adaptProperties(properties: { [s: string]: Property }): void { + if (properties) { + const keys = Object.keys(properties); + for (const key of keys) { + this.properties[key] = properties[key].value; + } + } + } +} + +/** + * Represents a graph vertex property. + * @extends Element + * @memberOf module:datastax/graph + */ +class VertexProperty extends Element { + value: any; + key: string; + properties: any; + + /** + * @param id + * @param {String} label + * @param value + * @param {Object} properties + */ + constructor(id: any, label: string, value: any, properties: object) { + super(id, label); + this.value = value; + this.key = this.label; + this.properties = properties; + } +} + +/** + * Represents a property. + * @memberOf module:datastax/graph + */ +class Property { + key: string; + value: any; + + /** + * @param key + * @param value + */ + constructor(key: string, value: any) { + this.key = key; + this.value = value; + } +} + +/** + * Represents a walk through a graph as defined by a traversal. 
+ * @memberOf module:datastax/graph + */ +class Path { + labels: any[]; + objects: any[]; + + /** + * @param {any[]} labels + * @param {any[]} objects + */ + constructor(labels: any[], objects: any[]) { + this.labels = labels; + this.objects = objects; + } +} + +export { + Edge, + Element, + Path, + Property, + Vertex, + VertexProperty +}; diff --git a/lib/datastax/graph/type-serializers.js b/lib/datastax/graph/type-serializers.ts similarity index 82% rename from lib/datastax/graph/type-serializers.js rename to lib/datastax/graph/type-serializers.ts index 880c0f336..05a33eb14 100644 --- a/lib/datastax/graph/type-serializers.js +++ b/lib/datastax/graph/type-serializers.ts @@ -1,31 +1,30 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Copyright DataStax, Inc. * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import g, { Edge, Vertex } from "./index"; /** * @module datastax/graph/tinkerpop/typeSerializers - * @ignore + * @ignore @internal */ /** * @author Jorge Bay Gondra */ -'use strict'; + // Replace dependencies to minimize code changes from Apache TinkerPop const t = { @@ -34,9 +33,10 @@ const t = { }; const ts = { TraversalStrategy: UnsupportedType }; const Bytecode = UnsupportedType; -const g = require('./index'); const utils = { Long: UnsupportedType }; +// @ts-ignore t.t = g.t; +// @ts-ignore t.direction = g.direction; function UnsupportedType() { } @@ -47,16 +47,18 @@ const typeKey = '@type'; /** * @abstract */ -class TypeSerializer { - serialize() { +export class TypeSerializer { + reader: any; + writer: any; + serialize(item) { throw new Error('serialize() method not implemented for ' + this.constructor.name); } - deserialize() { + deserialize(obj) { throw new Error('deserialize() method not implemented for ' + this.constructor.name); } - canBeUsedFor() { + canBeUsedFor(value) { throw new Error('canBeUsedFor() method not implemented for ' + this.constructor.name); } } @@ -78,22 +80,22 @@ class NumberSerializer extends TypeSerializer { [typeKey]: 'g:Double', [valueKey]: '-Infinity' }; - } else { - return item; - } + } + return item; + } deserialize(obj) { - var val = obj[valueKey]; + const val = obj[valueKey]; if (val === 'NaN') { return NaN; } else if (val === 'Infinity') { return Number.POSITIVE_INFINITY; } else if (val === '-Infinity') { return Number.NEGATIVE_INFINITY; - } else { - return parseFloat(val); - } + } + return parseFloat(val); + } canBeUsedFor(value) { @@ -135,6 +137,7 @@ class BytecodeSerializer extends TypeSerializer { serialize(item) { let bytecode = item; if (item instanceof t.Traversal) { + // @ts-ignore bytecode = item.getBytecode(); } const result = {}; @@ -214,7 +217,7 @@ class TextPSerializer extends TypeSerializer { class LambdaSerializer extends TypeSerializer { /** @param {Function} item */ - serialize(item) { + serialize(item: Function) { return { [typeKey]: 'g:Lambda', [valueKey]: { @@ -258,6 +261,7 @@ class TraverserSerializer extends TypeSerializer { deserialize(obj) { const value = obj[valueKey]; + // @ts-ignore return new t.Traverser(this.reader.read(value['value']), this.reader.read(value['bulk'])); } @@ -287,7 +291,7 @@ class VertexSerializer extends TypeSerializer { } /** @param {Vertex} item */ - serialize(item) { + serialize(item: Vertex) { return { [typeKey]: 'g:Vertex', [valueKey]: { @@ -326,17 +330,20 @@ class PropertySerializer extends TypeSerializer { class EdgeSerializer extends TypeSerializer { deserialize(obj) { const value = obj[valueKey]; + //TODO: Edge constructor needs 7 arguments. I couldn't see how it would work in the past. 
return new g.Edge( this.reader.read(value['id']), new g.Vertex(this.reader.read(value['outV']), this.reader.read(value['outVLabel'])), + this.reader.read(value['outVLabel']), value['label'], new g.Vertex(this.reader.read(value['inV']), this.reader.read(value['inVLabel'])), + this.reader.read(value['inVLabel']), this.reader.read(value['properties']) ); } /** @param {Edge} item */ - serialize(item) { + serialize(item: Edge) { return { [typeKey]: 'g:Edge', [valueKey]: { @@ -372,17 +379,19 @@ class Path3Serializer extends TypeSerializer { class TSerializer extends TypeSerializer { deserialize(obj) { + // @ts-ignore return t.t[obj[valueKey]]; } } class DirectionSerializer extends TypeSerializer { - deserialize(obj) { - return t.direction[obj[valueKey].toLowerCase()]; - } + deserialize(obj) { + return t["direction"][obj[valueKey].toLowerCase()]; + } } class ArraySerializer extends TypeSerializer { + typeKey: any; constructor(typeKey) { super(); this.typeKey = typeKey; @@ -397,7 +406,7 @@ class ArraySerializer extends TypeSerializer { } /** @param {Array} item */ - serialize(item) { + serialize(item: Array) { return { [typeKey]: this.typeKey, [valueKey]: item.map(x => this.writer.adaptObject(x)) @@ -411,22 +420,22 @@ class ArraySerializer extends TypeSerializer { class BulkSetSerializer extends TypeSerializer { deserialize(obj) { - const value = obj[valueKey]; - if (!Array.isArray(value)) { - throw new Error('Expected Array, obtained: ' + value); - } + const value = obj[valueKey]; + if (!Array.isArray(value)) { + throw new Error('Expected Array, obtained: ' + value); + } - // coerce the BulkSet to List. if the bulk exceeds the int space then we can't coerce to List anyway, - // so this query will be trouble. we'd need a legit BulkSet implementation here in js. this current - // implementation is here to replicate the previous functionality that existed on the server side in - // previous versions. - let result = []; - for (let ix = 0, iy = value.length; ix < iy; ix += 2) { - const pair = value.slice(ix, ix + 2); - result = result.concat(Array(this.reader.read(pair[1])).fill(this.reader.read(pair[0]))); - } + // coerce the BulkSet to List. if the bulk exceeds the int space then we can't coerce to List anyway, + // so this query will be trouble. we'd need a legit BulkSet implementation here in js. this current + // implementation is here to replicate the previous functionality that existed on the server side in + // previous versions. 
+ let result = []; + for (let ix = 0, iy = value.length; ix < iy; ix += 2) { + const pair = value.slice(ix, ix + 2); + result = result.concat(Array(this.reader.read(pair[1])).fill(this.reader.read(pair[0]))); + } - return result; + return result; } } @@ -444,7 +453,7 @@ class MapSerializer extends TypeSerializer { } /** @param {Map} map */ - serialize(map) { + serialize(map: Map) { const arr = []; map.forEach((v, k) => { arr.push(this.writer.adaptObject(k)); @@ -473,7 +482,7 @@ class SetSerializer extends ArraySerializer { } } -module.exports = { +export default { BulkSetSerializer, BytecodeSerializer, DateSerializer, @@ -498,4 +507,4 @@ module.exports = { valueKey, VertexPropertySerializer, VertexSerializer -}; +}; \ No newline at end of file diff --git a/lib/datastax/graph/wrappers.js b/lib/datastax/graph/wrappers.ts similarity index 69% rename from lib/datastax/graph/wrappers.js rename to lib/datastax/graph/wrappers.ts index 0bbf8c086..99c9a7945 100644 --- a/lib/datastax/graph/wrappers.js +++ b/lib/datastax/graph/wrappers.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import types from "../../types/index"; -'use strict'; +//TODO: Why not expose GraphTypeWrapper? It is the return value of exposed API anyway -const types = require('../../types'); const { dataTypes } = types; /** @@ -25,6 +25,8 @@ const { dataTypes } = types; * @ignore */ class GraphTypeWrapper { + value: any; + typeInfo: any; constructor(value, typeInfo) { this.value = value; this.typeInfo = typeof typeInfo === 'number' ? { code: typeInfo } : typeInfo; @@ -38,6 +40,8 @@ class GraphTypeWrapper { * @ignore */ class UdtGraphWrapper { + value: any; + udtInfo: any; constructor(value, udtInfo) { this.value = value; @@ -53,32 +57,35 @@ class UdtGraphWrapper { * Wraps a number or null value to hint the client driver that the data type of the value is an int * @memberOf module:datastax/graph */ -function asInt(value) { return new GraphTypeWrapper(value, dataTypes.int); } +function asInt(value: number):object { return new GraphTypeWrapper(value, dataTypes.int); } /** * Wraps a number or null value to hint the client driver that the data type of the value is a double * @memberOf module:datastax/graph */ -function asDouble(value) { return new GraphTypeWrapper(value, dataTypes.double); } +function asDouble(value: number):object { return new GraphTypeWrapper(value, dataTypes.double); } /** * Wraps a number or null value to hint the client driver that the data type of the value is a double * @memberOf module:datastax/graph */ -function asFloat(value) { return new GraphTypeWrapper(value, dataTypes.float); } +function asFloat(value: number):object { return new GraphTypeWrapper(value, dataTypes.float); } /** * Wraps a Date or null value to hint the client driver that the data type of the value is a timestamp * @memberOf module:datastax/graph */ -function asTimestamp(value) { return new GraphTypeWrapper(value, dataTypes.timestamp); } +function asTimestamp(value: Date):object { return new GraphTypeWrapper(value, dataTypes.timestamp); } +//TODO: it was exposed as +// function asUdt(value: object): object; +// That was clearly wrong because udtInfo argument is mandatory /** * Wraps an Object or null value to hint the client driver that the data type of the value is a user-defined type. * @memberOf module:datastax/graph * @param {object} value The object representing the UDT. 
* @param {{name: string, keyspace: string, fields: Array}} udtInfo The UDT metadata as defined by the driver. */ -function asUdt(value, udtInfo) { return new UdtGraphWrapper(value, udtInfo); } +function asUdt(value: object, udtInfo: { name: string; keyspace: string; fields: Array; }):object { return new UdtGraphWrapper(value, udtInfo); } -module.exports = { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper }; \ No newline at end of file +export { asDouble, asFloat, asInt, asTimestamp, asUdt, GraphTypeWrapper, UdtGraphWrapper }; diff --git a/lib/datastax/index.js b/lib/datastax/index.js deleted file mode 100644 index b193461a0..000000000 --- a/lib/datastax/index.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -/** - * DataStax module. - *
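// Hedged sketch of the wrapper helpers exported above (asInt, asTimestamp, asUdt);
// the keyspace, UDT name, parameter names and traversal are illustrative only.
import { Client, datastax } from "cassandra-driver";

const { asInt, asTimestamp, asUdt } = datastax.graph;

async function addPerson(client: Client): Promise<void> {
  const udtInfo = await client.metadata.getUdt("demo_ks", "address"); // UDT metadata for asUdt()
  await client.executeGraph(
    "g.addV('person').property('age', age).property('since', since).property('home', home)",
    {
      age: asInt(42),                     // hint: send as CQL int instead of the default mapping
      since: asTimestamp(new Date()),     // hint: send as timestamp
      home: asUdt({ street: "1 Main St" }, udtInfo)
    }
  );
}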

- * Contains modules and classes to represent functionality that is specific to DataStax products.
- * @module datastax - */ - -exports.graph = require('./graph'); -exports.search = require('./search'); \ No newline at end of file diff --git a/lib/datastax/index.ts b/lib/datastax/index.ts new file mode 100644 index 000000000..43267b7cd --- /dev/null +++ b/lib/datastax/index.ts @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +/** + * DataStax module. + *

+ * Contains modules and classes to represent functionality that is specific to DataStax products.
+ * @module datastax + */ + +import graph, { + asDouble, asFloat, asInt, asTimestamp, asUdt, direction, Edge, Element, getCustomTypeSerializers, GraphResultSet, GraphTypeWrapper, Path, Property, + t, UdtGraphWrapper, Vertex, VertexProperty +} from "./graph/index"; +import search, { DateRange, DateRangeBound, dateRangePrecision } from "./search/index"; + +export default { + // Had to do this for api-extractor to remove the internal exports + graph: { + Edge, + Element, + Path, + Property, + Vertex, + VertexProperty, + asInt, + asDouble, + asFloat, + asTimestamp, + asUdt, + direction, + /** @internal */ + getCustomTypeSerializers, + GraphResultSet, + /** @internal */ + GraphTypeWrapper, + t, + /** @internal */ + UdtGraphWrapper + }, + search: { + DateRange, + DateRangeBound, + dateRangePrecision + } +}; + +export { + search, graph +}; + +export * from "./graph"; +export * from "./search"; +export * from "./cloud"; \ No newline at end of file diff --git a/lib/datastax/search/date-range.js b/lib/datastax/search/date-range.js deleted file mode 100644 index 877c9a392..000000000 --- a/lib/datastax/search/date-range.js +++ /dev/null @@ -1,537 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const utils = require('../../utils'); -const Long = require('long'); - -/** - * Regex to parse dates in the following format YYYY-MM-DDThh:mm:ss.mssZ - * Looks cumbersome but it's straightforward: - * - "(\d{1,6})": year mandatory 1 to 6 digits - * - (?:-(\d{1,2}))?(?:-(\d{1,2}))? two non-capturing groups representing the month and day (1 to 2 digits captured). - * - (?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2}))?)?Z? A non-capturing group for the time portion - * @private - */ -const dateRegex = - /^[-+]?(\d{1,6})(?:-(\d{1,2}))?(?:-(\d{1,2}))?(?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2})(?:\.(\d{1,3}))?)?)?Z?$/; -const multipleBoundariesRegex = /^\[(.+?) TO (.+)]$/; -const unbounded = Object.freeze(new DateRangeBound(null, -1)); - -const dateRangeType = { - // single value as in "2001-01-01" - singleValue: 0, - // closed range as in "[2001-01-01 TO 2001-01-31]" - closedRange: 1, - // open range high as in "[2001-01-01 TO *]" - openRangeHigh: 2, - // - 0x03 - open range low as in "[* TO 2001-01-01]" - openRangeLow: 3, - // - 0x04 - both ranges open as in "[* TO *]" - openBoth: 4, - // - 0x05 - single open range as in "[*]" - openSingle: 5 -}; - -/** - * Defines the possible values of date range precision. - * @type {Object} - * @property {Number} year - * @property {Number} month - * @property {Number} day - * @property {Number} hour - * @property {Number} minute - * @property {Number} second - * @property {Number} millisecond - * @memberof module:search - */ -const dateRangePrecision = { - year: 0, - month: 1, - day: 2, - hour: 3, - minute: 4, - second: 5, - millisecond: 6 -}; - -/** - * Creates a new instance of DateRange using a lower bound and an upper bound. - *

- * Consider using DateRange.fromString() to create instances more easily.
- * @classdesc
- * Represents a range of dates, corresponding to the Apache Solr type
- * DateRangeField.
- * A date range can have one or two bounds, namely lower bound and upper bound, to represent an interval of time.
- * Date range bounds are both inclusive. For example:
- *   • 2015 TO 2016-10 represents from the first day of 2015 to the last day of October 2016
- *   • 2015 represents during the course of the year 2015.
- *   • 2017 TO * represents any date greater or equals to the first day of the year 2017.

- * Note that this JavaScript representation of DateRangeField does not support Dates outside of the range
- * supported by ECMAScript Date: –100,000,000 days to 100,000,000 days measured relative to midnight at the
- * beginning of 01 January, 1970 UTC. Being -271821-04-20T00:00:00.000Z the minimum lower boundary
- * and 275760-09-13T00:00:00.000Z the maximum higher boundary.

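// Hedged sketch of DateRange.fromString() with the string forms described above;
// the cassandra-driver entry point is assumed.
import { datastax } from "cassandra-driver";

const { DateRange } = datastax.search;

const single = DateRange.fromString("2015");              // whole year 2015
const closed = DateRange.fromString("[2015 TO 2016-10]"); // inclusive lower and upper bounds
const openHigh = DateRange.fromString("[2017 TO *]");     // unbounded upper bound
console.log(closed.lowerBound.date.toISOString());        // each bound carries a Date and a precision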
- * @param {DateRangeBound} lowerBound A value representing the range lower bound, composed by a - * Date and a precision. Use DateRangeBound.unbounded for an open lower bound. - * @param {DateRangeBound} [upperBound] A value representing the range upper bound, composed by a - * Date and a precision. Use DateRangeBound.unbounded for an open upper bound. When it's not - * defined, the DateRange instance is considered as a single value range. - * @constructor - * @memberOf module:datastax/search - */ -function DateRange(lowerBound, upperBound) { - if (!lowerBound) { - throw new TypeError('The lower boundaries must be defined'); - } - /** - * Gets the lower bound of this range (inclusive). - * @type {DateRangeBound} - */ - this.lowerBound = lowerBound; - /** - * Gets the upper bound of this range (inclusive). - * @type {DateRangeBound|null} - */ - this.upperBound = upperBound || null; - - // Define the type - if (this.upperBound === null) { - if (this.lowerBound !== unbounded) { - this._type = dateRangeType.singleValue; - } - else { - this._type = dateRangeType.openSingle; - } - } - else { - if (this.lowerBound !== unbounded) { - this._type = this.upperBound !== unbounded ? dateRangeType.closedRange : dateRangeType.openRangeHigh; - } - else { - this._type = this.upperBound !== unbounded ? dateRangeType.openRangeLow : dateRangeType.openBoth; - } - } -} - -/** - * Returns true if the value of this DateRange instance and other are the same. - * @param {DateRange} other - * @returns {Boolean} - */ -DateRange.prototype.equals = function (other) { - if (!(other instanceof DateRange)) { - return false; - } - return (other.lowerBound.equals(this.lowerBound) && - (other.upperBound ? other.upperBound.equals(this.upperBound) : !this.upperBound)); -}; - -/** - * Returns the string representation of the instance. - * @return {String} - */ -DateRange.prototype.toString = function () { - if (this.upperBound === null) { - return this.lowerBound.toString(); - } - return '[' + this.lowerBound.toString() + ' TO ' + this.upperBound.toString() + ']'; -}; - -DateRange.prototype.toBuffer = function () { - // Serializes the value containing: - // [] - if (this._type === dateRangeType.openBoth || this._type === dateRangeType.openSingle) { - return utils.allocBufferFromArray([ this._type ]); - } - let buffer; - let offset = 0; - if (this._type !== dateRangeType.closedRange) { - // byte + long + byte - const boundary = this._type !== dateRangeType.openRangeLow ? this.lowerBound : this.upperBound; - buffer = utils.allocBufferUnsafe(10); - buffer.writeUInt8(this._type, offset++); - offset = writeDate(boundary.date, buffer, offset); - buffer.writeUInt8(boundary.precision, offset); - return buffer; - } - // byte + long + byte + long + byte - buffer = utils.allocBufferUnsafe(19); - buffer.writeUInt8(this._type, offset++); - offset = writeDate(this.lowerBound.date, buffer, offset); - buffer.writeUInt8(this.lowerBound.precision, offset++); - offset = writeDate(this.upperBound.date, buffer, offset); - buffer.writeUInt8(this.upperBound.precision, offset); - return buffer; -}; - -/** - * Returns the DateRange representation of a given string. - *

- * String representations of dates are always expressed in Coordinated Universal Time (UTC)
- * @param {String} dateRangeString - */ -DateRange.fromString = function (dateRangeString) { - const matches = multipleBoundariesRegex.exec(dateRangeString); - if (!matches) { - return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(dateRangeString))); - } - return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(matches[1])), DateRangeBound.toUpperBound(DateRangeBound.fromString(matches[2]))); -}; - -/** - * Deserializes the buffer into a DateRange - * @param {Buffer} buffer - * @return {DateRange} - */ -DateRange.fromBuffer = function (buffer) { - if (buffer.length === 0) { - throw new TypeError('DateRange serialized value must have at least 1 byte'); - } - const type = buffer.readUInt8(0); - if (type === dateRangeType.openBoth) { - return new DateRange(unbounded, unbounded); - } - if (type === dateRangeType.openSingle) { - return new DateRange(unbounded); - } - let offset = 1; - let date1; - let lowerBound; - let upperBound = null; - if (type !== dateRangeType.closedRange) { - date1 = readDate(buffer, offset); - offset += 8; - lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset)); - if (type === dateRangeType.openRangeLow) { - // lower boundary is open, the first serialized boundary is the upperBound - upperBound = lowerBound; - lowerBound = unbounded; - } - else { - upperBound = type === dateRangeType.openRangeHigh ? unbounded : null; - } - return new DateRange(lowerBound, upperBound); - } - date1 = readDate(buffer, offset); - offset += 8; - lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset++)); - const date2 = readDate(buffer, offset); - offset += 8; - upperBound = new DateRangeBound(date2, buffer.readUInt8(offset)); - return new DateRange(lowerBound, upperBound); -}; - -/** - * Writes a Date, long millis since epoch, to a buffer starting from offset. - * @param {Date} date - * @param {Buffer} buffer - * @param {Number} offset - * @return {Number} The new offset. - * @private - */ -function writeDate(date, buffer, offset) { - const long = Long.fromNumber(date.getTime()); - buffer.writeUInt32BE(long.getHighBitsUnsigned(), offset); - buffer.writeUInt32BE(long.getLowBitsUnsigned(), offset + 4); - return offset + 8; -} - -/** - * Reads a Date, long millis since epoch, from a buffer starting from offset. - * @param {Buffer} buffer - * @param {Number} offset - * @return {Date} - * @private - */ -function readDate(buffer, offset) { - const long = new Long(buffer.readInt32BE(offset+4), buffer.readInt32BE(offset)); - return new Date(long.toNumber()); -} - -/** - * @classdesc - * Represents a date range boundary, composed by a Date and a precision. - * @param {Date} date The timestamp portion, representing a single moment in time. Consider using - * Date.UTC() method to build the Date instance. - * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are - * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. - * @constructor - * @memberOf module:datastax/search - */ -function DateRangeBound(date, precision) { - /** - * The timestamp portion of the boundary. - * @type {Date} - */ - this.date = date; - /** - * The precision portion of the boundary. Valid values are defined in the - * [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. - * @type {Number} - */ - this.precision = precision; -} - -/** - * Returns the string representation of the instance. 
- * @return {String} - */ -DateRangeBound.prototype.toString = function () { - if (this.precision === -1) { - return '*'; - } - let precision = 0; - const isoString = this.date.toISOString(); - let i; - let char; - // The years take at least the first 4 characters - for (i = 4; i < isoString.length && precision <= this.precision; i++) { - char = isoString.charAt(i); - if (precision === dateRangePrecision.day && char === 'T') { - precision = dateRangePrecision.hour; - continue; - } - if (precision >= dateRangePrecision.hour && char === ':' || char === '.') { - precision++; - continue; - } - if (precision < dateRangePrecision.day && char === '-') { - precision++; - } - } - let start = 0; - const firstChar = isoString.charAt(0); - let sign = ''; - let toRemoveIndex = 4; - if (firstChar === '+' || firstChar === '-') { - sign = firstChar; - if (firstChar === '-') { - // since we are retaining the -, don't remove as many zeros. - toRemoveIndex = 3; - } - // Remove additional zeros - for (start = 1; start < toRemoveIndex; start++) { - if (isoString.charAt(start) !== '0') { - break; - } - } - } - if (this.precision !== dateRangePrecision.millisecond) { - // i holds the position of the first char that marks the end of a precision (ie: '-', 'T', ...), - // we should not include it in the result, except its the 'Z' char for the complete representation - i--; - } - return sign + isoString.substring(start, i); -}; - -/** - * Returns true if the value of this DateRange instance and other are the same. - * @param {DateRangeBound} other - * @return {boolean} - */ -DateRangeBound.prototype.equals = function (other) { - if (!(other instanceof DateRangeBound)) { - return false; - } - if (other.precision !== this.precision) { - return false; - } - return datesEqual(other.date, this.date); -}; - -function datesEqual(d1, d2) { - const t1 = d1 ? d1.getTime() : null; - const t2 = d2 ? d2.getTime() : null; - return t1 === t2; -} - -DateRangeBound.prototype.isUnbounded = function () { - return (this.precision === -1); -}; - -/** - * Parses a date string and returns a DateRangeBound. - * @param {String} boundaryString - * @return {DateRangeBound} - */ -DateRangeBound.fromString = function(boundaryString) { - if (!boundaryString) { - return null; - } - if (boundaryString === '*') { - return unbounded; - } - const matches = dateRegex.exec(boundaryString); - if (!matches) { - throw TypeError('String provided is not a valid date ' + boundaryString); - } - if (matches[7] !== undefined && matches[5] === undefined) { - // Due to a limitation in the regex, its possible to match dates like 2015T03:02.001, without the seconds - // portion but with the milliseconds specified. - throw new TypeError('String representation of the date contains the milliseconds portion but not the seconds: ' + - boundaryString); - } - const builder = new BoundaryBuilder(boundaryString.charAt(0) === '-'); - for (let i = 1; i < matches.length; i++) { - builder.set(i-1, matches[i], boundaryString); - } - return builder.build(); -}; - -/** - * The unbounded {@link DateRangeBound} instance. Unbounded bounds are syntactically represented by a * - * (star) sign. - * @type {DateRangeBound} - */ -DateRangeBound.unbounded = unbounded; - -/** - * Converts a {DateRangeBound} into a lower-bounded bound by rounding down its date - * based on its precision. - * - * @param {DateRangeBound} bound The bound to round down. - * @returns {DateRangeBound} with the date rounded down to the given precision. 
- */ -DateRangeBound.toLowerBound = function (bound) { - if(bound === unbounded) { - return bound; - } - const rounded = new Date(bound.date.getTime()); - // in this case we want to fallthrough - /* eslint-disable no-fallthrough */ - switch (bound.precision) { - case dateRangePrecision.year: - rounded.setUTCMonth(0); - case dateRangePrecision.month: - rounded.setUTCDate(1); - case dateRangePrecision.day: - rounded.setUTCHours(0); - case dateRangePrecision.hour: - rounded.setUTCMinutes(0); - case dateRangePrecision.minute: - rounded.setUTCSeconds(0); - case dateRangePrecision.second: - rounded.setUTCMilliseconds(0); - } - /* eslint-enable no-fallthrough */ - return new DateRangeBound(rounded, bound.precision); -}; - -/** - * Converts a {DateRangeBound} into a upper-bounded bound by rounding up its date - * based on its precision. - * - * @param {DateRangeBound} bound The bound to round up. - * @returns {DateRangeBound} with the date rounded up to the given precision. - */ -DateRangeBound.toUpperBound = function (bound) { - if (bound === unbounded) { - return bound; - } - const rounded = new Date(bound.date.getTime()); - // in this case we want to fallthrough - /* eslint-disable no-fallthrough */ - switch (bound.precision) { - case dateRangePrecision.year: - rounded.setUTCMonth(11); - case dateRangePrecision.month: - // Advance to the beginning of next month and set day of month to 0 - // which sets the date to the last day of the previous month. - // This gives us the effect of YYYY-MM-LastDayOfThatMonth - rounded.setUTCMonth(rounded.getUTCMonth() + 1, 0); - case dateRangePrecision.day: - rounded.setUTCHours(23); - case dateRangePrecision.hour: - rounded.setUTCMinutes(59); - case dateRangePrecision.minute: - rounded.setUTCSeconds(59); - case dateRangePrecision.second: - rounded.setUTCMilliseconds(999); - } - /* eslint-enable no-fallthrough */ - return new DateRangeBound(rounded, bound.precision); -}; - -/** @private */ -function BoundaryBuilder(isNegative) { - this._sign = isNegative ? 
-1 : 1; - this._index = 0; - this._values = new Int32Array(7); -} - -BoundaryBuilder.prototype.set = function (index, value, stringDate) { - if (value === undefined) { - return; - } - if (index > 6) { - throw new TypeError('Index out of bounds: ' + index); - } - if (index > this._index) { - this._index = index; - } - const numValue = +value; - switch (index) { - case dateRangePrecision.month: - if (numValue < 1 || numValue > 12) { - throw new TypeError('Month portion is not valid for date: ' + stringDate); - } - break; - case dateRangePrecision.day: - if (numValue < 1 || numValue > 31) { - throw new TypeError('Day portion is not valid for date: ' + stringDate); - } - break; - case dateRangePrecision.hour: - if (numValue > 23) { - throw new TypeError('Hour portion is not valid for date: ' + stringDate); - } - break; - case dateRangePrecision.minute: - case dateRangePrecision.second: - if (numValue > 59) { - throw new TypeError('Minute/second portion is not valid for date: ' + stringDate); - } - break; - case dateRangePrecision.millisecond: - if (numValue > 999) { - throw new TypeError('Millisecond portion is not valid for date: ' + stringDate); - } - break; - } - this._values[index] = numValue; -}; - -/** @return {DateRangeBound} */ -BoundaryBuilder.prototype.build = function () { - const date = new Date(0); - let month = this._values[1]; - if (month) { - // ES Date months are represented from 0 to 11 - month--; - } - date.setUTCFullYear(this._sign * this._values[0], month, this._values[2] || 1); - date.setUTCHours(this._values[3], this._values[4], this._values[5], this._values[6]); - return new DateRangeBound(date, this._index); -}; - -exports.unbounded = unbounded; -exports.dateRangePrecision = dateRangePrecision; -exports.DateRange = DateRange; -exports.DateRangeBound = DateRangeBound; \ No newline at end of file diff --git a/lib/datastax/search/date-range.ts b/lib/datastax/search/date-range.ts new file mode 100644 index 000000000..cbdda977b --- /dev/null +++ b/lib/datastax/search/date-range.ts @@ -0,0 +1,555 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import Long from "long"; +import utils from "../../utils"; + +/** + * Regex to parse dates in the following format YYYY-MM-DDThh:mm:ss.mssZ + * Looks cumbersome but it's straightforward: + * - "(\d{1,6})": year mandatory 1 to 6 digits + * - (?:-(\d{1,2}))?(?:-(\d{1,2}))? two non-capturing groups representing the month and day (1 to 2 digits captured). + * - (?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2}))?)?Z? A non-capturing group for the time portion + * @private + */ +const dateRegex = + /^[-+]?(\d{1,6})(?:-(\d{1,2}))?(?:-(\d{1,2}))?(?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2})(?:\.(\d{1,3}))?)?)?Z?$/; +const multipleBoundariesRegex = /^\[(.+?) 
TO (.+)]$/; + +const dateRangeType = { + // single value as in "2001-01-01" + singleValue: 0, + // closed range as in "[2001-01-01 TO 2001-01-31]" + closedRange: 1, + // open range high as in "[2001-01-01 TO *]" + openRangeHigh: 2, + // - 0x03 - open range low as in "[* TO 2001-01-01]" + openRangeLow: 3, + // - 0x04 - both ranges open as in "[* TO *]" + openBoth: 4, + // - 0x05 - single open range as in "[*]" + openSingle: 5 +} as const; + +//TODO: dateRangePrecision was exposed in .d.ts, but not used anywhere in .d.ts. Do we want to expose it or not? +/** + * Defines the possible values of date range precision. + * @type {Object} + * @property {Number} year + * @property {Number} month + * @property {Number} day + * @property {Number} hour + * @property {Number} minute + * @property {Number} second + * @property {Number} millisecond + * @memberof module:search + */ +const dateRangePrecision = { + year: 0, + month: 1, + day: 2, + hour: 3, + minute: 4, + second: 5, + millisecond: 6 +} as const; + +/** + * @classdesc + * Represents a range of dates, corresponding to the Apache Solr type + * DateRangeField. + *

+ * A date range can have one or two bounds, namely lower bound and upper bound, to represent an interval of time. + * Date range bounds are both inclusive. For example: + *

+ * <ul>
+ *   <li>2015 TO 2016-10 represents from the first day of 2015 to the last day of October 2016.</li>
+ *   <li>2015 represents the entire year 2015.</li>
+ *   <li>2017 TO * represents any date greater than or equal to the first day of the year 2017.</li>
+ * </ul>
+ *

+ * Note that this JavaScript representation of DateRangeField does not support Dates outside of the range + * supported by ECMAScript Date: -100,000,000 days to 100,000,000 days measured relative to midnight at the + * beginning of 01 January, 1970 UTC; that is, -271821-04-20T00:00:00.000Z is the minimum lower boundary + * and 275760-09-13T00:00:00.000Z is the maximum upper boundary. + *

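As a usage sketch of the API added below (the import path is illustrative; applications would normally obtain these types from the driver's search module export rather than from this internal file):

import { DateRange, DateRangeBound } from "./date-range";

// Parse a closed range; both bounds are inclusive.
const range = DateRange.fromString("[2015 TO 2016-10]");

// Equivalent range built from explicit bounds rounded to their precision.
const lower = DateRangeBound.toLowerBound(DateRangeBound.fromString("2015"));
const upper = DateRangeBound.toUpperBound(DateRangeBound.fromString("2016-10"));
const same = new DateRange(lower, upper);

range.toString();   // "[2015 TO 2016-10]"
range.equals(same); // true

// "*" denotes an unbounded side, e.g. any date from 2017 onwards.
DateRange.fromString("[2017 TO *]").toString(); // "[2017 TO *]"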
+ * @memberOf module:datastax/search + */ +class DateRange { + lowerBound: DateRangeBound; + upperBound: DateRangeBound; + private _type: number; + constructor(lowerBound: DateRangeBound, upperBound?: DateRangeBound) { + if (!lowerBound) { + throw new TypeError('The lower boundaries must be defined'); + } + /** + * Gets the lower bound of this range (inclusive). + * @type {DateRangeBound} + */ + this.lowerBound = lowerBound; + /** + * Gets the upper bound of this range (inclusive). + * @type {DateRangeBound|null} + */ + this.upperBound = upperBound || null; + + // Define the type + if (this.upperBound === null) { + if (this.lowerBound !== unbounded) { + this._type = dateRangeType.singleValue; + } + else { + this._type = dateRangeType.openSingle; + } + } + else { + if (this.lowerBound !== unbounded) { + this._type = this.upperBound !== unbounded ? dateRangeType.closedRange : dateRangeType.openRangeHigh; + } + else { + this._type = this.upperBound !== unbounded ? dateRangeType.openRangeLow : dateRangeType.openBoth; + } + } + } + /** + * Returns the DateRange representation of a given string. + *

+ * String representations of dates are always expressed in Coordinated Universal Time (UTC).

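For instance, a minimal sketch of what this parsing yields (the results shown as comments follow from the regex and rounding logic in this file):

const range = DateRange.fromString("2017-01-20T06");

// Single-value range with hour precision; the underlying Date is built with UTC setters.
range.lowerBound.date.toISOString();                    // "2017-01-20T06:00:00.000Z"
range.lowerBound.precision === dateRangePrecision.hour; // true
range.upperBound;                                       // null
range.toString();                                       // "2017-01-20T06"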
+ * @param {String} dateRangeString + */ + static fromString(dateRangeString: string): DateRange { + const matches = multipleBoundariesRegex.exec(dateRangeString); + if (!matches) { + return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(dateRangeString))); + } + return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(matches[1])), DateRangeBound.toUpperBound(DateRangeBound.fromString(matches[2]))); + } + /** + * Deserializes the buffer into a DateRange + * @param {Buffer} buffer + * @return {DateRange} + */ + static fromBuffer(buffer: Buffer): DateRange { + if (buffer.length === 0) { + throw new TypeError('DateRange serialized value must have at least 1 byte'); + } + const type = buffer.readUInt8(0); + if (type === dateRangeType.openBoth) { + return new DateRange(unbounded, unbounded); + } + if (type === dateRangeType.openSingle) { + return new DateRange(unbounded); + } + let offset = 1; + let date1; + let lowerBound; + let upperBound = null; + if (type !== dateRangeType.closedRange) { + date1 = readDate(buffer, offset); + offset += 8; + lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset)); + if (type === dateRangeType.openRangeLow) { + // lower boundary is open, the first serialized boundary is the upperBound + upperBound = lowerBound; + lowerBound = unbounded; + } + else { + upperBound = type === dateRangeType.openRangeHigh ? unbounded : null; + } + return new DateRange(lowerBound, upperBound); + } + date1 = readDate(buffer, offset); + offset += 8; + lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset++)); + const date2 = readDate(buffer, offset); + offset += 8; + upperBound = new DateRangeBound(date2, buffer.readUInt8(offset)); + return new DateRange(lowerBound, upperBound); + } + /** + * Returns true if the value of this DateRange instance and other are the same. + * @param {DateRange} other + * @returns {Boolean} + */ + equals(other: DateRange): boolean { + if (!(other instanceof DateRange)) { + return false; + } + return (other.lowerBound.equals(this.lowerBound) && + (other.upperBound ? other.upperBound.equals(this.upperBound) : !this.upperBound)); + } + /** + * Returns the string representation of the instance. + * @return {String} + */ + toString(): string { + if (this.upperBound === null) { + return this.lowerBound.toString(); + } + return '[' + this.lowerBound.toString() + ' TO ' + this.upperBound.toString() + ']'; + } + + /** + * @intenal + */ + toBuffer() { + // Serializes the value containing: + // [] + if (this._type === dateRangeType.openBoth || this._type === dateRangeType.openSingle) { + return utils.allocBufferFromArray([this._type]); + } + let buffer; + let offset = 0; + if (this._type !== dateRangeType.closedRange) { + // byte + long + byte + const boundary = this._type !== dateRangeType.openRangeLow ? this.lowerBound : this.upperBound; + buffer = utils.allocBufferUnsafe(10); + buffer.writeUInt8(this._type, offset++); + offset = writeDate(boundary.date, buffer, offset); + buffer.writeUInt8(boundary.precision, offset); + return buffer; + } + // byte + long + byte + long + byte + buffer = utils.allocBufferUnsafe(19); + buffer.writeUInt8(this._type, offset++); + offset = writeDate(this.lowerBound.date, buffer, offset); + buffer.writeUInt8(this.lowerBound.precision, offset++); + offset = writeDate(this.upperBound.date, buffer, offset); + buffer.writeUInt8(this.upperBound.precision, offset); + return buffer; + } +} + + + + + + +/** + * Writes a Date, long millis since epoch, to a buffer starting from offset. 
+ * @param {Date} date + * @param {Buffer} buffer + * @param {Number} offset + * @return {Number} The new offset. + * @private + */ +function writeDate(date: Date, buffer: Buffer, offset: number): number { + const long = Long.fromNumber(date.getTime()); + buffer.writeUInt32BE(long.getHighBitsUnsigned(), offset); + buffer.writeUInt32BE(long.getLowBitsUnsigned(), offset + 4); + return offset + 8; +} + +/** + * Reads a Date, long millis since epoch, from a buffer starting from offset. + * @param {Buffer} buffer + * @param {Number} offset + * @return {Date} + * @private + */ +function readDate(buffer: Buffer, offset: number): Date { + const long = new Long(buffer.readInt32BE(offset+4), buffer.readInt32BE(offset)); + return new Date(long.toNumber()); +} + +/** + * @classdesc + * Represents a date range boundary, composed by a Date and a precision. + * @param {Date} date The timestamp portion, representing a single moment in time. Consider using + * Date.UTC() method to build the Date instance. + * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are + * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @constructor + * @memberOf module:datastax/search + */ +class DateRangeBound { + date: Date; + precision: number; + /** @internal */ + static unbounded: Readonly; + /** + * Represents a date range boundary, composed by a Date and a precision. + * @param {Date} date The timestamp portion, representing a single moment in time. Consider using + * Date.UTC() method to build the Date instance. + * @param {Number} precision The precision portion. Valid values for DateRangeBound precision are + * defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @constructor + */ + constructor(date: Date, precision: number) { + /** + * The timestamp portion of the boundary. + * @type {Date} + */ + this.date = date; + /** + * The precision portion of the boundary. Valid values are defined in the + * [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member. + * @type {Number} + */ + this.precision = precision; + } + /** + * Parses a date string and returns a DateRangeBound. + * @param {String} boundaryString + * @return {DateRangeBound} + */ + static fromString(boundaryString: string): DateRangeBound { + if (!boundaryString) { + return null; + } + if (boundaryString === '*') { + return unbounded; + } + const matches = dateRegex.exec(boundaryString); + if (!matches) { + throw TypeError('String provided is not a valid date ' + boundaryString); + } + if (matches[7] !== undefined && matches[5] === undefined) { + // Due to a limitation in the regex, its possible to match dates like 2015T03:02.001, without the seconds + // portion but with the milliseconds specified. + throw new TypeError('String representation of the date contains the milliseconds portion but not the seconds: ' + + boundaryString); + } + const builder = new BoundaryBuilder(boundaryString.charAt(0) === '-'); + for (let i = 1; i < matches.length; i++) { + builder.set(i - 1, matches[i], boundaryString); + } + return builder.build(); + } + /** + * Converts a {DateRangeBound} into a lower-bounded bound by rounding down its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round down. + * @returns {DateRangeBound} with the date rounded down to the given precision. 
+ */ + static toLowerBound(bound: DateRangeBound): DateRangeBound { + if (bound === unbounded) { + return bound; + } + const rounded = new Date(bound.date.getTime()); + // in this case we want to fallthrough + /* eslint-disable no-fallthrough */ + switch (bound.precision) { + case dateRangePrecision.year: + rounded.setUTCMonth(0); + case dateRangePrecision.month: + rounded.setUTCDate(1); + case dateRangePrecision.day: + rounded.setUTCHours(0); + case dateRangePrecision.hour: + rounded.setUTCMinutes(0); + case dateRangePrecision.minute: + rounded.setUTCSeconds(0); + case dateRangePrecision.second: + rounded.setUTCMilliseconds(0); + } + /* eslint-enable no-fallthrough */ + return new DateRangeBound(rounded, bound.precision); + } + /** + * Converts a {DateRangeBound} into a upper-bounded bound by rounding up its date + * based on its precision. + * + * @param {DateRangeBound} bound The bound to round up. + * @returns {DateRangeBound} with the date rounded up to the given precision. + */ + static toUpperBound(bound: DateRangeBound): DateRangeBound { + if (bound === unbounded) { + return bound; + } + const rounded = new Date(bound.date.getTime()); + // in this case we want to fallthrough + /* eslint-disable no-fallthrough */ + switch (bound.precision) { + case dateRangePrecision.year: + rounded.setUTCMonth(11); + case dateRangePrecision.month: + // Advance to the beginning of next month and set day of month to 0 + // which sets the date to the last day of the previous month. + // This gives us the effect of YYYY-MM-LastDayOfThatMonth + rounded.setUTCMonth(rounded.getUTCMonth() + 1, 0); + case dateRangePrecision.day: + rounded.setUTCHours(23); + case dateRangePrecision.hour: + rounded.setUTCMinutes(59); + case dateRangePrecision.minute: + rounded.setUTCSeconds(59); + case dateRangePrecision.second: + rounded.setUTCMilliseconds(999); + } + /* eslint-enable no-fallthrough */ + return new DateRangeBound(rounded, bound.precision); + } + /** + * Returns the string representation of the instance. + * @return {String} + */ + toString(): string { + if (this.precision === -1) { + return '*'; + } + let precision = 0; + const isoString = this.date.toISOString(); + let i; + let char; + // The years take at least the first 4 characters + for (i = 4; i < isoString.length && precision <= this.precision; i++) { + char = isoString.charAt(i); + if (precision === dateRangePrecision.day && char === 'T') { + precision = dateRangePrecision.hour; + continue; + } + if (precision >= dateRangePrecision.hour && char === ':' || char === '.') { + precision++; + continue; + } + if (precision < dateRangePrecision.day && char === '-') { + precision++; + } + } + let start = 0; + const firstChar = isoString.charAt(0); + let sign = ''; + let toRemoveIndex = 4; + if (firstChar === '+' || firstChar === '-') { + sign = firstChar; + if (firstChar === '-') { + // since we are retaining the -, don't remove as many zeros. + toRemoveIndex = 3; + } + // Remove additional zeros + for (start = 1; start < toRemoveIndex; start++) { + if (isoString.charAt(start) !== '0') { + break; + } + } + } + if (this.precision !== dateRangePrecision.millisecond) { + // i holds the position of the first char that marks the end of a precision (ie: '-', 'T', ...), + // we should not include it in the result, except its the 'Z' char for the complete representation + i--; + } + return sign + isoString.substring(start, i); + } + /** + * Returns true if the value of this DateRange instance and other are the same. 
+ * @param {DateRangeBound} other + * @return {boolean} + */ + equals(other: DateRangeBound): boolean { + if (!(other instanceof DateRangeBound)) { + return false; + } + if (other.precision !== this.precision) { + return false; + } + return datesEqual(other.date, this.date); + } + /** @internal */ + isUnbounded() { + return (this.precision === -1); + } +} + + + +function datesEqual(d1, d2) { + const t1 = d1 ? d1.getTime() : null; + const t2 = d2 ? d2.getTime() : null; + return t1 === t2; +} + + +const unbounded = Object.freeze(new DateRangeBound(null, -1)); + +/** + * The unbounded {@link DateRangeBound} instance. Unbounded bounds are syntactically represented by a * + * (star) sign. + * @type {DateRangeBound} + */ +DateRangeBound.unbounded = unbounded; + + + +/** @private */ +class BoundaryBuilder { + _sign: number; + _index: number; + _values: Int32Array; + constructor(isNegative) { + this._sign = isNegative ? -1 : 1; + this._index = 0; + this._values = new Int32Array(7); + } + set(index, value, stringDate) { + if (value === undefined) { + return; + } + if (index > 6) { + throw new TypeError('Index out of bounds: ' + index); + } + if (index > this._index) { + this._index = index; + } + const numValue = +value; + switch (index) { + case dateRangePrecision.month: + if (numValue < 1 || numValue > 12) { + throw new TypeError('Month portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.day: + if (numValue < 1 || numValue > 31) { + throw new TypeError('Day portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.hour: + if (numValue > 23) { + throw new TypeError('Hour portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.minute: + case dateRangePrecision.second: + if (numValue > 59) { + throw new TypeError('Minute/second portion is not valid for date: ' + stringDate); + } + break; + case dateRangePrecision.millisecond: + if (numValue > 999) { + throw new TypeError('Millisecond portion is not valid for date: ' + stringDate); + } + break; + } + this._values[index] = numValue; + } + /** @return {DateRangeBound} */ + build(): DateRangeBound { + const date = new Date(0); + let month = this._values[1]; + if (month) { + // ES Date months are represented from 0 to 11 + month--; + } + date.setUTCFullYear(this._sign * this._values[0], month, this._values[2] || 1); + date.setUTCHours(this._values[3], this._values[4], this._values[5], this._values[6]); + return new DateRangeBound(date, this._index); + } +} + +export { + DateRange, + DateRangeBound, dateRangePrecision, unbounded +}; diff --git a/lib/datastax/search/index.d.ts b/lib/datastax/search/index.d.ts deleted file mode 100644 index 57ba44f7a..000000000 --- a/lib/datastax/search/index.d.ts +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export namespace search { - enum dateRangePrecision { - year = 0, - month, - day, - hour, - minute, - second, - millisecond - } - - class DateRange { - lowerBound: DateRangeBound; - upperBound: DateRangeBound; - - constructor(lowerBound: DateRangeBound, upperBound: DateRangeBound); - - equals(other: DateRangeBound): boolean; - - toString(): string; - - static fromString(value: string): DateRange; - - static fromBuffer(value: Buffer): DateRange; - } - - class DateRangeBound { - date: Date; - - precision: number; - - equals(other: DateRangeBound): boolean; - - toString(): string; - - static fromString(value: string): DateRangeBound; - - static toLowerBound(bound: DateRangeBound): DateRangeBound; - - static toUpperBound(bound: DateRangeBound): DateRangeBound; - } -} \ No newline at end of file diff --git a/lib/datastax/search/index.js b/lib/datastax/search/index.ts similarity index 76% rename from lib/datastax/search/index.js rename to lib/datastax/search/index.ts index c101f5d14..023d6eb92 100644 --- a/lib/datastax/search/index.js +++ b/lib/datastax/search/index.ts @@ -13,9 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const dateRangeModule = require('./date-range'); /** * Search module. @@ -24,7 +22,16 @@ const dateRangeModule = require('./date-range'); *

* @module datastax/search */ +import { DateRange, DateRangeBound, dateRangePrecision } from "./date-range"; -exports.DateRange = dateRangeModule.DateRange; -exports.DateRangeBound = dateRangeModule.DateRangeBound; -exports.dateRangePrecision = dateRangeModule.dateRangePrecision; \ No newline at end of file +export { + DateRange, + DateRangeBound, + dateRangePrecision +}; + +export default { + DateRange, + DateRangeBound, + dateRangePrecision +}; \ No newline at end of file diff --git a/lib/encoder.js b/lib/encoder.ts similarity index 74% rename from lib/encoder.js rename to lib/encoder.ts index 7a78754d6..98bab655e 100644 --- a/lib/encoder.js +++ b/lib/encoder.ts @@ -13,24 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const util = require('util'); - -const types = require('./types'); -const dataTypes = types.dataTypes; -const Long = types.Long; -const Integer = types.Integer; -const BigDecimal = types.BigDecimal; -const MutableLong = require('./types/mutable-long'); -const utils = require('./utils'); -const token = require('./token'); -const { DateRange } = require('./datastax/search'); -const geo = require('./geometry'); -const Vector = require('./types/vector'); -const Geometry = geo.Geometry; -const LineString = geo.LineString; -const Point = geo.Point; -const Polygon = geo.Polygon; +import util from "util"; +import { type ClientOptions } from "./client"; +import { DateRange } from "./datastax/search/index"; +import type { ExecutionOptions } from "./execution-options"; +import { Geometry, LineString, Point, Polygon } from "./geometry/index"; +import token from "./token"; +import types, { BigDecimal, dataTypes, InetAddress, Integer, LocalDate, LocalTime, Long, Uuid } from "./types/index"; +import MutableLong from "./types/mutable-long"; +import Vector from "./types/vector"; +import utils from "./utils"; + const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; @@ -62,7 +55,7 @@ const complexTypeNames = Object.freeze({ composite : 'org.apache.cassandra.db.marshal.CompositeType', empty : 'org.apache.cassandra.db.marshal.EmptyType', collection: 'org.apache.cassandra.db.marshal.ColumnToCollectionType' -}); +} as const); const cqlNames = Object.freeze({ frozen: 'frozen', list: 'list', @@ -72,7 +65,7 @@ const cqlNames = Object.freeze({ empty: 'empty', duration: 'duration', vector: 'vector' -}); +} as const); const singleTypeNames = Object.freeze({ 'org.apache.cassandra.db.marshal.UTF8Type': dataTypes.varchar, 'org.apache.cassandra.db.marshal.AsciiType': dataTypes.ascii, @@ -94,9 +87,9 @@ const singleTypeNames = Object.freeze({ 'org.apache.cassandra.db.marshal.DecimalType': dataTypes.decimal, 'org.apache.cassandra.db.marshal.IntegerType': dataTypes.varint, 'org.apache.cassandra.db.marshal.CounterColumnType': dataTypes.counter -}); +} as const); -// eslint-disable-next-line no-unused-vars +// eslint-disable-next-line @typescript-eslint/no-unused-vars const singleTypeNamesByDataType = invertObject(singleTypeNames); const singleFqTypeNamesLength = Object.keys(singleTypeNames).reduce(function (previous, current) { return current.length > previous ? 
current.length : previous; @@ -109,7 +102,7 @@ const customTypeNames = Object.freeze({ polygon: 'org.apache.cassandra.db.marshal.PolygonType', dateRange: 'org.apache.cassandra.db.marshal.DateRangeType', vector: 'org.apache.cassandra.db.marshal.VectorType' -}); +} as const); const nullValueBuffer = utils.allocBufferFromArray([255, 255, 255, 255]); const unsetValueBuffer = utils.allocBufferFromArray([255, 255, 255, 254]); @@ -120,7 +113,7 @@ const unsetValueBuffer = utils.allocBufferFromArray([255, 255, 255, 254]); * @private * @type {Set} */ -const zeroLengthTypesSupported = new Set([ +const zeroLengthTypesSupported: Set = new Set([ dataTypes.text, dataTypes.ascii, dataTypes.varchar, @@ -128,53 +121,82 @@ const zeroLengthTypesSupported = new Set([ dataTypes.blob ]); -/** - * @typedef {(singleTypeNames[keyof singleTypeNames] | types.dataTypes.duration | types.dataTypes.text)} SingleTypeCodes - * @typedef {('point' | 'polygon' | 'duration' | 'lineString' | 'dateRange')} CustomSimpleTypeCodes - * @typedef {(customTypeNames[CustomSimpleTypeCodes]) | CustomSimpleTypeCodes | 'empty'} CustomSimpleTypeNames - * @typedef {{code : SingleTypeCodes, info?: null, options? : {frozen?:boolean, reversed?:boolean} }} SingleColumnInfo - * @typedef {{code : (types.dataTypes.custom), info : CustomSimpleTypeNames, options? : {frozen?:boolean, reversed?:boolean}}} CustomSimpleColumnInfo - * @typedef {{code : (types.dataTypes.map), info : [ColumnInfo, ColumnInfo], options?: {frozen?: Boolean, reversed?: Boolean}}} MapColumnInfo - * @typedef {{code : (types.dataTypes.tuple), info : Array, options?: {frozen?: Boolean, reversed?: Boolean}}} TupleColumnInfo - * @typedef {{code : (types.dataTypes.tuple | types.dataTypes.list)}} TupleListColumnInfoWithoutSubtype TODO: guessDataType can return null on tuple/list info, why? - * @typedef {{code : (types.dataTypes.list | types.dataTypes.set), info : ColumnInfo, options?: {frozen?: Boolean, reversed?: Boolean}}} ListSetColumnInfo - * @typedef {{code : (types.dataTypes.udt), info : {name : string, fields : Array<{name : string, type : ColumnInfo}>}, options? : {frozen?: Boolean, reversed?: Boolean}}} UdtColumnInfo - * @typedef {{code : (types.dataTypes.custom), customTypeName : ('vector'), info : [ColumnInfo, number], options? : {frozen?:boolean, reversed?:boolean}}} VectorColumnInfo - * @typedef {{code : (types.dataTypes.custom), info : string, options? : {frozen?:boolean, reversed?:boolean}}} OtherCustomColumnInfo - * @typedef {SingleColumnInfo | CustomSimpleColumnInfo | MapColumnInfo | TupleColumnInfo | ListSetColumnInfo | VectorColumnInfo | OtherCustomColumnInfo | UdtColumnInfo | TupleListColumnInfoWithoutSubtype} ColumnInfo If this is a simple type, info is null; if this is a collection type with a simple subtype, info is a string, if this is a nested collection type, info is a ColumnInfo object - */ +type SingleTypeCodes = (typeof singleTypeNames[keyof typeof singleTypeNames] | dataTypes.duration | dataTypes.text); -/** - * Serializes and deserializes to and from a CQL type and a Javascript Type. 
- * @param {Number} protocolVersion - * @param {import('./client.js').ClientOptions} options - * @constructor - */ -function Encoder(protocolVersion, options) { - this.encodingOptions = options.encoding || utils.emptyObject; - defineInstanceMembers.call(this); - this.setProtocolVersion(protocolVersion); - setEncoders.call(this); - if (this.encodingOptions.copyBuffer) { - this.handleBuffer = handleBufferCopy; - } - else { - this.handleBuffer = handleBufferRef; +type CustomSimpleTypeCodes = ('point' | 'polygon' | 'duration' | 'lineString' | 'dateRange'); + +type CustomSimpleTypeNames = (typeof customTypeNames[CustomSimpleTypeCodes]) | CustomSimpleTypeCodes | 'empty'; + +type SingleColumnInfo = { code: SingleTypeCodes; info?: null; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type CustomSimpleColumnInfo = { code: (dataTypes.custom); info: CustomSimpleTypeNames; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type MapColumnInfo = { code: (dataTypes.map); info: [DataTypeInfo, DataTypeInfo]; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type TupleColumnInfo = { code: (dataTypes.tuple); info: Array; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type TupleListColumnInfoWithoutSubtype = { code: (dataTypes.tuple | dataTypes.list); }; + +type ListSetColumnInfo = { code: (dataTypes.list | dataTypes.set); info: DataTypeInfo; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type UdtColumnInfo = { code: (dataTypes.udt); info: { name: string; fields: Array<{ name: string; type: DataTypeInfo; }>; }; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type VectorColumnInfo = { code: (dataTypes.custom); customTypeName: ('vector'); info: [DataTypeInfo, number]; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type OtherCustomColumnInfo = { code: (dataTypes.custom); info: string; options?: { frozen?: boolean; reversed?: boolean; }; }; + +type DataTypeInfo = SingleColumnInfo | CustomSimpleColumnInfo | MapColumnInfo | TupleColumnInfo | ListSetColumnInfo | VectorColumnInfo | OtherCustomColumnInfo | UdtColumnInfo | TupleListColumnInfoWithoutSubtype; + + +class Encoder{ + private encodingOptions: ClientOptions['encoding']; + private handleBuffer: (buffer: Buffer) => Buffer; + private decodeCollectionLength : (bytes: any, offset: any) => number; + private getLengthBuffer: (value: Buffer | number) => Buffer; + private collectionLengthSize : number; + /** @internal */ + protocolVersion: number; + + private readonly customDecoders = { + [customTypeNames.duration]: decodeDuration, + [customTypeNames.lineString]: decodeLineString, + [customTypeNames.point]: decodePoint, + [customTypeNames.polygon]: decodePolygon, + [customTypeNames.dateRange]: decodeDateRange + }; + + private readonly customEncoders = { + [customTypeNames.duration]: encodeDuration, + [customTypeNames.lineString]: encodeLineString, + [customTypeNames.point]: encodePoint, + [customTypeNames.polygon]: encodePolygon, + [customTypeNames.dateRange]: encodeDateRange + }; + + /** + * Serializes and deserializes to and from a CQL type and a Javascript Type. 
+ * @param {Number} protocolVersion + * @param {ClientOptions} options + * @constructor + */ + constructor(protocolVersion: number, options: ClientOptions) { + this.encodingOptions = options.encoding || utils.emptyObject; + this.setProtocolVersion(protocolVersion); + if (this.encodingOptions.copyBuffer) { + this.handleBuffer = handleBufferCopy; + } + else { + this.handleBuffer = handleBufferRef; + } } -} -/** - * Declares the privileged instance members. - * @private - */ -function defineInstanceMembers() { /** * Sets the protocol version and the encoding/decoding methods depending on the protocol version * @param {Number} value * @ignore * @internal */ - this.setProtocolVersion = function (value) { + setProtocolVersion = function (value: number) { this.protocolVersion = value; //Set the collection serialization based on the protocol version this.decodeCollectionLength = decodeCollectionLengthV3; @@ -187,24 +209,9 @@ function defineInstanceMembers() { } }; - const customDecoders = { - [customTypeNames.duration]: decodeDuration, - [customTypeNames.lineString]: decodeLineString, - [customTypeNames.point]: decodePoint, - [customTypeNames.polygon]: decodePolygon, - [customTypeNames.dateRange]: decodeDateRange - }; - - const customEncoders = { - [customTypeNames.duration]: encodeDuration, - [customTypeNames.lineString]: encodeLineString, - [customTypeNames.point]: encodePoint, - [customTypeNames.polygon]: encodePolygon, - [customTypeNames.dateRange]: encodeDateRange - }; // Decoding methods - this.decodeBlob = function (bytes) { + private decodeBlob = function (bytes) { return this.handleBuffer(bytes); }; @@ -214,7 +221,7 @@ function defineInstanceMembers() { * @param {OtherCustomColumnInfo | VectorColumnInfo} columnInfo * @returns */ - this.decodeCustom = function (bytes, columnInfo) { + private decodeCustom = function (bytes: Buffer, columnInfo: OtherCustomColumnInfo | VectorColumnInfo) { // Make sure we actually have something to process in typeName before we go any further if (!columnInfo) { @@ -231,55 +238,63 @@ function defineInstanceMembers() { return this.decodeVector(bytes, vectorColumnInfo); } - const handler = customDecoders[columnInfo.info]; + const handler = this.customDecoders[columnInfo.info as string]; if (handler) { return handler.call(this, bytes); } return this.handleBuffer(bytes); }; - this.decodeUtf8String = function (bytes) { + private decodeUtf8String = function (bytes) { return bytes.toString('utf8'); }; - this.decodeAsciiString = function (bytes) { + private decodeAsciiString = function (bytes) { return bytes.toString('ascii'); }; - this.decodeBoolean = function (bytes) { + private decodeBoolean = function (bytes) { return !!bytes.readUInt8(0); }; - this.decodeDouble = function (bytes) { + private decodeDouble = function (bytes) { return bytes.readDoubleBE(0); }; - this.decodeFloat = function (bytes) { + private decodeFloat = function (bytes) { return bytes.readFloatBE(0); }; - this.decodeInt = function (bytes) { + private decodeInt = function (bytes) { return bytes.readInt32BE(0); }; - this.decodeSmallint = function (bytes) { + private decodeSmallint = function (bytes) { return bytes.readInt16BE(0); }; - this.decodeTinyint = function (bytes) { + private decodeTinyint = function (bytes) { return bytes.readInt8(0); }; - this._decodeCqlLongAsLong = function (bytes) { - return Long.fromBuffer(bytes); + private decodeLong = function (bytes: Buffer): Long | bigint { + return this.encodingOptions.useBigIntAsLong + ? 
this._decodeCqlLongAsBigInt(bytes) + : this._decodeCqlLongAsLong(bytes); + }; + + private _decodeCqlLongAsLong = function (bytes: Buffer): Long { + return Long["fromBuffer"](bytes); }; - this._decodeCqlLongAsBigInt = function (bytes) { + private _decodeCqlLongAsBigInt = function (bytes: Buffer): bigint { return BigInt.asIntN(64, (BigInt(bytes.readUInt32BE(0)) << bigInt32) | BigInt(bytes.readUInt32BE(4))); }; - this.decodeLong = this.encodingOptions.useBigIntAsLong - ? this._decodeCqlLongAsBigInt - : this._decodeCqlLongAsLong; + private decodeVarint = function (bytes: Buffer): Integer | bigint { + return this.encodingOptions.useBigIntAsVarint + ? this._decodeVarintAsBigInt(bytes) + : this._decodeVarintAsInteger(bytes); + }; - this._decodeVarintAsInteger = function (bytes) { + private _decodeVarintAsInteger = function (bytes: Buffer): Integer { return Integer.fromBuffer(bytes); }; - this._decodeVarintAsBigInt = function decodeVarintAsBigInt(bytes) { + private _decodeVarintAsBigInt = function decodeVarintAsBigInt(bytes) { let result = bigInt0; if (bytes[0] <= 0x7f) { for (let i = 0; i < bytes.length; i++) { @@ -297,26 +312,22 @@ function defineInstanceMembers() { return result; }; - this.decodeVarint = this.encodingOptions.useBigIntAsVarint - ? this._decodeVarintAsBigInt - : this._decodeVarintAsInteger; - - this.decodeDecimal = function(bytes) { + private decodeDecimal = function(bytes) { return BigDecimal.fromBuffer(bytes); }; - this.decodeTimestamp = function(bytes) { + private decodeTimestamp = function(bytes) { return new Date(this._decodeCqlLongAsLong(bytes).toNumber()); }; - this.decodeDate = function (bytes) { + private decodeDate = function (bytes) { return types.LocalDate.fromBuffer(bytes); }; - this.decodeTime = function (bytes) { + private decodeTime = function (bytes) { return types.LocalTime.fromBuffer(bytes); }; /* * Reads a list from bytes */ - this.decodeList = function (bytes, columnInfo) { + private decodeList = function (bytes, columnInfo) { const subtype = columnInfo.info; const totalItems = this.decodeCollectionLength(bytes, 0); let offset = this.collectionLengthSize; @@ -334,7 +345,7 @@ function defineInstanceMembers() { /* * Reads a Set from bytes */ - this.decodeSet = function (bytes, columnInfo) { + private decodeSet = function (bytes, columnInfo) { const arr = this.decodeList(bytes, columnInfo); if (this.encodingOptions.set) { const setConstructor = this.encodingOptions.set; @@ -345,13 +356,13 @@ function defineInstanceMembers() { /* * Reads a map (key / value) from bytes */ - this.decodeMap = function (bytes, columnInfo) { + private decodeMap = function (bytes, columnInfo) { const subtypes = columnInfo.info; let map; const totalItems = this.decodeCollectionLength(bytes, 0); let offset = this.collectionLengthSize; const self = this; - function readValues(callback, thisArg) { + function readValues(callback, thisArg?) 
{ for (let i = 0; i < totalItems; i++) { const keyLength = self.decodeCollectionLength(bytes, offset); offset += self.collectionLengthSize; @@ -381,13 +392,13 @@ function defineInstanceMembers() { } return map; }; - this.decodeUuid = function (bytes) { + private decodeUuid = function (bytes) { return new types.Uuid(this.handleBuffer(bytes)); }; - this.decodeTimeUuid = function (bytes) { + private decodeTimeUuid = function (bytes) { return new types.TimeUuid(this.handleBuffer(bytes)); }; - this.decodeInet = function (bytes) { + private decodeInet = function (bytes) { return new types.InetAddress(this.handleBuffer(bytes)); }; /** @@ -396,7 +407,7 @@ function defineInstanceMembers() { * @param {UdtColumnInfo} columnInfo * @private */ - this.decodeUdt = function (bytes, columnInfo) { + private decodeUdt = function (bytes: Buffer, columnInfo: UdtColumnInfo) { const udtInfo = columnInfo.info; const result = {}; let offset = 0; @@ -416,7 +427,7 @@ function defineInstanceMembers() { return result; }; - this.decodeTuple = function (bytes, columnInfo) { + private decodeTuple = function (bytes, columnInfo) { const tupleInfo = columnInfo.info; const elements = new Array(tupleInfo.length); let offset = 0; @@ -438,7 +449,7 @@ function defineInstanceMembers() { }; //Encoding methods - this.encodeFloat = function (value) { + private encodeFloat = function (value) { if (typeof value === 'string') { // All numeric types are supported as strings for historical reasons value = parseFloat(value); @@ -457,7 +468,7 @@ function defineInstanceMembers() { return buf; }; - this.encodeDouble = function (value) { + private encodeDouble = function (value) { if (typeof value === 'string') { // All numeric types are supported as strings for historical reasons value = parseFloat(value); @@ -480,7 +491,7 @@ function defineInstanceMembers() { * @param {Date|String|Long|Number} value * @private */ - this.encodeTimestamp = function (value) { + private encodeTimestamp = function (value: Date | string | Long | number) { const originalValue = value; if (typeof value === 'string') { value = new Date(value); @@ -493,6 +504,7 @@ function defineInstanceMembers() { } } if (this.encodingOptions.useBigIntAsLong) { + // @ts-ignore value = BigInt(value); } return this.encodeLong(value); @@ -503,7 +515,7 @@ function defineInstanceMembers() { * @throws {TypeError} * @private */ - this.encodeDate = function (value) { + private encodeDate = function (value: Date | string | LocalDate): Buffer { const originalValue = value; try { if (typeof value === 'string') { @@ -523,12 +535,12 @@ function defineInstanceMembers() { return value.toBuffer(); }; /** - * @param {String|LocalDate} value + * @param {String|LocalTime} value * @returns {Buffer} * @throws {TypeError} * @private */ - this.encodeTime = function (value) { + private encodeTime = function (value: string | LocalTime): Buffer { const originalValue = value; try { if (typeof value === 'string') { @@ -548,7 +560,7 @@ function defineInstanceMembers() { * @param {Uuid|String|Buffer} value * @private */ - this.encodeUuid = function (value) { + private encodeUuid = function (value: Uuid | string | Buffer) { if (typeof value === 'string') { try { value = types.Uuid.fromString(value).getBuffer(); @@ -569,7 +581,7 @@ function defineInstanceMembers() { * @returns {Buffer} * @private */ - this.encodeInet = function (value) { + private encodeInet = function (value: string | InetAddress | Buffer): Buffer { if (typeof value === 'string') { value = types.InetAddress.fromString(value); } @@ -586,19 +598,19 
@@ function defineInstanceMembers() { * @param {Long|Buffer|String|Number} value * @private */ - this._encodeBigIntFromLong = function (value) { + private _encodeBigIntFromLong = function (value: Long | Buffer | string | number) { if (typeof value === 'number') { - value = Long.fromNumber(value); + value = Long["fromNumber"](value); } else if (typeof value === 'string') { - value = Long.fromString(value); + value = Long["fromString"](value); } let buf = null; if (value instanceof Long) { - buf = Long.toBuffer(value); + buf = Long["toBuffer"](value); } else if (value instanceof MutableLong) { - buf = Long.toBuffer(value.toImmutable()); + buf = Long["toBuffer"](value.toImmutable()); } if (buf === null) { @@ -608,7 +620,13 @@ function defineInstanceMembers() { return buf; }; - this._encodeBigIntFromBigInt = function (value) { + private encodeLong = function (value: Long | bigint | Buffer | string | number) { + return this.encodingOptions.useBigIntAsLong + ? this._encodeBigIntFromBigInt(value) + : this._encodeBigIntFromLong(value); + }; + + private _encodeBigIntFromBigInt = function (value: string | bigint) { if (typeof value === 'string') { // All numeric types are supported as strings for historical reasons value = BigInt(value); @@ -626,16 +644,12 @@ function defineInstanceMembers() { return buffer; }; - this.encodeLong = this.encodingOptions.useBigIntAsLong - ? this._encodeBigIntFromBigInt - : this._encodeBigIntFromLong; - /** * @param {Integer|Buffer|String|Number} value * @returns {Buffer} * @private */ - this._encodeVarintFromInteger = function (value) { + private _encodeVarintFromInteger = function (value: Integer | Buffer | string | number): Buffer { if (typeof value === 'number') { value = Integer.fromNumber(value); } @@ -655,7 +669,13 @@ function defineInstanceMembers() { return buf; }; - this._encodeVarintFromBigInt = function (value) { + private encodeVarint = function (value: Integer | bigint | Buffer | string | number) { + return this.encodingOptions.useBigIntAsVarint + ? this._encodeVarintFromBigInt(value) + : this._encodeVarintFromInteger(value); + }; + + private _encodeVarintFromBigInt = function (value: string | bigint) { if (typeof value === 'string') { // All numeric types are supported as strings for historical reasons value = BigInt(value); @@ -701,16 +721,12 @@ function defineInstanceMembers() { return utils.allocBufferFromArray(parts); }; - this.encodeVarint = this.encodingOptions.useBigIntAsVarint - ? 
this._encodeVarintFromBigInt - : this._encodeVarintFromInteger; - /** * @param {BigDecimal|Buffer|String|Number} value * @returns {Buffer} * @private */ - this.encodeDecimal = function (value) { + private encodeDecimal = function (value: BigDecimal | Buffer | string | number): Buffer { if (typeof value === 'number') { value = BigDecimal.fromNumber(value); } else if (typeof value === 'string') { @@ -727,19 +743,20 @@ function defineInstanceMembers() { return buf; }; - this.encodeString = function (value, encoding) { + private encodeString = function (value, encoding) { if (typeof value !== 'string') { throw new TypeError('Not a valid text value, expected String obtained ' + util.inspect(value)); } return utils.allocBufferFromString(value, encoding); }; - this.encodeUtf8String = function (value) { + private encodeUtf8String = function (value) { return this.encodeString(value, 'utf8'); }; - this.encodeAsciiString = function (value) { + private encodeAsciiString = function (value) { return this.encodeString(value, 'ascii'); }; - this.encodeBlob = function (value) { + + private encodeBlob = function (value) { if (!(value instanceof Buffer)) { throw new TypeError('Not a valid blob, expected Buffer obtained ' + util.inspect(value)); } @@ -752,7 +769,7 @@ function defineInstanceMembers() { * @param {OtherCustomColumnInfo | VectorColumnInfo} columnInfo * @returns */ - this.encodeCustom = function (value, columnInfo) { + private encodeCustom = function (value: any, columnInfo: OtherCustomColumnInfo | VectorColumnInfo) { if ('customTypeName' in columnInfo && columnInfo.customTypeName === 'vector') { return this.encodeVector(value, columnInfo); @@ -763,7 +780,7 @@ function defineInstanceMembers() { return this.encodeVector(value, vectorColumnInfo); } - const handler = customEncoders[columnInfo.info]; + const handler = this.customEncoders[columnInfo.info as string]; if (handler) { return handler.call(this, value); } @@ -774,14 +791,14 @@ function defineInstanceMembers() { * @returns {Buffer} * @private */ - this.encodeBoolean = function (value) { + private encodeBoolean = function (value: boolean): Buffer { return value ? 
buffers.int8One : buffers.int8Zero; }; /** * @param {Number|String} value * @private */ - this.encodeInt = function (value) { + private encodeInt = function (value: number) { if (isNaN(value)) { throw new TypeError('Expected Number, obtained ' + util.inspect(value)); } @@ -793,7 +810,7 @@ function defineInstanceMembers() { * @param {Number|String} value * @private */ - this.encodeSmallint = function (value) { + private encodeSmallint = function (value: number) { if (isNaN(value)) { throw new TypeError('Expected Number, obtained ' + util.inspect(value)); } @@ -805,7 +822,7 @@ function defineInstanceMembers() { * @param {Number} value * @private */ - this.encodeTinyint = function (value) { + private encodeTinyint = function (value: number) { if (isNaN(value)) { throw new TypeError('Expected Number, obtained ' + util.inspect(value)); } @@ -813,7 +830,7 @@ function defineInstanceMembers() { buf.writeInt8(value, 0); return buf; }; - this.encodeList = function (value, columnInfo) { + private encodeList = function (value, columnInfo) { const subtype = columnInfo.info; if (!Array.isArray(value)) { throw new TypeError('Not a valid list value, expected Array obtained ' + util.inspect(value)); @@ -836,7 +853,7 @@ function defineInstanceMembers() { } return Buffer.concat(parts); }; - this.encodeSet = function (value, columnInfo) { + private encodeSet = function (value, columnInfo) { if (this.encodingOptions.set && value instanceof this.encodingOptions.set) { const arr = []; value.forEach(function (x) { @@ -853,7 +870,7 @@ function defineInstanceMembers() { * @returns {Buffer} * @private */ - this.encodeMap = function (value, columnInfo) { + private encodeMap = function (value, columnInfo: MapColumnInfo): Buffer { const subtypes = columnInfo.info; const parts = []; let propCounter = 0; @@ -904,13 +921,15 @@ function defineInstanceMembers() { parts.unshift(this.getLengthBuffer(propCounter)); return Buffer.concat(parts); }; + + /** * * @param {any} value * @param {UdtColumnInfo} columnInfo * @returns */ - this.encodeUdt = function (value, columnInfo) { + private encodeUdt = function (value: any, columnInfo: UdtColumnInfo) { const udtInfo = columnInfo.info; const parts = []; let totalLength = 0; @@ -941,7 +960,7 @@ function defineInstanceMembers() { * @param {TupleColumnInfo} columnInfo * @returns */ - this.encodeTuple = function (value, columnInfo) { + private encodeTuple = function (value: any, columnInfo: TupleColumnInfo) { const tupleInfo = columnInfo.info; const parts = []; let totalLength = 0; @@ -979,7 +998,7 @@ function defineInstanceMembers() { * @param {VectorColumnInfo} params * @returns {Vector} */ - this.decodeVector = function(buffer, params) { + private decodeVector = function(buffer: Buffer, params: VectorColumnInfo): Vector { const subtype = params.info[0]; const dimension = params.info[1]; const elemLength = this.serializationSizeIfFixed(subtype); @@ -1012,10 +1031,10 @@ function defineInstanceMembers() { }; /** - * @param {ColumnInfo} cqlType + * @param {DataTypeInfo} cqlType * @returns {Number} */ - this.serializationSizeIfFixed = function (cqlType) { + private serializationSizeIfFixed = function (cqlType: DataTypeInfo): number { switch (cqlType.code) { case dataTypes.bigint: return 8; @@ -1052,7 +1071,7 @@ function defineInstanceMembers() { * @param {VectorColumnInfo} params * @returns {Buffer} */ - this.encodeVector = function(value, params) { + private encodeVector = function(value: Vector, params: VectorColumnInfo): Buffer { if (!(value instanceof Vector)) { throw new 
TypeError("Driver only supports Vector type when encoding a vector"); @@ -1088,14 +1107,14 @@ function defineInstanceMembers() { * @returns {VectorColumnInfo} * @internal */ - this.parseVectorTypeArgs = function(typeName, stringToExclude, subtypeResolveFn) { + private parseVectorTypeArgs = function(typeName: string, stringToExclude: string, subtypeResolveFn: Function): VectorColumnInfo { const argsStartIndex = stringToExclude.length + 1; const argsLength = typeName.length - (stringToExclude.length + 2); const params = parseParams(typeName, argsStartIndex, argsLength); if (params.length === 2) { /** @type {VectorColumnInfo} */ - const columnInfo = { code: dataTypes.custom, info: [subtypeResolveFn.bind(this)(params[0].trim()), parseInt(params[1].trim(), 10 )], customTypeName : 'vector'}; + const columnInfo: VectorColumnInfo = { code: dataTypes.custom, info: [subtypeResolveFn.bind(this)(params[0].trim()), parseInt(params[1].trim(), 10 )], customTypeName : 'vector'}; return columnInfo; } @@ -1111,7 +1130,7 @@ function defineInstanceMembers() { * @internal * @ignore */ - this.setRoutingKeyFromUser = function (params, execOptions, keys) { + setRoutingKeyFromUser = function (params: Array, execOptions: ExecutionOptions, keys?) { let totalLength = 0; const userRoutingKey = execOptions.getRoutingKey(); if (Array.isArray(userRoutingKey)) { @@ -1137,7 +1156,9 @@ function defineInstanceMembers() { } // If routingKey is present, ensure it is a Buffer, Token, or TokenRange. Otherwise throw an error. if (userRoutingKey) { + // @ts-ignore if (userRoutingKey instanceof Buffer || userRoutingKey instanceof token.Token + // @ts-ignore || userRoutingKey instanceof token.TokenRange) { return; } @@ -1168,7 +1189,7 @@ function defineInstanceMembers() { try { totalLength = this._encodeRoutingKeyParts(parts, routingIndexes, encodeParam); - } catch (e) { + } catch (_e) { // There was an error encoding a parameter that is part of the routing key, // ignore now to fail afterwards } @@ -1189,7 +1210,7 @@ function defineInstanceMembers() { * @internal * @ignore */ - this.setRoutingKeyFromMeta = function (meta, params, execOptions) { + setRoutingKeyFromMeta = function (meta: any, params: Array, execOptions: ExecutionOptions) { const routingIndexes = execOptions.getRoutingIndexes(); if (!routingIndexes) { return; @@ -1204,7 +1225,7 @@ function defineInstanceMembers() { try { totalLength = this._encodeRoutingKeyParts(parts, routingIndexes, encodeParam); - } catch (e) { + } catch (_e) { // There was an error encoding a parameter that is part of the routing key, // ignore now to fail afterwards } @@ -1223,7 +1244,7 @@ function defineInstanceMembers() { * @returns {Number} The total length * @private */ - this._encodeRoutingKeyParts = function (parts, routingIndexes, encodeParam) { + private _encodeRoutingKeyParts = function (parts: Array, routingIndexes: Array, encodeParam: Function): number { let totalLength = 0; for (let i = 0; i < routingIndexes.length; i++) { const paramIndex = routingIndexes[i]; @@ -1254,12 +1275,12 @@ function defineInstanceMembers() { * @param {Number|null} length * @param {Function} udtResolver * @async - * @returns {Promise.} callback Callback invoked with err and {{code: number, info: Object|Array|null, options: {frozen: Boolean}}} + * @returns {Promise.} callback Callback invoked with err and {{code: number, info: Object|Array|null, options: {frozen: Boolean}}} * @internal * @throws {Error} * @ignore */ - this.parseTypeName = async function (keyspace, typeName, startIndex, length, udtResolver) { + 
parseTypeName = async function (keyspace: string, typeName: string, startIndex: number, length: number | null, udtResolver: Function): Promise { startIndex = startIndex || 0; if (!length) { length = typeName.length; @@ -1397,7 +1418,7 @@ function defineInstanceMembers() { const typeCode = dataTypes[typeName]; if (typeof typeCode === 'number') { - return {code : typeCode, info: null}; + return {code : typeCode as SingleTypeCodes, info: null}; } if (typeName === cqlNames.duration) { @@ -1430,7 +1451,7 @@ function defineInstanceMembers() { * @returns {Promise} * @private */ - this._parseChildTypes = function (keyspace, typeNames, udtResolver) { + private _parseChildTypes = function (keyspace: string, typeNames: Array, udtResolver: Function): Promise { return Promise.all(typeNames.map(name => this.parseTypeName(keyspace, name.trim(), 0, null, udtResolver))); }; @@ -1440,11 +1461,11 @@ function defineInstanceMembers() { * @param {Number} [startIndex] * @param {Number} [length] * @throws {TypeError} - * @returns {ColumnInfo} + * @returns {DataTypeInfo} * @internal * @ignore */ - this.parseFqTypeName = function (typeName, startIndex, length) { + parseFqTypeName = function (typeName: string, startIndex?: number, length?: number): DataTypeInfo { let frozen = false; let reversed = false; startIndex = startIndex || 0; @@ -1459,7 +1480,7 @@ function defineInstanceMembers() { reversed = true; } if (length > complexTypeNames.frozen.length && - typeName.indexOf(complexTypeNames.frozen, startIndex) === startIndex) { + typeName.indexOf(complexTypeNames.frozen, startIndex) === startIndex) { //Remove the frozen token startIndex += complexTypeNames.frozen.length + 1; length -= complexTypeNames.frozen.length + 2; @@ -1484,7 +1505,7 @@ function defineInstanceMembers() { } const typeCode = singleTypeNames[typeName]; if (typeof typeCode === 'number') { - return {code : typeCode, info: null, options : options}; + return {code : typeCode as SingleTypeCodes, info: null, options : options}; } // special handling for duration if (typeName === customTypeNames.duration) { @@ -1568,14 +1589,14 @@ function defineInstanceMembers() { options: options }; } - + if (typeName.indexOf(customTypeNames.vector, startIndex) === startIndex) { // It's a vector, so record the subtype and dimension. const params = this.parseVectorTypeArgs.bind(this)(typeName, customTypeNames.vector, this.parseFqTypeName); params.options = options; return params; } - + // Assume custom type if cannot be parsed up to this point. 
const info = typeName.substr(startIndex, length); return { @@ -1584,14 +1605,14 @@ function defineInstanceMembers() { options: options }; }; - /** - * Parses type names with composites - * @param {String} typesString - * @returns {{types: Array, isComposite: Boolean, hasCollections: Boolean}} - * @internal - * @ignore - */ - this.parseKeyTypes = function (typesString) { + /** + * Parses type names with composites + * @param {String} typesString + * @returns {{types: Array, isComposite: Boolean, hasCollections: Boolean}} + * @internal + * @ignore + */ + parseKeyTypes = function (typesString: string): { types: Array; isComposite: boolean; hasCollections: boolean; } { let i = 0; let length = typesString.length; const isComposite = typesString.indexOf(complexTypeNames.composite) === 0; @@ -1648,23 +1669,23 @@ function defineInstanceMembers() { isComposite: isComposite }; }; - /** - * - * @param {string} typeName - * @param {number} startIndex - * @param {number} length - * @returns {UdtColumnInfo} - */ - this._parseUdtName = function (typeName, startIndex, length) { + /** + * + * @param {string} typeName + * @param {number} startIndex + * @param {number} length + * @returns {UdtColumnInfo} + */ + private _parseUdtName = function (typeName: string, startIndex: number, length: number): UdtColumnInfo { const udtParams = parseParams(typeName, startIndex, length); if (udtParams.length < 2) { //It should contain at least the keyspace, name of the udt and a type throw new TypeError('Not a valid type ' + typeName); } /** - * @type {{keyspace: String, name: String, fields: Array}} - */ - const udtInfo = { + * @type {{keyspace: String, name: String, fields: Array}} + */ + const udtInfo: { keyspace: string; name: string; fields: Array; } = { keyspace: udtParams[0], name: utils.allocBufferFromString(udtParams[1], 'hex').toString(), fields: [] @@ -1683,14 +1704,10 @@ function defineInstanceMembers() { info : udtInfo }; }; -} -/** - * Sets the encoder and decoder methods for this instance - * @private - */ -function setEncoders() { - this.decoders = { + + + private decoders = { [dataTypes.custom]: this.decodeCustom, [dataTypes.ascii]: this.decodeAsciiString, [dataTypes.bigint]: this.decodeLong, @@ -1720,7 +1737,7 @@ function setEncoders() { [dataTypes.tuple]: this.decodeTuple }; - this.encoders = { + private encoders = { [dataTypes.custom]: this.encodeCustom, [dataTypes.ascii]: this.encodeAsciiString, [dataTypes.bigint]: this.encodeLong, @@ -1749,216 +1766,224 @@ function setEncoders() { [dataTypes.udt]: this.encodeUdt, [dataTypes.tuple]: this.encodeTuple }; -} -/** - * Decodes Cassandra bytes into Javascript values. - *

- *   This is part of an experimental API, this can be changed future releases.
- * @param {Buffer} buffer Raw buffer to be decoded. - * @param {ColumnInfo} type - */ -Encoder.prototype.decode = function (buffer, type) { - if (buffer === null || (buffer.length === 0 && !zeroLengthTypesSupported.has(type.code))) { - return null; - } + /** + * Decodes Cassandra bytes into Javascript values. + *

+ *   This is part of an experimental API, this can be changed future releases.
+ * @param {Buffer} buffer Raw buffer to be decoded. + * @param {DataTypeInfo} type + */ + public decode = function (buffer: Buffer, type: DataTypeInfo) { + if (buffer === null || (buffer.length === 0 && !zeroLengthTypesSupported.has(type.code))) { + return null; + } - const decoder = this.decoders[type.code]; + const decoder = this.decoders[type.code]; - if (!decoder) { - throw new Error('Unknown data type: ' + type.code); - } + if (!decoder) { + throw new Error('Unknown data type: ' + type.code); + } - return decoder.call(this, buffer, type); -}; + return decoder.call(this, buffer, type); + }; -/** - * Encodes Javascript types into Buffer according to the Cassandra protocol. - *
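For orientation, a round trip through the two public methods might look like this minimal sketch (the `encoder` instance and the driver's `types` module are assumed to be in scope; the type hint can be a name, a code, or a full type-info object, as the JSDoc below describes):

    // Encode a JavaScript string as a CQL text value, then decode the raw bytes back.
    const textBuffer = encoder.encode('hello', 'text');   // 'text' is resolved via dataTypes.getByName()
    const roundTrip = encoder.decode(textBuffer, { code: types.dataTypes.text, info: null });
    // roundTrip === 'hello'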

- *   This is part of an experimental API, this can be changed future releases.
- * @param {*} value The value to be converted.
- * @param {ColumnInfo | Number | String} typeInfo The type information.
- *   It can be either a:
- *   - A String representing the data type.
- *   - A Number with one of the values of {@link module:types~dataTypes dataTypes}.
- *   - An Object containing the type.code as one of the values of
- *     {@link module:types~dataTypes dataTypes} and type.info.
- * @returns {Buffer} - * @throws {TypeError} When there is an encoding error - */ -Encoder.prototype.encode = function (value, typeInfo) { - if (value === undefined) { - value = this.encodingOptions.useUndefinedAsUnset && this.protocolVersion >= 4 ? types.unset : null; - } - if (value === types.unset) { - if (!types.protocolVersion.supportsUnset(this.protocolVersion)) { - throw new TypeError('Unset value can not be used for this version of Cassandra, protocol version: ' + - this.protocolVersion); + /** + * Encodes Javascript types into Buffer according to the Cassandra protocol. + *

+ *   This is part of an experimental API, this can be changed future releases.
+ * @param {*} value The value to be converted.
+ * @param {DataTypeInfo | Number | String} typeInfo The type information.
+ *   It can be either a:
+ *   - A String representing the data type.
+ *   - A Number with one of the values of {@link module:types~dataTypes dataTypes}.
+ *   - An Object containing the type.code as one of the values of
+ *     {@link module:types~dataTypes dataTypes} and type.info.
+ * @returns {Buffer} + * @throws {TypeError} When there is an encoding error + */ + public encode = function (value: any, typeInfo: DataTypeInfo | number | string): Buffer { + if (value === undefined) { + value = this.encodingOptions.useUndefinedAsUnset && this.protocolVersion >= 4 ? types.unset : null; } - return value; - } - - if (value === null || value instanceof Buffer) { - return value; - } - - /** @type {ColumnInfo | null} */ - let type = null; + if (value === types.unset) { + if (!types.protocolVersion.supportsUnset(this.protocolVersion)) { + throw new TypeError('Unset value can not be used for this version of Cassandra, protocol version: ' + + this.protocolVersion); + } - if (typeInfo) { - if (typeof typeInfo === 'number') { - type = { - code: typeInfo - }; + return value; } - else if (typeof typeInfo === 'string') { - type = dataTypes.getByName(typeInfo); - } - else if (typeof typeInfo.code === 'number') { - type = typeInfo; + + if (value === null || value instanceof Buffer) { + return value; } - if (type == null || typeof type.code !== 'number') { - throw new TypeError('Type information not valid, only String and Number values are valid hints'); + + /** @type {DataTypeInfo | null} */ + let type: DataTypeInfo | null = null; + + if (typeInfo) { + if (typeof typeInfo === 'number') { + type = { + code: typeInfo as SingleTypeCodes + }; + } + else if (typeof typeInfo === 'string') { + type = dataTypes.getByName(typeInfo); + } + else if (typeof typeInfo.code === 'number') { + type = typeInfo; + } + if (type == null || typeof type.code !== 'number') { + throw new TypeError('Type information not valid, only String and Number values are valid hints'); + } } - } - else { + else { //Lets guess - type = Encoder.guessDataType(value); - if (!type) { - throw new TypeError('Target data type could not be guessed, you should use prepared statements for accurate type mapping. Value: ' + util.inspect(value)); + type = Encoder.guessDataType(value); + if (!type) { + throw new TypeError('Target data type could not be guessed, you should use prepared statements for accurate type mapping. 
Value: ' + util.inspect(value)); + } } - } - const encoder = this.encoders[type.code]; + const encoder = this.encoders[type.code]; - if (!encoder) { - throw new Error('Type not supported ' + type.code); - } + if (!encoder) { + throw new Error('Type not supported ' + type.code); + } - return encoder.call(this, value, type); -}; + return encoder.call(this, value, type); + }; -/** - * Try to guess the Cassandra type to be stored, based on the javascript value type - * @param value - * @returns {ColumnInfo | null} - * @ignore - * @internal - */ -Encoder.guessDataType = function (value) { - const esTypeName = (typeof value); - if (esTypeName === 'number') { - return {code : dataTypes.double}; - } - else if (esTypeName === 'string') { - if (value.length === 36 && uuidRegex.test(value)){ + /** + * Try to guess the Cassandra type to be stored, based on the javascript value type + * @param value + * @returns {DataTypeInfo | null} + * @ignore + * @internal + */ + public static guessDataType = function (value): DataTypeInfo | null { + const esTypeName = (typeof value); + if (esTypeName === 'number') { + return {code : dataTypes.double}; + } + else if (esTypeName === 'string') { + if (value.length === 36 && uuidRegex.test(value)){ + return {code : dataTypes.uuid}; + } + return {code : dataTypes.text}; + + } + else if (esTypeName === 'boolean') { + return {code : dataTypes.boolean}; + } + else if (value instanceof Buffer) { + return {code : dataTypes.blob}; + } + else if (value instanceof Date) { + return {code : dataTypes.timestamp}; + } + else if (value instanceof Long) { + return {code : dataTypes.bigint}; + } + else if (value instanceof Integer) { + return {code : dataTypes.varint}; + } + else if (value instanceof BigDecimal) { + return {code : dataTypes.decimal}; + } + else if (value instanceof types.Uuid) { return {code : dataTypes.uuid}; } - return {code : dataTypes.text}; - - } - else if (esTypeName === 'boolean') { - return {code : dataTypes.boolean}; - } - else if (value instanceof Buffer) { - return {code : dataTypes.blob}; - } - else if (value instanceof Date) { - return {code : dataTypes.timestamp}; - } - else if (value instanceof Long) { - return {code : dataTypes.bigint}; - } - else if (value instanceof Integer) { - return {code : dataTypes.varint}; - } - else if (value instanceof BigDecimal) { - return {code : dataTypes.decimal}; - } - else if (value instanceof types.Uuid) { - return {code : dataTypes.uuid}; - } - else if (value instanceof types.InetAddress) { - return {code : dataTypes.inet}; - } - else if (value instanceof types.Tuple) { - return {code : dataTypes.tuple}; - } - else if (value instanceof types.LocalDate) { - return {code : dataTypes.date}; - } - else if (value instanceof types.LocalTime) { - return {code : dataTypes.time}; - } - else if (value instanceof types.Duration) { - return {code : dataTypes.custom, - info : customTypeNames.duration}; - } - // Map JS TypedArrays onto vectors - else if (value instanceof types.Vector) { - if (value && value.length > 0) { - if (value instanceof Float32Array) { - return { - code: dataTypes.custom, - customTypeName: 'vector', - info: [ {code: dataTypes.float}, value.length] - }; - } - - /** @type {ColumnInfo?} */ - let subtypeColumnInfo = null; - // try to fetch the subtype from the Vector, or else guess - if (value.subtype) { - try { - subtypeColumnInfo = dataTypes.getByName(value.subtype); - } catch (TypeError) { - // ignore + else if (value instanceof types.InetAddress) { + return {code : dataTypes.inet}; + } + else if (value 
instanceof types.Tuple) { + return {code : dataTypes.tuple}; + } + else if (value instanceof types.LocalDate) { + return {code : dataTypes.date}; + } + else if (value instanceof types.LocalTime) { + return {code : dataTypes.time}; + } + else if (value instanceof types.Duration) { + return {code : dataTypes.custom, + info : customTypeNames.duration}; + } + // Map JS TypedArrays onto vectors + else if (value instanceof types.Vector) { + if (value && value.length > 0) { + if (value instanceof Float32Array) { + return { + code: dataTypes.custom, + customTypeName: 'vector', + info: [ {code: dataTypes.float}, value.length] + }; + } + + /** @type {DataTypeInfo?} */ + let subtypeColumnInfo: DataTypeInfo | null = null; + // try to fetch the subtype from the Vector, or else guess + if (value.subtype) { + try { + subtypeColumnInfo = dataTypes.getByName(value.subtype); + } catch (_TypeError) { + // ignore + } + } + if (subtypeColumnInfo == null){ + subtypeColumnInfo = this.guessDataType(value[0]); } - } - if (subtypeColumnInfo == null){ - subtypeColumnInfo = this.guessDataType(value[0]); + if (subtypeColumnInfo != null) { + return { + code: dataTypes.custom, + customTypeName: 'vector', + info: [subtypeColumnInfo, value.length] + }; + } + throw new TypeError("Cannot guess subtype from element " + value[0]); + } else { + throw new TypeError("Cannot guess subtype of empty vector"); } - if (subtypeColumnInfo != null) { - return { - code: dataTypes.custom, - customTypeName: 'vector', - info: [subtypeColumnInfo, value.length] - }; + } + else if (Array.isArray(value)) { + return {code : dataTypes.list}; + } + else if (value instanceof Geometry) { + if (value instanceof LineString) { + return {code : dataTypes.custom, + info : customTypeNames.lineString}; + } else if (value instanceof Point) { + return {code : dataTypes.custom, + info : customTypeNames.point}; + } else if (value instanceof Polygon) { + return {code : dataTypes.custom, + info : customTypeNames.polygon}; } - throw new TypeError("Cannot guess subtype from element " + value[0]); - } else { - throw new TypeError("Cannot guess subtype of empty vector"); } - } - else if (Array.isArray(value)) { - return {code : dataTypes.list}; - } - else if (value instanceof Geometry) { - if (value instanceof LineString) { + else if (value instanceof DateRange) { return {code : dataTypes.custom, - info : customTypeNames.lineString}; - } else if (value instanceof Point) { - return {code : dataTypes.custom, - info : customTypeNames.point}; - } else if (value instanceof Polygon) { - return {code : dataTypes.custom, - info : customTypeNames.polygon}; + info : customTypeNames.dateRange}; } - } - else if (value instanceof DateRange) { - return {code : dataTypes.custom, - info : customTypeNames.dateRange}; - } + + return null; + }; - return null; -}; + private static isTypedArray = function(arg) { + // The TypedArray superclass isn't available directly so to detect an instance of a TypedArray + // subclass we have to access the prototype of a concrete instance. There's nothing magical about + // Uint8Array here; we could just as easily use any of the other TypedArray subclasses. 
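A concrete illustration of the prototype trick described in the comment above; this is plain JavaScript runtime behaviour and does not depend on the driver:

    const TypedArray = Object.getPrototypeOf(Uint8Array); // the hidden %TypedArray% constructor
    new Float32Array(2) instanceof TypedArray; // true
    new Uint8Array(2) instanceof TypedArray;   // true
    [] instanceof TypedArray;                  // false: plain arrays are not typed arrays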
+ return (arg instanceof Object.getPrototypeOf(Uint8Array)); + }; +} /** * Gets a buffer containing with the bytes (BE) representing the collection length for protocol v2 and below @@ -1966,7 +1991,7 @@ Encoder.guessDataType = function (value) { * @returns {Buffer} * @private */ -function getLengthBufferV2(value) { +function getLengthBufferV2(value: Buffer | number): Buffer { if (!value) { return buffers.int16Zero; } @@ -1986,7 +2011,7 @@ function getLengthBufferV2(value) { * @returns {Buffer} * @private */ -function getLengthBufferV3(value) { +function getLengthBufferV3(value: Buffer | number): Buffer { if (!value) { return buffers.int32Zero; } @@ -2004,7 +2029,7 @@ function getLengthBufferV3(value) { * @param {Buffer} buffer * @private */ -function handleBufferCopy(buffer) { +function handleBufferCopy(buffer: Buffer) { if (buffer === null) { return null; } @@ -2015,7 +2040,7 @@ function handleBufferCopy(buffer) { * @param {Buffer} buffer * @private */ -function handleBufferRef(buffer) { +function handleBufferRef(buffer: Buffer) { return buffer; } /** @@ -2025,7 +2050,7 @@ function handleBufferRef(buffer) { * @returns {Number} * @private */ -function decodeCollectionLengthV3(bytes, offset) { +function decodeCollectionLengthV3(bytes, offset): number { return bytes.readInt32BE(offset); } /** @@ -2035,7 +2060,7 @@ function decodeCollectionLengthV3(bytes, offset) { * @returns {Number} * @private */ -function decodeCollectionLengthV2(bytes, offset) { +function decodeCollectionLengthV2(bytes, offset): number { return bytes.readUInt16BE(offset); } @@ -2054,7 +2079,7 @@ function encodeDuration(value) { * @private * @param {Buffer} buffer */ -function decodeLineString(buffer) { +function decodeLineString(buffer: Buffer) { return LineString.fromBuffer(buffer); } @@ -2062,7 +2087,7 @@ function decodeLineString(buffer) { * @private * @param {LineString} value */ -function encodeLineString(value) { +function encodeLineString(value: LineString) { return value.toBuffer(); } @@ -2070,7 +2095,7 @@ function encodeLineString(value) { * @private * @param {Buffer} buffer */ -function decodePoint(buffer) { +function decodePoint(buffer: Buffer) { return Point.fromBuffer(buffer); } @@ -2078,7 +2103,7 @@ function decodePoint(buffer) { * @private * @param {LineString} value */ -function encodePoint(value) { +function encodePoint(value: LineString) { return value.toBuffer(); } @@ -2086,7 +2111,7 @@ function encodePoint(value) { * @private * @param {Buffer} buffer */ -function decodePolygon(buffer) { +function decodePolygon(buffer: Buffer) { return Polygon.fromBuffer(buffer); } @@ -2094,7 +2119,7 @@ function decodePolygon(buffer) { * @private * @param {Polygon} value */ -function encodePolygon(value) { +function encodePolygon(value: Polygon) { return value.toBuffer(); } @@ -2106,7 +2131,7 @@ function decodeDateRange(buffer) { * @private * @param {DateRange} value */ -function encodeDateRange(value) { +function encodeDateRange(value: DateRange) { return value.toBuffer(); } @@ -2119,7 +2144,7 @@ function encodeDateRange(value) { * @returns {Array} * @private */ -function parseParams(value, startIndex, length, open, close) { +function parseParams(value: string, startIndex: number, length: number, open?: string, close?: string): Array { open = open || '('; close = close || ')'; const types = []; @@ -2149,7 +2174,7 @@ function parseParams(value, startIndex, length, open, close) { * @returns {Buffer} * @private */ -function concatRoutingKey(parts, totalLength) { +function concatRoutingKey(parts: Array, totalLength: 
number): Buffer { if (totalLength === 0) { return null; } @@ -2179,11 +2204,9 @@ function invertObject(obj) { } return rv; } -Encoder.isTypedArray = function(arg) { - // The TypedArray superclass isn't available directly so to detect an instance of a TypedArray - // subclass we have to access the prototype of a concrete instance. There's nothing magical about - // Uint8Array here; we could just as easily use any of the other TypedArray subclasses. - return (arg instanceof Object.getPrototypeOf(Uint8Array)); -}; -module.exports = Encoder; +export default Encoder; + +export { + type DataTypeInfo +}; diff --git a/lib/errors.js b/lib/errors.js deleted file mode 100644 index 970c528b9..000000000 --- a/lib/errors.js +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -/** - * Contains the error classes exposed by the driver. - * @module errors - */ - -/** - * Base Error - * @private - */ -function DriverError (message) { - Error.call(this, message); - Error.captureStackTrace(this, this.constructor); - this.name = this.constructor.name; - this.info = 'Cassandra Driver Error'; - // Explicitly set the message property as the Error.call() doesn't set the property on v8 - this.message = message; -} - -util.inherits(DriverError, Error); - -/** - * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. - * @param {Object} innerErrors An object map containing the error per host tried - * @param {String} [message] - * @constructor - */ -function NoHostAvailableError(innerErrors, message) { - DriverError.call(this, message); - this.innerErrors = innerErrors; - this.info = 'Represents an error when a query cannot be performed because no host is available or could be reached by the driver.'; - if (!message) { - this.message = 'All host(s) tried for query failed.'; - if (innerErrors) { - const hostList = Object.keys(innerErrors); - if (hostList.length > 0) { - const host = hostList[0]; - this.message += util.format(' First host tried, %s: %s. See innerErrors.', host, innerErrors[host]); - } - } - } -} - -util.inherits(NoHostAvailableError, DriverError); - -/** - * Represents an error message from the server - * @param {Number} code Cassandra exception code - * @param {String} message - * @constructor - */ -function ResponseError(code, message) { - DriverError.call(this, message); - /** - * The error code as defined in [responseErrorCodes]{@link module:types~responseErrorCodes}. - * @type {Number} - */ - this.code = code; - this.info = 'Represents an error message from the server'; -} - -util.inherits(ResponseError, DriverError); - -/** - * Represents a bug inside the driver or in a Cassandra host. 
- * @param {String} message - * @constructor - */ -function DriverInternalError(message) { - DriverError.call(this, message); - this.info = 'Represents a bug inside the driver or in a Cassandra host.'; -} - -util.inherits(DriverInternalError, DriverError); - -/** - * Represents an error when trying to authenticate with auth-enabled host - * @param {String} message - * @constructor - */ -function AuthenticationError(message) { - DriverError.call(this, message); - this.info = 'Represents an authentication error from the driver or from a Cassandra node.'; -} - -util.inherits(AuthenticationError, DriverError); - -/** - * Represents an error that is raised when one of the arguments provided to a method is not valid - * @param {String} message - * @constructor - */ -function ArgumentError(message) { - DriverError.call(this, message); - this.info = 'Represents an error that is raised when one of the arguments provided to a method is not valid.'; -} - -util.inherits(ArgumentError, DriverError); - -/** - * Represents a client-side error that is raised when the client didn't hear back from the server within - * {@link ClientOptions.socketOptions.readTimeout}. - * @param {String} message The error message. - * @param {String} [host] Address of the server host that caused the operation to time out. - * @constructor - */ -function OperationTimedOutError(message, host) { - DriverError.call(this, message, this.constructor); - this.info = 'Represents a client-side error that is raised when the client did not hear back from the server ' + - 'within socketOptions.readTimeout'; - - /** - * When defined, it gets the address of the host that caused the operation to time out. - * @type {String|undefined} - */ - this.host = host; -} - -util.inherits(OperationTimedOutError, DriverError); - -/** - * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. - * @param message - * @constructor - */ -function NotSupportedError(message) { - DriverError.call(this, message, this.constructor); - this.info = 'Represents a feature that is not supported in the driver or in the Cassandra version.'; -} - -util.inherits(NotSupportedError, DriverError); - -/** - * Represents a client-side error indicating that all connections to a certain host have reached - * the maximum amount of in-flight requests supported. - * @param {String} address - * @param {Number} maxRequestsPerConnection - * @param {Number} connectionLength - * @constructor - */ -function BusyConnectionError(address, maxRequestsPerConnection, connectionLength) { - const message = util.format('All connections to host %s are busy, %d requests are in-flight on %s', - address, maxRequestsPerConnection, connectionLength === 1 ? 
'a single connection': 'each connection'); - DriverError.call(this, message, this.constructor); - this.info = 'Represents a client-side error indicating that all connections to a certain host have reached ' + - 'the maximum amount of in-flight requests supported (pooling.maxRequestsPerConnection)'; -} - -util.inherits(BusyConnectionError, DriverError); - -/** - * - * @param {Long} long - */ -function VIntOutOfRangeException(long){ - const message = `Value ${long.toString} is out of range for a JavaScript Number`; - DriverError.call(this, message, this.constructor); - this.info = 'Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn\'t have enough space to fit the value that was decoded'; -} - -util.inherits(VIntOutOfRangeException, DriverError); - -exports.ArgumentError = ArgumentError; -exports.AuthenticationError = AuthenticationError; -exports.BusyConnectionError = BusyConnectionError; -exports.DriverError = DriverError; -exports.OperationTimedOutError = OperationTimedOutError; -exports.DriverInternalError = DriverInternalError; -exports.NoHostAvailableError = NoHostAvailableError; -exports.NotSupportedError = NotSupportedError; -exports.ResponseError = ResponseError; -exports.VIntOutOfRangeException = VIntOutOfRangeException; \ No newline at end of file diff --git a/lib/errors.ts b/lib/errors.ts new file mode 100644 index 000000000..6b420227f --- /dev/null +++ b/lib/errors.ts @@ -0,0 +1,252 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import Long from "long"; +import util from "util"; +import type { consistencies } from "./types"; + +/** + * Contains the error classes exposed by the driver. + * @module errors + */ + +//TODO: most of the properties were not exposed. I think we should expose them all. +/** + * Base Error + */ +class DriverError extends Error { + info: string; + isSocketError: boolean; + innerError: any; + requestNotWritten?: boolean; + + constructor(message: string) { + super(message); + Error.captureStackTrace(this, this.constructor); + this.name = this.constructor.name; + this.info = 'Cassandra Driver Error'; + this.message = message; + } +} + +/** + * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. + */ +class NoHostAvailableError extends DriverError { + innerErrors: object; + + /** + * Represents an error when a query cannot be performed because no host is available or could be reached by the driver. 
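Because every class now extends Error through DriverError, instanceof-based error handling keeps working after the rewrite; a minimal sketch, using the named exports of the new module:

    import { DriverError, NoHostAvailableError } from "./errors";

    const err = new NoHostAvailableError({ '10.0.0.1:9042': 'connection refused' });
    err instanceof NoHostAvailableError; // true
    err instanceof DriverError;          // true
    err instanceof Error;                // true
    err.message; // 'All host(s) tried for query failed. First host tried, 10.0.0.1:9042: connection refused. See innerErrors.'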
+ * @param {Object} innerErrors An object map containing the error per host tried + * @param {String} [message] + * @constructor + */ + constructor(innerErrors: object, message?: string) { + super(message); + this.innerErrors = innerErrors; + this.info = 'Represents an error when a query cannot be performed because no host is available or could be reached by the driver.'; + if (!message) { + this.message = 'All host(s) tried for query failed.'; + if (innerErrors) { + const hostList = Object.keys(innerErrors); + if (hostList.length > 0) { + const host = hostList[0]; + this.message += util.format(' First host tried, %s: %s. See innerErrors.', host, innerErrors[host]); + } + } + } + } +} + +/** + * Represents an error message from the server + */ +class ResponseError extends DriverError { + code: number; + consistencies?: consistencies; + required?: number; + alive?: number; + received?: number; + blockFor?: number; + failures?: number; + reasons?: object; + isDataPresent?: any; + writeType?: any; + queryId?: any; + keyspace?: string; + functionName?: string; + argTypes?: string[]; + table?: string; + + /** + * Represents an error message from the server + * @param {Number} code Cassandra exception code + * @param {String} message + * @constructor + */ + constructor(code: number, message: string) { + super(message); + this.code = code; + this.info = 'Represents an error message from the server'; + } +} + +/** + * Represents a bug inside the driver or in a Cassandra host. + */ +class DriverInternalError extends DriverError { + /** + * Represents a bug inside the driver or in a Cassandra host. + * @param {String} message + * @constructor + */ + constructor(message: string) { + super(message); + this.info = 'Represents a bug inside the driver or in a Cassandra host.'; + } +} + +/** + * Represents an error when trying to authenticate with auth-enabled host + */ +class AuthenticationError extends DriverError { + additionalInfo: ResponseError; + /** + * Represents an error when trying to authenticate with auth-enabled host + * @param {String} message + * @constructor + */ + constructor(message: string) { + super(message); + this.info = 'Represents an authentication error from the driver or from a Cassandra node.'; + } +} + +/** + * Represents an error that is raised when one of the arguments provided to a method is not valid + */ +class ArgumentError extends DriverError { + /** + * Represents an error that is raised when one of the arguments provided to a method is not valid + * @param {String} message + * @constructor + */ + constructor(message: string) { + super(message); + this.info = 'Represents an error that is raised when one of the arguments provided to a method is not valid.'; + } +} + +/** + * Represents a client-side error that is raised when the client didn't hear back from the server within + * {@link ClientOptions.socketOptions.readTimeout}. + */ +class OperationTimedOutError extends DriverError { + host?: string; + + /** + * Represents a client-side error that is raised when the client didn't hear back from the server within + * {@link ClientOptions.socketOptions.readTimeout}. + * @param {String} message The error message. + * @param {String} [host] Address of the server host that caused the operation to time out. 
+ * @constructor + */ + constructor(message: string, host?: string) { + super(message); + this.info = 'Represents a client-side error that is raised when the client did not hear back from the server ' + + 'within socketOptions.readTimeout'; + /** + * When defined, it gets the address of the host that caused the operation to time out. + * @type {String|undefined} + */ + this.host = host; + } +} + +/** + * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. + */ +class NotSupportedError extends DriverError { + + /** + * Represents an error that is raised when a feature is not supported in the driver or in the current Cassandra version. + * @param message + * @constructor + */ + constructor(message: string) { + super(message); + this.info = 'Represents a feature that is not supported in the driver or in the Cassandra version.'; + } +} + +/** + * Represents a client-side error indicating that all connections to a certain host have reached + * the maximum amount of in-flight requests supported. + */ +class BusyConnectionError extends DriverError { + /** + * Represents a client-side error indicating that all connections to a certain host have reached + * the maximum amount of in-flight requests supported. + * @param {String} address + * @param {Number} maxRequestsPerConnection + * @param {Number} connectionLength + * @constructor + */ + constructor(address: string, maxRequestsPerConnection: number, connectionLength: number) { + const message = util.format('All connections to host %s are busy, %d requests are in-flight on %s', + address, maxRequestsPerConnection, connectionLength === 1 ? 'a single connection' : 'each connection'); + super(message); + this.info = 'Represents a client-side error indicating that all connections to a certain host have reached ' + + 'the maximum amount of in-flight requests supported (pooling.maxRequestsPerConnection)'; + } +} + +/** + * Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn't have enough space to fit the value that was decoded + */ +class VIntOutOfRangeException extends DriverError { + /** + * Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn't have enough space to fit the value that was decoded + * @param {Long} long + */ + constructor(long: Long) { + const message = `Value ${long.toString()} is out of range for a JavaScript Number`; + super(message); + this.info = 'Represents a run-time exception when attempting to decode a vint and the JavaScript Number doesn\'t have enough space to fit the value that was decoded'; + } +} + +export default { + ArgumentError, + AuthenticationError, + BusyConnectionError, + DriverError, + OperationTimedOutError, + DriverInternalError, + NoHostAvailableError, + NotSupportedError, + ResponseError, + VIntOutOfRangeException +}; + +export { + ArgumentError, + AuthenticationError, + BusyConnectionError, + DriverError, DriverInternalError, + NoHostAvailableError, + NotSupportedError, OperationTimedOutError, ResponseError, + VIntOutOfRangeException +}; diff --git a/lib/execution-options.js b/lib/execution-options.ts similarity index 77% rename from lib/execution-options.js rename to lib/execution-options.ts index cc3df181f..c48324704 100644 --- a/lib/execution-options.js +++ b/lib/execution-options.ts @@ -13,12 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import type Client from "./client"; +import type { QueryOptions } from "./client"; +import errors from "./errors"; +import { ExecutionProfile } from "./execution-profile"; +import type { Host } from "./host"; +import { LoadBalancingPolicy } from "./policies/load-balancing"; +import { RetryPolicy } from "./policies/retry"; +import types, { type consistencies, Long } from "./types/index"; +import utils from "./utils"; -'use strict'; - -const utils = require('./utils'); -const types = require('./types'); -const errors = require('./errors'); const proxyExecuteKey = 'ProxyExecute'; @@ -40,10 +45,10 @@ class ExecutionOptions { /** * Creates an empty instance, where all methods return undefined, used internally. - * @ignore + * @ignore @internal * @return {ExecutionOptions} */ - static empty() { + static empty(): ExecutionOptions { return new ExecutionOptions(); } @@ -52,8 +57,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} */ - getCaptureStackTrace() { - + getCaptureStackTrace(): boolean { + return undefined; } /** @@ -61,18 +66,18 @@ class ExecutionOptions { * @abstract * @returns {Number} */ - getConsistency() { - + getConsistency(): consistencies { + return undefined; } /** * Key-value payload to be passed to the server. On the server side, implementations of QueryHandler can use * this data. * @abstract - * @returns {Object} + * @returns {{ [key: string]: any }} */ - getCustomPayload() { - + getCustomPayload(): { [key: string]: any } { + return undefined; } /** @@ -80,8 +85,8 @@ class ExecutionOptions { * @abstract * @returns {Number} */ - getFetchSize() { - + getFetchSize(): number { + return undefined; } /** @@ -89,17 +94,17 @@ class ExecutionOptions { * gets the host that should handle the query. * @returns {Host} */ - getFixedHost() { - + getFixedHost(): Host { + return undefined; } /** * Gets the type hints for parameters given in the query, ordered as for the parameters. * @abstract - * @returns {Array|Array} + * @returns {string[] | string[][]} */ - getHints() { - + getHints(): string[] | string[][] { + return undefined; } /** @@ -110,8 +115,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} */ - isAutoPage() { - + isAutoPage(): boolean { + return undefined; } /** @@ -120,8 +125,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} A Boolean value, it can't be undefined. */ - isBatchCounter() { - + isBatchCounter(): boolean { + return undefined; } /** @@ -130,8 +135,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} A Boolean value, it can't be undefined. */ - isBatchLogged() { - + isBatchLogged(): boolean { + return undefined; } /** @@ -140,8 +145,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} */ - isIdempotent() { - + isIdempotent(): boolean { + return undefined; } /** @@ -149,8 +154,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} A Boolean value, it can't be undefined. */ - isPrepared() { - + isPrepared(): boolean { + return undefined; } /** @@ -158,8 +163,8 @@ class ExecutionOptions { * @abstract * @returns {Boolean} */ - isQueryTracing() { - + isQueryTracing(): boolean { + return undefined; } /** @@ -172,16 +177,16 @@ class ExecutionOptions { * @abstract * @returns {String} */ - getKeyspace() { - + getKeyspace(): string { + return undefined; } /** * Gets the load balancing policy used for this execution. * @returns {LoadBalancingPolicy} A LoadBalancingPolicy instance, it can't be undefined. 
*/ - getLoadBalancingPolicy() { - + getLoadBalancingPolicy(): LoadBalancingPolicy { + return undefined; } /** @@ -189,25 +194,25 @@ class ExecutionOptions { * @abstract * @returns {Buffer} */ - getPageState() { - + getPageState(): Buffer { + return undefined; } /** * Internal method that gets the preferred host. * @abstract - * @ignore + * @ignore @internal */ - getPreferredHost() { - + getPreferredHost(): Host { + return undefined; } /** * Gets the query options as provided to the execution method without setting the default values. * @returns {QueryOptions} */ - getRawQueryOptions() { - + getRawQueryOptions(): QueryOptions { + return undefined; } /** @@ -218,8 +223,8 @@ class ExecutionOptions { * @abstract * @returns {Number} */ - getReadTimeout() { - + getReadTimeout(): number { + return undefined; } /** @@ -227,36 +232,36 @@ class ExecutionOptions { * @abstract * @returns {RetryPolicy} A RetryPolicy instance, it can't be undefined. */ - getRetryPolicy() { - + getRetryPolicy(): RetryPolicy { + return undefined; } /** * Internal method to obtain the row callback, for "by row" results. * @abstract - * @ignore + * @ignore @internal */ getRowCallback() { - + return undefined; } /** * Internal method to get or generate a timestamp for the request execution. - * @ignore + * @ignore @internal * @returns {Long|null} */ - getOrGenerateTimestamp() { - + getOrGenerateTimestamp(): Long | null { + return undefined; } /** * Gets the index of the parameters that are part of the partition key to determine the routing. * @abstract - * @ignore + * @ignore @internal * @returns {Array} */ - getRoutingIndexes() { - + getRoutingIndexes(): Array { + return undefined; } /** @@ -264,27 +269,27 @@ class ExecutionOptions { * @abstract * @returns {Buffer|Array} */ - getRoutingKey() { - + getRoutingKey(): Buffer | Array { + return undefined; } /** * Gets the array of the parameters names that are part of the partition key to determine the * routing. Only valid for non-prepared requests. * @abstract - * @ignore + * @ignore @internal */ getRoutingNames() { - + return undefined; } /** * Gets the the consistency level to be used for the serial phase of conditional updates. * @abstract - * @returns {Number} + * @returns {consistencies} */ - getSerialConsistency() { - + getSerialConsistency(): consistencies { + return undefined; } /** @@ -293,79 +298,92 @@ class ExecutionOptions { * @abstract * @returns {Number|Long|undefined|null} */ - getTimestamp() { - + getTimestamp(): number | Long | undefined | null { + return undefined; } + //TODO: was exposed in .d.t.s. Are we removing it? /** * @param {Array} hints * @abstract - * @ignore + * @ignore @internal */ - setHints(hints) { - + setHints(hints: string[]) { + return undefined; } /** * Sets the keyspace for the execution. - * @ignore + * @ignore @internal * @abstract * @param {String} keyspace */ - setKeyspace(keyspace) { - + setKeyspace(keyspace: string) { + return undefined; } /** * @abstract - * @ignore + * @ignore @internal */ - setPageState() { - + setPageState(pageState: Buffer) { + return undefined; } /** * Internal method that sets the preferred host. * @abstract - * @ignore + * @ignore @internal */ - setPreferredHost() { - + setPreferredHost(host: Host) { + return undefined; } /** * Sets the index of the parameters that are part of the partition key to determine the routing. 
* @param {Array} routingIndexes * @abstract - * @ignore + * @ignore @internal */ - setRoutingIndexes(routingIndexes) { - + setRoutingIndexes(routingIndexes: Array) { + return undefined; } /** * Sets the routing key. * @abstract - * @ignore + * @ignore @internal */ setRoutingKey(value) { - + return undefined; } } /** * Internal implementation of {@link ExecutionOptions} that uses the value from the client options and execution * profile into account. - * @ignore + * @ignore @internal */ class DefaultExecutionOptions extends ExecutionOptions { + protected _queryOptions: QueryOptions; + protected _rowCallback: Function; + protected _routingKey: any; + protected _hints: any; + protected _keyspace: any; + protected _routingIndexes: any; + protected _pageState: any; + protected _preferredHost: Host; + protected _client: Client; + protected _defaultQueryOptions: QueryOptions; + protected _profile: ExecutionProfile; + protected _customPayload: object; /** * Creates a new instance of {@link ExecutionOptions}. * @param {QueryOptions} queryOptions * @param {Client} client * @param {Function|null} rowCallback */ - constructor(queryOptions, client, rowCallback) { + constructor(queryOptions: QueryOptions, client: Client, rowCallback: Function | null) { super(); this._queryOptions = queryOptions; @@ -396,7 +414,7 @@ class DefaultExecutionOptions extends ExecutionOptions { * @param {QueryOptions} defaultQueryOptions * @private */ - static createCustomPayload(userOptions, defaultQueryOptions) { + static createCustomPayload(userOptions: QueryOptions, defaultQueryOptions: QueryOptions) { let customPayload = userOptions.customPayload || defaultQueryOptions.customPayload; const executeAs = userOptions.executeAs || defaultQueryOptions.executeAs; @@ -419,10 +437,10 @@ class DefaultExecutionOptions extends ExecutionOptions { * @param {QueryOptions|null} queryOptions * @param {Client} client * @param {Function|null} [rowCallback] - * @ignore + * @ignore @internal * @return {ExecutionOptions} */ - static create(queryOptions, client, rowCallback) { + static create(queryOptions: QueryOptions | null, client: Client, rowCallback?: Function | null): ExecutionOptions { if (!queryOptions || typeof queryOptions === 'function') { // queryOptions can be null/undefined and could be of type function when is an optional parameter queryOptions = utils.emptyObject; @@ -475,7 +493,7 @@ class DefaultExecutionOptions extends ExecutionOptions { * Determines if the query execution must be prepared beforehand. * @return {Boolean} */ - isPrepared() { + isPrepared(): boolean { return ifUndefined(this._queryOptions.prepare, this._defaultQueryOptions.prepare); } @@ -547,7 +565,7 @@ class DefaultExecutionOptions extends ExecutionOptions { /** * Internal method to obtain the row callback, for "by row" results. 
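As a quick sketch of the converted base class: every abstract getter now returns undefined explicitly, so an empty instance behaves the same as before the migration (assuming ExecutionOptions is imported from this module):

    const empty = ExecutionOptions.empty();
    empty.getConsistency(); // undefined
    empty.getFetchSize();   // undefined
    empty.isIdempotent();   // undefined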
- * @ignore + * @ignore @internal */ getRowCallback() { return this._rowCallback; @@ -568,35 +586,35 @@ class DefaultExecutionOptions extends ExecutionOptions { * @internal * @param {Object} payload */ - setCustomPayload(payload) { + setCustomPayload(payload: object) { this._customPayload = payload; } /** * @param {Array} hints */ - setHints(hints) { + setHints(hints: Array) { this._hints = hints; } /** * @param {String} keyspace */ - setKeyspace(keyspace) { + setKeyspace(keyspace: string) { this._keyspace = keyspace; } /** * @param {Buffer} pageState */ - setPageState(pageState) { + setPageState(pageState: Buffer) { this._pageState = pageState; } /** * @param {Array} routingIndexes */ - setRoutingIndexes(routingIndexes) { + setRoutingIndexes(routingIndexes: Array) { this._routingIndexes = routingIndexes; } @@ -616,4 +634,4 @@ function ifUndefined3(v1, v2, v3) { return v2 !== undefined ? v2 : v3; } -module.exports = { ExecutionOptions, DefaultExecutionOptions, proxyExecuteKey }; \ No newline at end of file +export { DefaultExecutionOptions, ExecutionOptions, proxyExecuteKey }; diff --git a/lib/execution-profile.js b/lib/execution-profile.js deleted file mode 100644 index 78c4fc2b2..000000000 --- a/lib/execution-profile.js +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -const utils = require('./utils'); -const types = require('./types'); -const promiseUtils = require('./promise-utils'); - -/** - * Creates a new instance of {@link ExecutionProfile}. - * @classdesc - * Represents a set configurations to be used in a statement execution to be used for a single {@link Client} instance. - *

- * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances.
- * @param {String} name Name of the execution profile.
- * Use 'default' to specify that the new instance should be the default {@link ExecutionProfile} if no
- * profile is specified in the execution.
- * @param {Object} [options] Profile options, when any of the options is not specified the {@link Client} will the use - * the ones defined in the default profile. - * @param {Number} [options.consistency] The consistency level to use for this profile. - * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. - * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. - * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. - * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. - * @param {Object} [options.graphOptions] - * @param {String} [options.graphOptions.language] The graph language to use for graph queries. - *

- * Note that this setting should normally be undefined or set by a utility method and it's not expected
- * to be defined manually by the user.
- * @param {String} [options.graphOptions.results] The protocol to use for serializing and deserializing graph results.
- * Note that this setting should normally be undefined or set by a utility method and it's not expected
- * to be defined manually by the user.
- * @param {String} [options.graphOptions.name] The graph name to use for graph queries. - * @param {Number} [options.graphOptions.readConsistency] The consistency level to use for graph read queries. - * @param {String} [options.graphOptions.source] The graph traversal source name to use for graph queries. - * @param {Number} [options.graphOptions.writeConsistency] The consistency level to use for graph write queries. - * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. - * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. - * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. - * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. - * @example - * const { Client, ExecutionProfile } = require('cassandra-driver'); - * const client = new Client({ - * contactPoints: ['host1', 'host2'], - * profiles: [ - * new ExecutionProfile('metrics-oltp', { - * consistency: consistency.localQuorum, - * retry: myRetryPolicy - * }) - * ] - * }); - * - * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); - * @constructor - */ -function ExecutionProfile(name, options) { - if (typeof name !== 'string') { - throw new TypeError('Execution profile name must be a string'); - } - options = options || utils.emptyObject; - const graphOptions = options.graphOptions || utils.emptyObject; - /** - * Name of the execution profile. - * @type {String} - */ - this.name = name; - /** - * Consistency level. - * @type {Number} - */ - this.consistency = options.consistency; - /** - * Load-balancing policy - * @type {LoadBalancingPolicy} - */ - this.loadBalancing = options.loadBalancing; - /** - * Client read timeout. - * @type {Number} - */ - this.readTimeout = options.readTimeout; - /** - * Retry policy. - * @type {RetryPolicy} - */ - this.retry = options.retry; - /** - * Serial consistency level. - * @type {Number} - */ - this.serialConsistency = options.serialConsistency; - /** - * The graph options for this profile. - * @type {Object} - * @property {String} language The graph language. - * @property {String} name The graph name. - * @property {String} readConsistency The consistency to use for graph write queries. - * @property {String} source The graph traversal source. - * @property {String} writeConsistency The consistency to use for graph write queries. - */ - this.graphOptions = { - language: graphOptions.language, - results: graphOptions.results, - name: graphOptions.name, - readConsistency: graphOptions.readConsistency, - source: graphOptions.source, - writeConsistency: graphOptions.writeConsistency - }; -} - -/** - * Contains the logic to handle the different execution profiles of a {@link Client}. 
- * @ignore - */ -class ProfileManager { - - /** - * @param {ClientOptions} options - */ - constructor(options) { - this._profiles = options.profiles || []; - this._defaultConfiguredRetryPolicy = undefined; - this._setDefault(options); - // A array of unique load balancing policies - this._loadBalancingPolicies = []; - // A dictionary of name keys and profile values - this._profilesMap = {}; - // A dictionary of name keys and custom payload dictionaries as values - this._customPayloadCache = {}; - // A dictionary of name keys and graph options as values - this._graphOptionsCache = {}; - this._profiles.forEach(function (p) { - this._profilesMap[p.name] = p; - // Set required properties - p.loadBalancing = p.loadBalancing || this._defaultProfile.loadBalancing; - // Using array indexOf is not very efficient (O(n)) but the amount of profiles should be limited - // and a handful of load-balancing policies (no hashcode for load-Balancing policies) - if (this._loadBalancingPolicies.indexOf(p.loadBalancing) === -1) { - this._loadBalancingPolicies.push(p.loadBalancing); - } - return p; - }, this); - } - - /** - * @param {Client} client - * @param {HostMap} hosts - */ - async init(client, hosts) { - for (const lbp of this._loadBalancingPolicies) { - await promiseUtils.fromCallback(callback => lbp.init(client, hosts, callback)); - } - } - - /** - * Uses the load-balancing policies to get the relative distance to the host and return the closest one. - * @param {Host} host - */ - getDistance(host) { - let distance = types.distance.ignored; - // this is performance critical: we can't use any other language features than for-loop :( - for (let i = 0; i < this._loadBalancingPolicies.length; i++) { - const d = this._loadBalancingPolicies[i].getDistance(host); - if (d < distance) { - distance = d; - if (distance === types.distance.local) { - break; - } - } - } - - host.setDistance(distance); - return distance; - } - - /** - * @param {String|ExecutionProfile} name - * @returns {ExecutionProfile|undefined} It returns the execution profile by name or the default profile when name is - * undefined. It returns undefined when the profile does not exist. - */ - getProfile(name) { - if (name instanceof ExecutionProfile) { - return name; - } - return this._profilesMap[name || 'default']; - } - - /** @returns {ExecutionProfile} */ - getDefault() { - return this._defaultProfile; - } - - /** @returns {LoadBalancingPolicy} */ - getDefaultLoadBalancing() { - return this._defaultProfile.loadBalancing; - } - - /** - * Gets the cached default graph options for a given profile. 
If it doesn't exist, it creates new options using the - * handler and inserts it into the cache - * @param {ExecutionProfile} profile - * @param {Function} createHandler - */ - getOrCreateGraphOptions(profile, createHandler) { - let graphOptions = this._graphOptionsCache[profile.name]; - if (!graphOptions) { - graphOptions = (this._graphOptionsCache[profile.name] = createHandler()); - } - return graphOptions; - } - - /** - * @private - * @param {ClientOptions} options - */ - _setDefault(options) { - this._defaultProfile = this._profiles.filter(function (p) { return p.name === 'default'; })[0]; - if (!this._defaultProfile) { - this._profiles.push(this._defaultProfile = new ExecutionProfile('default')); - } - - // Store the default configured retry policy - this._defaultConfiguredRetryPolicy = this._defaultProfile.retry; - - // Set the required properties - this._defaultProfile.loadBalancing = this._defaultProfile.loadBalancing || options.policies.loadBalancing; - this._defaultProfile.retry = this._defaultProfile.retry || options.policies.retry; - } - - /** - * Gets all the execution profiles currently defined. - * @returns {Array.} - */ - getAll() { - return this._profiles; - } - - getDefaultConfiguredRetryPolicy() { - return this._defaultConfiguredRetryPolicy; - } -} - -module.exports = { - ProfileManager, - ExecutionProfile -}; \ No newline at end of file diff --git a/lib/execution-profile.ts b/lib/execution-profile.ts new file mode 100644 index 000000000..158d3cfe2 --- /dev/null +++ b/lib/execution-profile.ts @@ -0,0 +1,366 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import type Client from "./client"; +import type { ClientOptions } from "./client"; +import type { Host, HostMap } from "./host"; +import { LoadBalancingPolicy } from "./policies/load-balancing"; +import { RetryPolicy } from "./policies/retry"; +import promiseUtils from "./promise-utils"; +import types, { consistencies } from "./types/index"; +import utils from "./utils"; + + + +/** + * @classdesc + * Represents a set configurations to be used in a statement execution to be used for a single {@link Client} instance. + *

+ * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances.
+ * @example + * const { Client, ExecutionProfile } = require('cassandra-driver'); + * const client = new Client({ + * contactPoints: ['host1', 'host2'], + * profiles: [ + * new ExecutionProfile('metrics-oltp', { + * consistency: consistency.localQuorum, + * retry: myRetryPolicy + * }) + * ] + * }); + * + * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); + */ +class ExecutionProfile { + /** + * Consistency level. + * @type {Number} + */ + consistency?: consistencies; + /** + * Load-balancing policy + * @type {LoadBalancingPolicy} + */ + loadBalancing?: LoadBalancingPolicy; + /** + * Name of the execution profile. + * @type {String} + */ + name: string; + /** + * Client read timeout. + * @type {Number} + */ + readTimeout?: number; + /** + * Retry policy. + * @type {RetryPolicy} + */ + retry?: RetryPolicy; + /** + * Serial consistency level. + * @type {Number} + */ + serialConsistency?: consistencies; + /** + * The graph options for this profile. + * @type {Object} + * @property {String} language The graph language. + * @property {String} name The graph name. + * @property {String} readConsistency The consistency to use for graph write queries. + * @property {String} source The graph traversal source. + * @property {String} writeConsistency The consistency to use for graph write queries. + */ + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + /** @internal */ + results?: any; + }; + + /** + * Creates a new instance of {@link ExecutionProfile}. + * Represents a set configurations to be used in a statement execution to be used for a single {@link Client} instance. + *

+ * An {@link ExecutionProfile} instance should not be shared across different {@link Client} instances.
+ * @param {String} name Name of the execution profile.
+ * Use 'default' to specify that the new instance should be the default {@link ExecutionProfile} if no
+ * profile is specified in the execution.
+ * @param {Object} [options] Profile options, when any of the options is not specified the {@link Client} will the use + * the ones defined in the default profile. + * @param {Number} [options.consistency] The consistency level to use for this profile. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @param {Object} [options.graphOptions] + * @param {String} [options.graphOptions.language] The graph language to use for graph queries. + *

+ * Note that this setting should normally be undefined or set by a utility method and it's not expected
+ * to be defined manually by the user.
+ * @param {String} [options.graphOptions.results] The protocol to use for serializing and deserializing graph results.
+ * Note that this setting should normally be undefined or set by a utility method and it's not expected
+ * to be defined manually by the user.
+ * @param {String} [options.graphOptions.name] The graph name to use for graph queries. + * @param {Number} [options.graphOptions.readConsistency] The consistency level to use for graph read queries. + * @param {String} [options.graphOptions.source] The graph traversal source name to use for graph queries. + * @param {Number} [options.graphOptions.writeConsistency] The consistency level to use for graph write queries. + * @param {LoadBalancingPolicy} [options.loadBalancing] The load-balancing policy to use for this profile. + * @param {Number} [options.readTimeout] The client per-host request timeout to use for this profile. + * @param {RetryPolicy} [options.retry] The retry policy to use for this profile. + * @param {Number} [options.serialConsistency] The serial consistency level to use for this profile. + * @example + * const { Client, ExecutionProfile } = require('cassandra-driver'); + * const client = new Client({ + * contactPoints: ['host1', 'host2'], + * profiles: [ + * new ExecutionProfile('metrics-oltp', { + * consistency: consistency.localQuorum, + * retry: myRetryPolicy + * }) + * ] + * }); + * + * client.execute(query, params, { executionProfile: 'metrics-oltp' }, callback); + * @constructor + */ + constructor(name: string, options?: { + consistency?: consistencies; + loadBalancing?: LoadBalancingPolicy; + readTimeout?: number; + retry?: RetryPolicy; + serialConsistency?: consistencies; + graphOptions?: { + name?: string; + language?: string; + source?: string; + readConsistency?: consistencies; + writeConsistency?: consistencies; + }; + }){ + if (typeof name !== 'string') { + throw new TypeError('Execution profile name must be a string'); + } + options = options || utils.emptyObject; + const graphOptions : typeof options.graphOptions = options.graphOptions || utils.emptyObject; + /** + * Name of the execution profile. + * @type {String} + */ + this.name = name; + /** + * Consistency level. + * @type {Number} + */ + this.consistency = options.consistency; + /** + * Load-balancing policy + * @type {LoadBalancingPolicy} + */ + this.loadBalancing = options.loadBalancing; + /** + * Client read timeout. + * @type {Number} + */ + this.readTimeout = options.readTimeout; + /** + * Retry policy. + * @type {RetryPolicy} + */ + this.retry = options.retry; + /** + * Serial consistency level. + * @type {Number} + */ + this.serialConsistency = options.serialConsistency; + /** + * The graph options for this profile. + * @type {Object} + * @property {String} language The graph language. + * @property {String} name The graph name. + * @property {String} readConsistency The consistency to use for graph write queries. + * @property {String} source The graph traversal source. + * @property {String} writeConsistency The consistency to use for graph write queries. + */ + this.graphOptions = { + language: graphOptions.language, + // What's the use of this results? + // @ts-ignore + results: graphOptions.results, + name: graphOptions.name, + readConsistency: graphOptions.readConsistency, + source: graphOptions.source, + writeConsistency: graphOptions.writeConsistency + }; + } +} + +/** + * Contains the logic to handle the different execution profiles of a {@link Client}. 
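A short usage sketch of the manager that follows (clientOptions is assumed to be a fully-populated ClientOptions object with default policies set):

    const manager = new ProfileManager(clientOptions);
    manager.getProfile('metrics-oltp'); // the registered profile, or undefined if that name was never added
    manager.getDefault();               // the 'default' profile, created automatically when none is supplied
    manager.getAll();                   // every ExecutionProfile currently defined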
+ * @ignore @internal + */ +class ProfileManager { + private _profiles: any; + private _defaultConfiguredRetryPolicy: any; + private _loadBalancingPolicies: any[]; + private _profilesMap: Record; + private _customPayloadCache: Record>; + private _graphOptionsCache: Record; + private _defaultProfile: ExecutionProfile; + + /** + * @param {ClientOptions} options + */ + constructor(options: ClientOptions) { + this._profiles = options.profiles || []; + this._defaultConfiguredRetryPolicy = undefined; + this._setDefault(options); + // A array of unique load balancing policies + this._loadBalancingPolicies = []; + // A dictionary of name keys and profile values + this._profilesMap = {}; + // A dictionary of name keys and custom payload dictionaries as values + this._customPayloadCache = {}; + // A dictionary of name keys and graph options as values + this._graphOptionsCache = {}; + this._profiles.forEach(function (p) { + this._profilesMap[p.name] = p; + // Set required properties + p.loadBalancing = p.loadBalancing || this._defaultProfile.loadBalancing; + // Using array indexOf is not very efficient (O(n)) but the amount of profiles should be limited + // and a handful of load-balancing policies (no hashcode for load-Balancing policies) + if (this._loadBalancingPolicies.indexOf(p.loadBalancing) === -1) { + this._loadBalancingPolicies.push(p.loadBalancing); + } + return p; + }, this); + } + + /** + * @param {Client} client + * @param {HostMap} hosts + */ + async init(client: Client, hosts: HostMap) { + for (const lbp of this._loadBalancingPolicies) { + await promiseUtils.fromCallback(callback => lbp.init(client, hosts, callback)); + } + } + + /** + * Uses the load-balancing policies to get the relative distance to the host and return the closest one. + * @param {Host} host + */ + getDistance(host: Host) { + let distance = types.distance.ignored; + // this is performance critical: we can't use any other language features than for-loop :( + for (let i = 0; i < this._loadBalancingPolicies.length; i++) { + const d = this._loadBalancingPolicies[i].getDistance(host); + if (d < distance) { + distance = d; + if (distance === types.distance.local) { + break; + } + } + } + + host.setDistance(distance); + return distance; + } + + /** + * @param {String|ExecutionProfile} name + * @returns {ExecutionProfile|undefined} It returns the execution profile by name or the default profile when name is + * undefined. It returns undefined when the profile does not exist. + */ + getProfile(name: string | ExecutionProfile): ExecutionProfile | undefined { + if (name instanceof ExecutionProfile) { + return name; + } + return this._profilesMap[name || 'default']; + } + + /** @returns {ExecutionProfile} */ + getDefault(): ExecutionProfile { + return this._defaultProfile; + } + + /** @returns {LoadBalancingPolicy} */ + getDefaultLoadBalancing(): LoadBalancingPolicy { + return this._defaultProfile.loadBalancing; + } + + /** + * Gets the cached default graph options for a given profile. 
If it doesn't exist, it creates new options using the + * handler and inserts it into the cache + * @param {ExecutionProfile} profile + * @param {Function} createHandler + */ + getOrCreateGraphOptions(profile: ExecutionProfile, createHandler: Function) { + let graphOptions = this._graphOptionsCache[profile.name]; + if (!graphOptions) { + graphOptions = (this._graphOptionsCache[profile.name] = createHandler()); + } + return graphOptions; + } + + /** + * @private + * @param {ClientOptions} options + */ + _setDefault(options: ClientOptions) { + this._defaultProfile = this._profiles.filter(function (p) { return p.name === 'default'; })[0]; + if (!this._defaultProfile) { + this._profiles.push(this._defaultProfile = new ExecutionProfile('default')); + } + + // Store the default configured retry policy + this._defaultConfiguredRetryPolicy = this._defaultProfile.retry; + + // Set the required properties + this._defaultProfile.loadBalancing = this._defaultProfile.loadBalancing || options.policies.loadBalancing; + this._defaultProfile.retry = this._defaultProfile.retry || options.policies.retry; + } + + /** + * Gets all the execution profiles currently defined. + * @returns {Array.} + */ + getAll(): Array { + return this._profiles; + } + + getDefaultConfiguredRetryPolicy() { + return this._defaultConfiguredRetryPolicy; + } +} + +export { + ExecutionProfile, ProfileManager +}; + +export default { + ProfileManager, + ExecutionProfile +}; \ No newline at end of file diff --git a/lib/geometry/geometry.js b/lib/geometry/geometry.js deleted file mode 100644 index b91c8fdbb..000000000 --- a/lib/geometry/geometry.js +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const endianness = { - '0': 'BE', - '1': 'LE' -}; - -function Geometry() { - -} - -Geometry.types = { - Point2D: 1, - LineString: 2, - Polygon: 3 -}; - -/** - * @protected - * @param {Number} code - * @returns {String} - * @ignore - */ -Geometry.getEndianness = function (code) { - const value = endianness[code.toString()]; - if (typeof value === 'undefined') { - throw new TypeError('Invalid endianness with code ' + code); - } - return value; -}; - -/** - * Reads an int32 from binary representation based on endianness. - * @protected - * @param {Buffer} buffer - * @param {String} endianness - * @param {Number} offset - * @returns Number - * @ignore - */ -Geometry.readInt32 = function (buffer, endianness, offset) { - if (endianness === 'BE') { - return buffer.readInt32BE(offset, true); - } - return buffer.readInt32LE(offset, true); -}; - -/** - * Reads an 64-bit double from binary representation based on endianness. 
- * @protected - * @param {Buffer} buffer - * @param {String} endianness - * @param {Number} offset - * @returns Number - * @ignore - */ -Geometry.readDouble = function (buffer, endianness, offset) { - if (endianness === 'BE') { - return buffer.readDoubleBE(offset, true); - } - return buffer.readDoubleLE(offset, true); -}; - -/** - * Writes an 32-bit integer to binary representation based on OS endianness. - * @protected - * @param {Number} val - * @param {Buffer} buffer - * @param {Number} offset - * @ignore - */ -Geometry.prototype.writeInt32 = function (val, buffer, offset) { - if (this.useBESerialization()) { - return buffer.writeInt32BE(val, offset, true); - } - return buffer.writeInt32LE(val, offset, true); -}; - -/** - * Writes an 64-bit double to binary representation based on OS endianness. - * @protected - * @param {Number} val - * @param {Buffer} buffer - * @param {Number} offset - * @ignore - */ -Geometry.prototype.writeDouble = function (val, buffer, offset) { - if (this.useBESerialization()) { - return buffer.writeDoubleBE(val, offset, true); - } - return buffer.writeDoubleLE(val, offset, true); -}; - -/** - * Writes an 8-bit int that represents the OS endianness. - * @protected - * @param {Buffer} buffer - * @param {Number} offset - * @ignore - */ -Geometry.prototype.writeEndianness = function (buffer, offset) { - if (this.useBESerialization()) { - return buffer.writeInt8(0, offset, true); - } - return buffer.writeInt8(1, offset, true); -}; - -/** - * Returns true if the serialization must be done in big-endian format. - * Designed to allow injection of OS endianness. - * @abstract - * @ignore - */ -Geometry.prototype.useBESerialization = function () { - throw new Error('Not Implemented'); -}; - -module.exports = Geometry; \ No newline at end of file diff --git a/lib/geometry/geometry.ts b/lib/geometry/geometry.ts new file mode 100644 index 000000000..63027ac19 --- /dev/null +++ b/lib/geometry/geometry.ts @@ -0,0 +1,135 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const endianness = { + '0': 'BE', + '1': 'LE' +}; + +class Geometry { + static types = { + Point2D: 1, + LineString: 2, + Polygon: 3 + } as const; + + /** + * @protected + * @param {Number} code + * @returns {String} + * @ignore @internal + */ + static getEndianness(code: number): string { + const value = endianness[code.toString()]; + if (typeof value === 'undefined') { + throw new TypeError('Invalid endianness with code ' + code); + } + return value; + } + + /** + * Reads an int32 from binary representation based on endianness. + * @protected + * @param {Buffer} buffer + * @param {String} endianness + * @param {Number} offset + * @returns Number + * @ignore @internal + */ + static readInt32(buffer: Buffer, endianness: string, offset: number): number { + if (endianness === 'BE') { + // Old Node.js versions, e.g. 
v8, has buf.readInt32BE(offset[, noAssert]) + // Newer versions do not have the noAssert parameter anymore + return buffer.readInt32BE(offset); + } + return buffer.readInt32LE(offset); + } + + /** + * Reads a 64-bit double from binary representation based on endianness. + * @protected + * @param {Buffer} buffer + * @param {String} endianness + * @param {Number} offset + * @returns Number + * @ignore @internal + */ + static readDouble(buffer: Buffer, endianness: string, offset: number): number { + if (endianness === 'BE') { + return buffer.readDoubleBE(offset); + } + return buffer.readDoubleLE(offset); + } + + /** + * Writes a 32-bit integer to binary representation based on OS endianness. + * @protected + * @param {Number} val + * @param {Buffer} buffer + * @param {Number} offset + * @ignore @internal + */ + writeInt32(val: number, buffer: Buffer, offset: number): void { + if (this.useBESerialization()) { + buffer.writeInt32BE(val, offset); + } else { + buffer.writeInt32LE(val, offset); + } + } + + /** + * Writes a 64-bit double to binary representation based on OS endianness. + * @protected + * @param {Number} val + * @param {Buffer} buffer + * @param {Number} offset + * @ignore @internal + */ + writeDouble(val: number, buffer: Buffer, offset: number): void { + if (this.useBESerialization()) { + buffer.writeDoubleBE(val, offset); + } else { + buffer.writeDoubleLE(val, offset); + } + } + + /** + * Writes an 8-bit int that represents the OS endianness. + * @protected + * @param {Buffer} buffer + * @param {Number} offset + * @ignore @internal + */ + writeEndianness(buffer: Buffer, offset: number): void { + if (this.useBESerialization()) { + buffer.writeInt8(0, offset); + } else { + buffer.writeInt8(1, offset); + } + } + + /** + * Returns true if the serialization must be done in big-endian format. + * Designed to allow injection of OS endianness. + * @abstract + * @ignore @internal + */ + useBESerialization(): boolean { + throw new Error('Not Implemented'); + } +} + +export default Geometry; \ No newline at end of file diff --git a/lib/geometry/index.d.ts b/lib/geometry/index.d.ts deleted file mode 100644 index 82ad292ba..000000000 --- a/lib/geometry/index.d.ts +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -export namespace geometry { - class LineString { - constructor(...args: Point[]); - - static fromBuffer(buffer: Buffer): LineString; - - static fromString(textValue: string): LineString; - - equals(other: LineString): boolean; - - toBuffer(): Buffer; - - toJSON(): string; - - toString(): string; - - } - - class Point { - constructor(x: number, y: number); - - static fromBuffer(buffer: Buffer): Point; - - static fromString(textValue: string): Point; - - equals(other: Point): boolean; - - toBuffer(): Buffer; - - toJSON(): string; - - toString(): string; - - } - - class Polygon { - constructor(...args: Point[]); - - static fromBuffer(buffer: Buffer): Polygon; - - static fromString(textValue: string): Polygon; - - equals(other: Polygon): boolean; - - toBuffer(): Buffer; - - toJSON(): string; - - toString(): string; - } -} \ No newline at end of file diff --git a/lib/geometry/index.js b/lib/geometry/index.ts similarity index 75% rename from lib/geometry/index.js rename to lib/geometry/index.ts index 28c9bc7b6..67feb5ee5 100644 --- a/lib/geometry/index.js +++ b/lib/geometry/index.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; + /** * Geometry module. @@ -24,7 +24,18 @@ * @module geometry */ -exports.Geometry = require('./geometry'); -exports.LineString = require('./line-string'); -exports.Point = require('./point'); -exports.Polygon = require('./polygon'); \ No newline at end of file +import Geometry from './geometry'; +import LineString from './line-string'; +import Point from './point'; +import Polygon from './polygon'; + +export default { + Point, + LineString, + Polygon, + Geometry +}; + +export { + Geometry, LineString, Point, Polygon +}; diff --git a/lib/geometry/line-string.js b/lib/geometry/line-string.js deleted file mode 100644 index 3e7c8ee25..000000000 --- a/lib/geometry/line-string.js +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const utils = require('../utils'); -const Geometry = require('./geometry'); -const Point = require('./point'); - -/** - * Creates a new {@link LineString} instance. - * @classdesc - * A LineString is a one-dimensional object representing a sequence of points and the line segments connecting them. - * @param {...Point}[point] A sequence of [Point]{@link module:geometry~Point} items as arguments. 
- * @example - * new LineString(new Point(10.99, 20.02), new Point(14, 26), new Point(34, 1.2)); - * @constructor - * @alias module:geometry~LineString - * @extends {Geometry} - */ -function LineString(point) { - let points = Array.prototype.slice.call(arguments); - if (points.length === 1 && Array.isArray(points) && Array.isArray(points[0])) { - //The first argument is an array of the points - points = points[0]; - } - if (points.length === 1) { - throw new TypeError('LineString can be either empty or contain 2 or more points'); - } - /** - * Returns a frozen Array of points that represent the line. - * @type {Array.} - */ - this.points = Object.freeze(points); -} - -//noinspection JSCheckFunctionSignatures -util.inherits(LineString, Geometry); - -/** - * Creates a {@link LineString} instance from - * a Well-known Text (WKT) - * representation of a line. - * @param {Buffer} buffer - * @returns {LineString} - */ -LineString.fromBuffer = function (buffer) { - if (!buffer || buffer.length < 9) { - throw new TypeError('A linestring buffer should contain at least 9 bytes'); - } - const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); - let offset = 1; - if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.LineString) { - throw new TypeError('Binary representation was not a LineString'); - } - offset += 4; - const length = Geometry.readInt32(buffer, endianness, offset); - offset += 4; - if (buffer.length !== offset + length * 16) { - throw new TypeError(util.format('Length of the buffer does not match %d !== %d', buffer.length, offset + length * 8)); - } - const points = new Array(length); - for (let i = 0; i < length; i++) { - points[i] = new Point( - Geometry.readDouble(buffer, endianness, offset), - Geometry.readDouble(buffer, endianness, offset + 8)); - offset += 16; - } - //noinspection JSCheckFunctionSignatures - return new LineString(points); -}; - -/** - * Creates a {@link LineString} instance from - * a Well-known Text (WKT) - * representation of a line. - * @param {String} textValue - * @returns {LineString} - */ -LineString.fromString = function (textValue) { - const wktRegex = /^LINESTRING ?\(([-0-9. ,]+)\)+$/g; - const matches = wktRegex.exec(textValue); - if (!matches || matches.length !== 2) { - throw new TypeError('Invalid WKT: ' + textValue); - } - const points = LineString.parseSegments(matches[1]); - return new LineString(points); -}; - -/** - * Internal method that parses a series of WKT points. - * @param {String} textValue - * @returns {Array} - * @internal - * @ignore - */ -LineString.parseSegments = function (textValue) { - const points = []; - const pointParts = textValue.split(','); - for (let i = 0; i < pointParts.length; i++) { - const p = pointParts[i].trim(); - if (p.length === 0) { - throw new TypeError('Invalid WKT segment: ' + textValue); - } - const xyText = p.split(' ').filter(function (element) { - return (element.trim().length > 0); - }); - if (xyText.length !== 2) { - throw new TypeError('Invalid WKT segment: ' + textValue); - } - points.push(new Point(parseFloat(xyText[0]), parseFloat(xyText[1]))); - } - return points; -}; - -/** - * Returns a Well-known Binary (WKB) - * representation of this instance. 
- * @returns {Buffer} - */ -LineString.prototype.toBuffer = function () { - const buffer = utils.allocBufferUnsafe(9 + this.points.length * 16); - this.writeEndianness(buffer, 0); - let offset = 1; - this.writeInt32(Geometry.types.LineString, buffer, offset); - offset += 4; - this.writeInt32(this.points.length, buffer, offset); - offset += 4; - this.points.forEach(function (p) { - this.writeDouble(p.x, buffer, offset); - this.writeDouble(p.y, buffer, offset + 8); - offset += 16; - }, this); - return buffer; -}; - -/** - * Returns true if the values of the linestrings are the same, otherwise it returns false. - * @param {LineString} other - * @returns {Boolean} - */ -LineString.prototype.equals = function (other) { - if (!(other instanceof LineString)) { - return false; - } - if (this.points.length !== other.points.length) { - return false; - } - for (let i = 0; i < this.points.length; i++) { - if (!this.points[i].equals(other.points[i])) { - return false; - } - } - return true; -}; - -/** - * Returns Well-known text (WKT) representation of the geometry object. - * @returns {String} - */ -LineString.prototype.toString = function () { - if (this.points.length === 0) { - return 'LINESTRING EMPTY'; - } - return 'LINESTRING (' - + this.points.map(function (p) { - return p.x + ' ' + p.y; - }).join(', ') - + ')'; -}; - -LineString.prototype.useBESerialization = function () { - return false; -}; - -/** - * Returns a JSON representation of this geo-spatial type. - */ -LineString.prototype.toJSON = function () { - return { type: 'LineString', coordinates: this.points.map(function (p) { - return [p.x, p.y]; - })}; -}; - -module.exports = LineString; \ No newline at end of file diff --git a/lib/geometry/line-string.ts b/lib/geometry/line-string.ts new file mode 100644 index 000000000..e5c5dcd81 --- /dev/null +++ b/lib/geometry/line-string.ts @@ -0,0 +1,203 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import util from "util"; +import utils from "../utils"; +import Geometry from "./geometry"; +import Point from "./point"; + +/** + * @classdesc + * A LineString is a one-dimensional object representing a sequence of points and the line segments connecting them. + * @example + * new LineString(new Point(10.99, 20.02), new Point(14, 26), new Point(34, 1.2)); + * @alias module:geometry~LineString + * @extends {Geometry} + */ +class LineString extends Geometry { + /** @internal */ + points: ReadonlyArray; + + /** + * Creates a new {@link LineString} instance. + * @param {...Point} points A sequence of {@link Point} items as arguments. + */ + constructor(...points: Point[] | Point[][]) { + super(); + if (points.length === 1 && Array.isArray(points) && Array.isArray(points[0])) { + //The first argument is an array of the points + points = points[0]; + } + if (points.length === 1) { + throw new TypeError('LineString can be either empty or contain 2 or more points'); + } + /** + * Returns a frozen Array of points that represent the line. 
+ * @type {Array.<Point>} + */ + this.points = Object.freeze(points as Point[]); + } + + /** + * Creates a {@link LineString} instance from + * a Well-known Binary (WKB) + * representation of a line. + * @param {Buffer} buffer + * @returns {LineString} + */ + static fromBuffer(buffer: Buffer): LineString { + if (!buffer || buffer.length < 9) { + throw new TypeError('A linestring buffer should contain at least 9 bytes'); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0)); + let offset = 1; + if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.LineString) { + throw new TypeError('Binary representation was not a LineString'); + } + offset += 4; + const length = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + if (buffer.length !== offset + length * 16) { + throw new TypeError(util.format('Length of the buffer does not match %d !== %d', buffer.length, offset + length * 16)); + } + const points = new Array(length); + for (let i = 0; i < length; i++) { + points[i] = new Point( + Geometry.readDouble(buffer, endianness, offset), + Geometry.readDouble(buffer, endianness, offset + 8)); + offset += 16; + } + //noinspection JSCheckFunctionSignatures + return new LineString(points); + } + + /** + * Creates a {@link LineString} instance from + * a Well-known Text (WKT) + * representation of a line. + * @param {String} textValue + * @returns {LineString} + */ + static fromString(textValue: string): LineString { + const wktRegex = /^LINESTRING ?\(([-0-9. ,]+)\)+$/g; + const matches = wktRegex.exec(textValue); + if (!matches || matches.length !== 2) { + throw new TypeError("Invalid WKT: " + textValue); + } + const points = LineString.parseSegments(matches[1]); + return new LineString(points); + } + + /** + * Internal method that parses a series of WKT points. + * @param {String} textValue + * @returns {Array} + * @internal + * @ignore + */ + static parseSegments(textValue: string): Point[] { + const points: Point[] = []; + const pointParts = textValue.split(","); + for (const part of pointParts) { + const p = part.trim(); + if (p.length === 0) { + throw new TypeError("Invalid WKT segment: " + textValue); + } + const xyText = p.split(" ").filter((element) => element.trim().length > 0); + if (xyText.length !== 2) { + throw new TypeError("Invalid WKT segment: " + textValue); + } + points.push(new Point(parseFloat(xyText[0]), parseFloat(xyText[1]))); + } + return points; + } + + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ + toBuffer(): Buffer { + const buffer = utils.allocBufferUnsafe(9 + this.points.length * 16); + this.writeEndianness(buffer, 0); + let offset = 1; + this.writeInt32(Geometry.types.LineString, buffer, offset); + offset += 4; + this.writeInt32(this.points.length, buffer, offset); + offset += 4; + this.points.forEach((p) => { + this.writeDouble(p.x, buffer, offset); + this.writeDouble(p.y, buffer, offset + 8); + offset += 16; + }); + return buffer; + } + + /** + * Returns true if the values of the linestrings are the same, otherwise it returns false. + * @param {LineString} other + * @returns {Boolean} + */ + equals(other: LineString): boolean { + if (!(other instanceof LineString)) { + return false; + } + if (this.points.length !== other.points.length) { + return false; + } + for (let i = 0; i < this.points.length; i++) { + if (!this.points[i].equals(other.points[i])) { + return false; + } + } + return true; + } + + /** + * Returns Well-known Text (WKT) representation of the geometry object. 
+ * @returns {String} + */ + toString(): string { + if (this.points.length === 0) { + return 'LINESTRING EMPTY'; + } + return 'LINESTRING (' + + this.points.map(function (p) { + return p.x + ' ' + p.y; + }).join(', ') + + ')'; + } + + /** + * Returns false to indicate little-endian serialization. + * @internal + * @returns {Boolean} + */ + useBESerialization(): boolean { + return false; + } + + //TODO: it was exposed as toJSON(): string; But it clearly returns an object + /** + * Returns a JSON representation of this geo-spatial type. + */ + toJSON(): object { + return { type: 'LineString', coordinates: this.points.map(function (p) { + return [p.x, p.y]; + })}; + } +} + +export default LineString; \ No newline at end of file diff --git a/lib/geometry/point.js b/lib/geometry/point.js deleted file mode 100644 index 473b6cd1d..000000000 --- a/lib/geometry/point.js +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const utils = require('../utils'); -const Geometry = require('./geometry'); - -/** - * Creates a new {@link Point} instance. - * @classdesc - * A Point is a zero-dimensional object that represents a specific (X,Y) - * location in a two-dimensional XY-Plane. In case of Geographic Coordinate - * Systems, the X coordinate is the longitude and the Y is the latitude. - * @param {Number} x The X coordinate. - * @param {Number} y The Y coordinate. - * @extends {Geometry} - * @alias module:geometry~Point - * @constructor - */ -function Point(x, y) { - if (typeof x !== 'number' || typeof y !== 'number') { - throw new TypeError('X and Y must be numbers'); - } - if (isNaN(x) || isNaN(y)) { - throw new TypeError('X and Y must be numbers'); - } - /** - * Returns the X coordinate of this 2D point. - * @type {Number} - */ - this.x = x; - /** - * Returns the Y coordinate of this 2D point. - * @type {Number} - */ - this.y = y; -} - -//noinspection JSCheckFunctionSignatures -util.inherits(Point, Geometry); - -/** - * Creates a {@link Point} instance from - * a Well-known Text (WKT) - * representation of a 2D point. - * @param {Buffer} buffer - * @returns {Point} - */ -Point.fromBuffer = function (buffer) { - if (!buffer || buffer.length !== 21) { - throw new TypeError('2D Point buffer should contain 21 bytes'); - } - const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); - if (Geometry.readInt32(buffer, endianness, 1) !== Geometry.types.Point2D) { - throw new TypeError('Binary representation was not a point'); - } - return new Point(Geometry.readDouble(buffer, endianness, 5), Geometry.readDouble(buffer, endianness, 13)); -}; - -/** - * Creates a {@link Point} instance from - * a Well-known Text (WKT) - * representation of a 2D point. 
- * @param {String} textValue - * @returns {Point} - */ -Point.fromString = function (textValue) { - const wktRegex = /^POINT\s?\(([-0-9.]+) ([-0-9.]+)\)$/g; - const matches = wktRegex.exec(textValue); - if (!matches || matches.length !== 3) { - throw new TypeError('2D Point WTK should contain 2 coordinates'); - } - return new Point(parseFloat(matches[1]), parseFloat(matches[2])); -}; - -/** - * Returns a Well-known Binary (WKB) - * representation of this instance. - * @returns {Buffer} - */ -Point.prototype.toBuffer = function () { - const buffer = utils.allocBufferUnsafe(21); - this.writeEndianness(buffer, 0); - this.writeInt32(Geometry.types.Point2D, buffer, 1); - this.writeDouble(this.x, buffer, 5); - this.writeDouble(this.y, buffer, 13); - return buffer; -}; - -/** - * Returns true if the values of the point are the same, otherwise it returns false. - * @param {Point} other - * @returns {Boolean} - */ -Point.prototype.equals = function (other) { - if (!(other instanceof Point)) { - return false; - } - return (this.x === other.x && this.y === other.y); -}; - -/** - * Returns Well-known text (WKT) representation of the geometry object. - * @returns {String} - */ -Point.prototype.toString = function () { - return util.format('POINT (%d %d)', this.x, this.y); -}; - -Point.prototype.useBESerialization = function () { - return false; -}; - -/** - * Returns a JSON representation of this geo-spatial type. - */ -Point.prototype.toJSON = function () { - return { type: 'Point', coordinates: [ this.x, this.y ]}; -}; - -module.exports = Point; \ No newline at end of file diff --git a/lib/geometry/point.ts b/lib/geometry/point.ts new file mode 100644 index 000000000..f6ab10c70 --- /dev/null +++ b/lib/geometry/point.ts @@ -0,0 +1,142 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import util from "util"; +import utils from "../utils"; +import Geometry from "./geometry"; + +/** + * @classdesc + * A Point is a zero-dimensional object that represents a specific (X,Y) + * location in a two-dimensional XY-Plane. In case of Geographic Coordinate + * Systems, the X coordinate is the longitude and the Y is the latitude. + * @extends {Geometry} + * @alias module:geometry~Point + */ +class Point extends Geometry { + /** @internal */ + x: number; + /** @internal */ + y: number; + + /** + * Creates a new {@link Point} instance. + * @param {Number} x The X coordinate. + * @param {Number} y The Y coordinate. + */ + constructor(x: number, y: number) { + super(); + if (typeof x !== 'number' || typeof y !== 'number') { + throw new TypeError('X and Y must be numbers'); + } + if (isNaN(x) || isNaN(y)) { + throw new TypeError('X and Y must be numbers'); + } + /** + * Returns the X coordinate of this 2D point. + * @type {Number} + */ + this.x = x; + /** + * Returns the Y coordinate of this 2D point. + * @type {Number} + */ + this.y = y; + } + + /** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. 
+ * @param {Buffer} buffer + * @returns {Point} + */ + static fromBuffer(buffer: Buffer): Point { + if (!buffer || buffer.length !== 21) { + throw new TypeError('2D Point buffer should contain 21 bytes'); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0)); + if (Geometry.readInt32(buffer, endianness, 1) !== Geometry.types.Point2D) { + throw new TypeError('Binary representation was not a point'); + } + return new Point(Geometry.readDouble(buffer, endianness, 5), Geometry.readDouble(buffer, endianness, 13)); + } + + /** + * Creates a {@link Point} instance from + * a Well-known Text (WKT) + * representation of a 2D point. + * @param {String} textValue + * @returns {Point} + */ + static fromString(textValue: string): Point { + const wktRegex = /^POINT\s?\(([-0-9.]+) ([-0-9.]+)\)$/g; + const matches = wktRegex.exec(textValue); + if (!matches || matches.length !== 3) { + throw new TypeError("2D Point WKT should contain 2 coordinates"); + } + return new Point(parseFloat(matches[1]), parseFloat(matches[2])); + } + + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ + toBuffer(): Buffer { + const buffer = utils.allocBufferUnsafe(21); + this.writeEndianness(buffer, 0); + this.writeInt32(Geometry.types.Point2D, buffer, 1); + this.writeDouble(this.x, buffer, 5); + this.writeDouble(this.y, buffer, 13); + return buffer; + } + + /** + * Returns true if the values of the point are the same, otherwise it returns false. + * @param {Point} other + * @returns {Boolean} + */ + equals(other: Point): boolean { + if (!(other instanceof Point)) { + return false; + } + return this.x === other.x && this.y === other.y; + } + + /** + * Returns Well-known Text (WKT) representation of the geometry object. + * @returns {String} + */ + toString(): string { + return util.format("POINT (%d %d)", this.x, this.y); + } + + /** @internal */ + useBESerialization(): boolean { + return false; + } + + //TODO: exposed as toJSON(): string;, but clearly returning object + /** + * Returns a JSON representation of this geo-spatial type. + * @returns {Object} + */ + toJSON(): object { + return { type: "Point", coordinates: [this.x, this.y] }; + } +} + +export default Point; \ No newline at end of file diff --git a/lib/geometry/polygon.js b/lib/geometry/polygon.js deleted file mode 100644 index c4536e118..000000000 --- a/lib/geometry/polygon.js +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const utils = require('../utils'); -const Geometry = require('./geometry'); -const Point = require('./point'); -const LineString = require('./line-string'); - -/** - * Creates a new {@link Polygon} instance. - * @classdesc - * Represents is a plane geometry figure that is bounded by a finite chain of straight line segments closing in a loop - * to form a closed chain or circuit. 
- * @param {...Array.}[ringPoints] A sequence of Array of [Point]{@link module:geometry~Point} items as arguments - * representing the rings of the polygon. - * @example - * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); - * @example - * //polygon with a hole - * new Polygon( - * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], - * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] - * ); - * @alias module:geometry~Polygon - * @constructor - */ -function Polygon(ringPoints) { - const rings = Array.prototype.slice.call(arguments); - /** - * Returns a frozen Array of array of points that represent the different rings in the polygon. - * @type {Array} - */ - this.rings = Object.freeze(rings); -} - -//noinspection JSCheckFunctionSignatures -util.inherits(Polygon, Geometry); - -/** - * Creates a {@link Polygon} instance from - * a Well-known Text (WKT) - * representation of a polygon. - * @param {Buffer} buffer - * @returns {Polygon} - */ -Polygon.fromBuffer = function (buffer) { - if (!buffer || buffer.length < 9) { - throw new TypeError('A Polygon buffer should contain at least 9 bytes'); - } - const endianness = Geometry.getEndianness(buffer.readInt8(0, true)); - let offset = 1; - if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.Polygon) { - throw new TypeError('Binary representation was not a Polygon'); - } - offset += 4; - const ringsLength = Geometry.readInt32(buffer, endianness, offset); - offset += 4; - const ringsArray = new Array(ringsLength); - for (let ringIndex = 0; ringIndex < ringsLength; ringIndex++) { - const pointsLength = Geometry.readInt32(buffer, endianness, offset); - offset += 4; - if (buffer.length < offset + pointsLength * 16) { - throw new TypeError(util.format('Length of the buffer does not match')); - } - const ring = new Array(pointsLength); - for (let i = 0; i < pointsLength; i++) { - ring[i] = new Point( - Geometry.readDouble(buffer, endianness, offset), - Geometry.readDouble(buffer, endianness, offset + 8)); - offset += 16; - } - ringsArray[ringIndex] = ring; - } - //Invoke the constructor with each ring as a parameter - //ringsArray.unshift(null); - //return new (Function.prototype.bind.apply(Polygon, ringsArray)); - return construct(ringsArray); -}; - -/** - * Creates a {@link Polygon} instance from - * a Well-known Text (WKT) - * representation of a shape. 
- * @param {String} textValue - * @returns {Polygon} - */ -Polygon.fromString = function (textValue) { - const wktRegex = /^POLYGON ?\((\(.*\))\)$/g; - const matches = wktRegex.exec(textValue); - function validateWkt(condition) { - if (condition) { - throw new TypeError('Invalid WKT: ' + textValue); - } - } - validateWkt(!matches || matches.length !== 2); - - const ringsText = matches[1]; - const ringsArray = []; - let ringStart = null; - for (let i = 0; i < ringsText.length; i++) { - const c = ringsText[i]; - if (c === '(') { - validateWkt(ringStart !== null); - ringStart = i+1; - continue; - } - if (c === ')') { - validateWkt(ringStart === null); - ringsArray.push(ringsText.substring(ringStart, i)); - ringStart = null; - continue; - } - validateWkt(ringStart === null && c !== ' ' && c !== ','); - } - return construct(ringsArray.map(LineString.parseSegments)); -}; - -/** - * Creates a new instance of Polygon with each array item as a parameter - * @private - * @param {Array>} argsArray - * @returns {Polygon} - */ -function construct(argsArray) { - function F() { - return Polygon.apply(this, argsArray); - } - F.prototype = Polygon.prototype; - return new F(); -} - -/** - * Returns a Well-known Binary (WKB) - * representation of this instance. - * @returns {Buffer} - */ -Polygon.prototype.toBuffer = function () { - let totalRingsLength = 0; - this.rings.forEach(function (ring) { - totalRingsLength += 4 + ring.length * 16; - }, this); - const buffer = utils.allocBufferUnsafe(9 + totalRingsLength); - this.writeEndianness(buffer, 0); - let offset = 1; - this.writeInt32(Geometry.types.Polygon, buffer, offset); - offset += 4; - this.writeInt32(this.rings.length, buffer, offset); - offset += 4; - this.rings.forEach(function (ring) { - this.writeInt32(ring.length, buffer, offset); - offset += 4; - ring.forEach(function (p) { - this.writeDouble(p.x, buffer, offset); - this.writeDouble(p.y, buffer, offset + 8); - offset += 16; - }, this); - }, this); - return buffer; -}; - -/** - * Returns true if the values of the polygons are the same, otherwise it returns false. - * @param {Polygon} other - * @returns {Boolean} - */ -Polygon.prototype.equals = function (other) { - if (!(other instanceof Polygon)) { - return false; - } - if (this.rings.length !== other.rings.length) { - return false; - } - for (let i = 0; i < this.rings.length; i++) { - const r1 = this.rings[i]; - const r2 = other.rings[i]; - if (r1.length !== r2.length) { - return false; - } - for (let j = 0; j < r1.length; j++) { - if (!r1[i].equals(r2[i])) { - return false; - } - } - } - return true; -}; - -Polygon.prototype.useBESerialization = function () { - return false; -}; - -/** - * Returns Well-known text (WKT) representation of the geometry object. - * @returns {String} - */ -Polygon.prototype.toString = function () { - if (this.rings.length === 0) { - return 'POLYGON EMPTY'; - } - let ringStrings = ''; - this.rings.forEach(function (r, i) { - if (i > 0) { - ringStrings += ', '; - } - ringStrings += '(' + - r.map(function (p) { - return p.x + ' ' + p.y; - }).join(', ') - + ')'; - }); - return 'POLYGON (' + ringStrings + ')'; -}; - -/** - * Returns a JSON representation of this geo-spatial type. 
- */ -Polygon.prototype.toJSON = function () { - return { type: 'Polygon', coordinates: this.rings.map(function (r) { - return r.map(function (p) { - return [ p.x, p.y ]; - }); - })}; -}; - -module.exports = Polygon; \ No newline at end of file diff --git a/lib/geometry/polygon.ts b/lib/geometry/polygon.ts new file mode 100644 index 000000000..0010cabc9 --- /dev/null +++ b/lib/geometry/polygon.ts @@ -0,0 +1,230 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import utils from "../utils"; +import Geometry from "./geometry"; +import LineString from "./line-string"; +import Point from "./point"; + +/** + * @classdesc + * Represents a plane geometry figure that is bounded by a finite chain of straight line segments closing in a loop + * to form a closed chain or circuit. + * @example + * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); + * @example + * //polygon with a hole + * new Polygon( + * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], + * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] + * ); + * @alias module:geometry~Polygon + */ +class Polygon extends Geometry { + /** @internal */ + rings: ReadonlyArray<ReadonlyArray<Point>>; + + //TODO: exposed as constructor(...args: Point[]); but clearly constructor(...args: Point[][]) + /** + * Creates a new {@link Polygon} instance. + * @param {...Array.<Point>}[ringPoints] A sequence of Array of [Point]{@link module:geometry~Point} items as arguments + * representing the rings of the polygon. + * @example + * new Polygon([ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ]); + * @example + * //polygon with a hole + * new Polygon( + * [ new Point(30, 10), new Point(40, 40), new Point(10, 20), new Point(30, 10) ], + * [ new Point(25, 20), new Point(30, 30), new Point(20, 20), new Point(25, 20) ] + * ); + * @constructor + */ + constructor(...ringPoints: Point[][]) { + super(); + this.rings = Object.freeze(ringPoints); + } + + /** + * Creates a {@link Polygon} instance from + * a Well-known Binary (WKB) + * representation of a polygon. 
+ * @param {Buffer} buffer + * @returns {Polygon} + */ + static fromBuffer(buffer: Buffer): Polygon { + if (!buffer || buffer.length < 9) { + throw new TypeError("A Polygon buffer should contain at least 9 bytes"); + } + const endianness = Geometry.getEndianness(buffer.readInt8(0)); + let offset = 1; + if (Geometry.readInt32(buffer, endianness, offset) !== Geometry.types.Polygon) { + throw new TypeError("Binary representation was not a Polygon"); + } + offset += 4; + const ringsLength = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + const ringsArray : Point[][]= new Array(ringsLength); + for (let ringIndex = 0; ringIndex < ringsLength; ringIndex++) { + const pointsLength = Geometry.readInt32(buffer, endianness, offset); + offset += 4; + if (buffer.length < offset + pointsLength * 16) { + throw new TypeError("Length of the buffer does not match"); + } + const ring = new Array(pointsLength); + for (let i = 0; i < pointsLength; i++) { + ring[i] = new Point( + Geometry.readDouble(buffer, endianness, offset), + Geometry.readDouble(buffer, endianness, offset + 8) + ); + offset += 16; + } + ringsArray[ringIndex] = ring; + } + return new Polygon(...ringsArray); + } + + /** + * Creates a {@link Polygon} instance from a Well-known Text (WKT) representation. + * @param {String} textValue + * @returns {Polygon} + */ + static fromString(textValue: string): Polygon { + const wktRegex = /^POLYGON ?\((\(.*\))\)$/g; + const matches = wktRegex.exec(textValue); + function validateWkt(condition) { + if (condition) { + throw new TypeError('Invalid WKT: ' + textValue); + } + } + validateWkt(!matches || matches.length !== 2); + + const ringsText = matches[1]; + const ringsArray : string[] = []; + let ringStart = null; + for (let i = 0; i < ringsText.length; i++) { + const c = ringsText[i]; + if (c === '(') { + validateWkt(ringStart !== null); + ringStart = i+1; + continue; + } + if (c === ')') { + validateWkt(ringStart === null); + ringsArray.push(ringsText.substring(ringStart, i)); + ringStart = null; + continue; + } + validateWkt(ringStart === null && c !== ' ' && c !== ','); + } + return new Polygon(...ringsArray.map(LineString.parseSegments)); + } + + /** + * Returns a Well-known Binary (WKB) + * representation of this instance. + * @returns {Buffer} + */ + toBuffer(): Buffer { + let totalRingsLength = 0; + this.rings.forEach((ring) => { + totalRingsLength += 4 + ring.length * 16; + }); + const buffer = utils.allocBufferUnsafe(9 + totalRingsLength); + this.writeEndianness(buffer, 0); + let offset = 1; + this.writeInt32(Geometry.types.Polygon, buffer, offset); + offset += 4; + this.writeInt32(this.rings.length, buffer, offset); + offset += 4; + this.rings.forEach((ring) => { + this.writeInt32(ring.length, buffer, offset); + offset += 4; + ring.forEach((p) => { + this.writeDouble(p.x, buffer, offset); + this.writeDouble(p.y, buffer, offset + 8); + offset += 16; + }); + }); + return buffer; + } + + /** + * Returns true if the values of the polygons are the same, otherwise it returns false. 
+ * @param {Polygon} other + * @returns {Boolean} + */ + equals(other: Polygon): boolean { + if (!(other instanceof Polygon)) { + return false; + } + if (this.rings.length !== other.rings.length) { + return false; + } + for (let i = 0; i < this.rings.length; i++) { + const r1 = this.rings[i]; + const r2 = other.rings[i]; + if (r1.length !== r2.length) { + return false; + } + for (let j = 0; j < r1.length; j++) { + if (!r1[j].equals(r2[j])) { + return false; + } + } + } + return true; + } + + /** @internal */ + useBESerialization(): boolean { + return false; + } + + /** + * Returns Well-known Text (WKT) representation of the geometry object. + * @returns {String} + */ + toString(): string { + if (this.rings.length === 0) { + return 'POLYGON EMPTY'; + } + let ringStrings = ''; + this.rings.forEach(function (r, i) { + if (i > 0) { + ringStrings += ', '; + } + ringStrings += '(' + + r.map(function (p) { + return p.x + ' ' + p.y; + }).join(', ') + + ')'; + }); + return 'POLYGON (' + ringStrings + ')'; + } + + /** + * Returns a JSON representation of this geo-spatial type. + */ + toJSON(): object { + return { type: 'Polygon', coordinates: this.rings.map(function (r) { + return r.map(function (p) { + return [ p.x, p.y ]; + }); + })}; + } +} + +export default Polygon; \ No newline at end of file diff --git a/lib/host-connection-pool.js b/lib/host-connection-pool.ts similarity index 90% rename from lib/host-connection-pool.js rename to lib/host-connection-pool.ts index f03e6981e..ffecdffb8 100644 --- a/lib/host-connection-pool.js +++ b/lib/host-connection-pool.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const util = require('util'); -const events = require('events'); +import events from "events"; +import util from "util"; +import clientOptions from "./client-options"; +import Connection from "./connection"; +import errors from "./errors"; +import type { Host } from "./host"; +import promiseUtils from "./promise-utils"; +import utils from "./utils"; -const Connection = require('./connection'); -const utils = require('./utils'); -const promiseUtils = require('./promise-utils'); -const errors = require('./errors'); -const clientOptions = require('./client-options'); // Used to get the index of the connection with less in-flight requests let connectionIndex = 0; @@ -53,15 +53,27 @@ const state = { /** * Represents a pool of connections to a host + * @ignore @internal */ class HostConnectionPool extends events.EventEmitter { + private _address: any; + private _newConnectionTimeout: NodeJS.Timeout; + private _state: number; + private _opening: boolean; + private _host: Host; + responseCounter: number; + options: any; + protocolVersion: number; + coreConnectionsLength: number; + connections: Connection[]; + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; /** * Creates a new instance of HostConnectionPool. 
* @param {Host} host * @param {Number} protocolVersion Initial protocol version * @extends EventEmitter */ - constructor(host, protocolVersion) { + constructor(host: Host, protocolVersion: number) { super(); this._address = host.address; this._newConnectionTimeout = null; @@ -76,9 +88,9 @@ class HostConnectionPool extends events.EventEmitter { * An immutable array of connections * @type {Array.} */ - this.connections = utils.emptyArray; + this.connections = utils.emptyArray as Connection[]; this.setMaxListeners(0); - this.log = utils.log; + this.log = utils.log.bind(this); } getInFlight() { @@ -101,7 +113,7 @@ class HostConnectionPool extends events.EventEmitter { * @throws {Error} * @throws {BusyConnectionError} */ - borrowConnection(previousConnection) { + borrowConnection(previousConnection: Connection): Connection { if (this.connections.length === 0) { throw new Error('No connection available'); } @@ -125,7 +137,7 @@ class HostConnectionPool extends events.EventEmitter { * @param {Connection} previousConnection When provided, it will attempt to obtain a different connection. * @returns {Connection!} */ - static minInFlight(connections, maxRequests, previousConnection) { + static minInFlight(connections: Array, maxRequests: number, previousConnection: Connection): Connection { const length = connections.length; if (length === 1) { return connections[0]; @@ -168,7 +180,7 @@ class HostConnectionPool extends events.EventEmitter { * Creates all the connections in the pool and switches the keyspace of each connection if needed. * @param {string} keyspace */ - async warmup(keyspace) { + async warmup(keyspace: string) { if (this.connections.length < this.coreConnectionsLength) { while (this.connections.length < this.coreConnectionsLength) { await this._attemptNewConnection(); @@ -185,7 +197,7 @@ class HostConnectionPool extends events.EventEmitter { for (const connection of this.connections) { await connection.changeKeyspace(keyspace); } - } catch (err) { + } catch (_err) { // Log it and move on, it could be a momentary schema mismatch failure this.log('warning', `Connection(s) to host ${this._address} could not be switched to keyspace ${keyspace}`); } @@ -193,7 +205,7 @@ class HostConnectionPool extends events.EventEmitter { } /** @returns {Connection} */ - _createConnection() { + _createConnection(): Connection { const endpointOrServerName = !this.options.sni ? this._address : this._host.hostId.toString(); @@ -203,7 +215,7 @@ class HostConnectionPool extends events.EventEmitter { } /** @param {Connection} c */ - _addListeners(c) { + _addListeners(c: Connection) { c.on('responseDequeued', () => this.responseCounter++); const self = this; @@ -238,7 +250,7 @@ class HostConnectionPool extends events.EventEmitter { * If a connection is being opened, it will resolve when the existing open task completes. * @returns {Promise} */ - async _attemptNewConnection() { + async _attemptNewConnection(): Promise { if (this._opening) { // Wait for the event to fire return await promiseUtils.fromEvent(this, 'open'); @@ -304,7 +316,7 @@ class HostConnectionPool extends events.EventEmitter { * Closes the connection and removes a connection from the pool. * @param {Connection} connection */ - remove(connection) { + remove(connection: Connection) { // locating an object by position in the array is O(n), but normally there should be between 1 to 8 connections. 
const index = this.connections.indexOf(connection); if (index < 0) { @@ -325,7 +337,7 @@ class HostConnectionPool extends events.EventEmitter { /** * @param {Number} delay */ - scheduleNewConnectionAttempt(delay) { + scheduleNewConnectionAttempt(delay: number) { if (this.isClosing()) { return; } @@ -369,7 +381,7 @@ class HostConnectionPool extends events.EventEmitter { * Gets the amount of responses and resets the internal counter. * @returns {number} */ - getAndResetResponseCounter() { + getAndResetResponseCounter(): number { const temp = this.responseCounter; this.responseCounter = 0; return temp; @@ -400,7 +412,7 @@ class HostConnectionPool extends events.EventEmitter { const self = this; const connections = this.connections; - this.connections = utils.emptyArray; + this.connections = utils.emptyArray as Connection[]; let closedConnections = 0; this.log('info', util.format('Draining and closing %d connections to %s', connections.length, this._address)); let wasClosed = false; @@ -473,7 +485,7 @@ class HostConnectionPool extends events.EventEmitter { /** * @returns {Promise} */ - async shutdown() { + async shutdown(): Promise { this.clearNewConnectionAttempt(); if (!this.connections.length) { @@ -500,7 +512,7 @@ class HostConnectionPool extends events.EventEmitter { async _closeAllConnections() { const connections = this.connections; // point to an empty array - this.connections = utils.emptyArray; + this.connections = utils.emptyArray as Connection[]; if (connections.length === 0) { return; } @@ -519,4 +531,4 @@ function getDefaultOptions() { return defaultOptions; } -module.exports = HostConnectionPool; \ No newline at end of file +export default HostConnectionPool; \ No newline at end of file diff --git a/lib/host.js b/lib/host.ts similarity index 86% rename from lib/host.js rename to lib/host.ts index 56c128f5b..90c763d87 100644 --- a/lib/host.js +++ b/lib/host.ts @@ -13,16 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import events from "events"; +import Connection from "./connection"; +import HostConnectionPool from "./host-connection-pool"; +import PrepareHandler from "./prepare-handler"; +import promiseUtils from "./promise-utils"; +import types, { Uuid } from "./types/index"; +import utils from "./utils"; -'use strict'; - -const events = require('events'); - -const utils = require('./utils'); -const types = require('./types'); -const HostConnectionPool = require('./host-connection-pool'); -const PrepareHandler = require('./prepare-handler'); -const promiseUtils = require('./promise-utils'); const healthResponseCountInterval = 200; @@ -31,9 +29,35 @@ const healthResponseCountInterval = 200; * @extends EventEmitter */ class Host extends events.EventEmitter { + address: string; + private setDownAt: number; + private log: (type: any, info: any, furtherInfo?: any, options?: any) => void; + /** @internal */ + isUpSince: number; + /** @internal */ + pool: any; + cassandraVersion: string; + datacenter: string; + rack: string; + tokens: string[]; + hostId: Uuid; + /** @internal */ + dseVersion: string; + /** @internal */ + workloads: readonly any[]; + private _distance: number; + private _healthResponseCounter: number; + /** @internal */ + reconnectionSchedule: any; + /** @internal */ + options: any; + private reconnectionDelay: number; + private _healthResponseCountTimer: any; + private _metadata: any; /** * Creates a new Host instance. 
+ * @internal @ignore */ constructor(address, protocolVersion, options, metadata) { super(); @@ -43,7 +67,7 @@ class Host extends events.EventEmitter { */ this.address = address; this.setDownAt = 0; - this.log = utils.log; + this.log = utils.log.bind(this); /** * Gets the timestamp of the moment when the Host was marked as UP. @@ -164,7 +188,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - setUp(clearReconnection) { + setUp(clearReconnection?: boolean) { if (!this.setDownAt) { //The host is already marked as UP return; @@ -201,7 +225,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - shutdown(waitForPending) { + shutdown(waitForPending?: boolean): Promise { if (this._healthResponseCountTimer) { clearInterval(this._healthResponseCountTimer); } @@ -217,7 +241,7 @@ class Host extends events.EventEmitter { * Determines if the node is UP now (seen as UP by the driver). * @returns {boolean} */ - isUp() { + isUp(): boolean { return !this.setDownAt; } @@ -226,7 +250,7 @@ class Host extends events.EventEmitter { * Deprecated: Use {@link Host#isUp()} instead. * @returns {boolean} */ - canBeConsideredAsUp() { + canBeConsideredAsUp(): boolean { const self = this; function hasTimePassed() { return new Date().getTime() - self.setDownAt >= self.reconnectionDelay; @@ -240,7 +264,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - setDistance(distance) { + setDistance(distance: number) { const previousDistance = this._distance; this._distance = distance || types.distance.local; if (this.options.pooling.coreConnectionsPerHost) { @@ -278,7 +302,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - setProtocolVersion(value) { + setProtocolVersion(value: number) { this.pool.protocolVersion = value; } @@ -291,7 +315,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - borrowConnection(previousConnection) { + borrowConnection(previousConnection?: Connection): Connection { return this.pool.borrowConnection(previousConnection); } @@ -301,7 +325,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - warmupPool(keyspace) { + warmupPool(keyspace: string) { return this.pool.warmup(keyspace); } @@ -319,7 +343,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - getActiveConnection() { + getActiveConnection(): Connection { if (!this.isUp() || !this.pool.connections.length) { return null; } @@ -333,7 +357,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - getResponseCount() { + getResponseCount(): number { // Last interval plus the current count return this._healthResponseCounter + this.pool.responseCounter; } @@ -344,7 +368,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - checkHealth(connection) { + checkHealth(connection: Connection) { if (connection.timedOutOperations <= this.options.socketOptions.defunctReadTimeoutThreshold) { return; } @@ -356,7 +380,7 @@ class Host extends events.EventEmitter { * @internal * @ignore */ - removeFromPool(connection) { + removeFromPool(connection: Connection) { this.pool.remove(connection); this._checkPoolState(); } @@ -376,7 +400,7 @@ class Host extends events.EventEmitter { * If the amount of connections is 0 for not ignored hosts, the host must be down. 
* @private */ - _checkPoolState() { + private _checkPoolState() { if (this.pool.isClosing()) { return; } @@ -398,7 +422,7 @@ class Host extends events.EventEmitter { * Executed after an scheduled new connection attempt finished * @private */ - async _onNewConnectionOpen(err) { + private async _onNewConnectionOpen(err) { if (err) { this._checkPoolState(); return; @@ -422,21 +446,22 @@ class Host extends events.EventEmitter { * position. * @returns {Array.} */ - getCassandraVersion() { + getCassandraVersion(): Array { if (!this.cassandraVersion) { - return utils.emptyArray; + return utils.emptyArray as number[]; } return this.cassandraVersion.split('-')[0].split('.').map(x => parseInt(x, 10)); } + //TODO: was not exposed. Should we? /** * Gets the DSE version of the host as an Array, containing the major version in the first position. * In case the cluster is not a DSE cluster, it returns an empty Array. - * @returns {Array} + * @returns {Array.} */ - getDseVersion() { + getDseVersion(): Array { if (!this.dseVersion) { - return utils.emptyArray; + return utils.emptyArray as number[]; } return this.dseVersion.split('-')[0].split('.').map(x => parseInt(x, 10)); } @@ -450,6 +475,10 @@ class Host extends events.EventEmitter { * @constructor */ class HostMap extends events.EventEmitter{ + private _items: Map; + private _values: any; + length: number; + /** @internal */ constructor() { super(); @@ -470,9 +499,9 @@ class HostMap extends events.EventEmitter{ /** * Executes a provided function once per map element. - * @param callback + * @param {Function} callback */ - forEach(callback) { + forEach(callback: (value: Host, key: string) => void): void{ const items = this._items; for (const [ key, value ] of items) { callback(value, key); @@ -484,7 +513,7 @@ class HostMap extends events.EventEmitter{ * @param {String} key * @returns {Host} */ - get(key) { + get(key: string): Host { return this._items.get(key); } @@ -492,7 +521,7 @@ class HostMap extends events.EventEmitter{ * Returns an array of host addresses. * @returns {Array.} */ - keys() { + keys(): Array { return Array.from(this._items.keys()); } @@ -500,8 +529,9 @@ class HostMap extends events.EventEmitter{ * Removes an item from the map. * @param {String} key The key of the host * @fires HostMap#remove + * @internal @ignore */ - remove(key) { + remove(key: string) { const value = this._items.get(key); if (value === undefined) { return; @@ -522,8 +552,9 @@ class HostMap extends events.EventEmitter{ * Removes multiple hosts from the map. * @param {Array.} keys * @fires HostMap#remove + * @ignore @internal */ - removeMultiple(keys) { + removeMultiple(keys: Array) { // Clear value cache this._values = null; @@ -552,8 +583,9 @@ class HostMap extends events.EventEmitter{ * @param {Host} value The host to be added * @fires HostMap#remove * @fires HostMap#add + * @ignore @internal */ - set(key, value) { + set(key: string, value: Host) { // Clear values cache this._values = null; @@ -581,9 +613,9 @@ class HostMap extends events.EventEmitter{ * @param {Number} [begin] * @param {Number} [end] * @returns {Array} - * @ignore + * @ignore @internal */ - slice(begin, end) { + slice(begin: number, end: number): Array { if (!begin && !end) { // Avoid making a copy of the copy return this.values(); @@ -594,7 +626,7 @@ class HostMap extends events.EventEmitter{ /** * Deprecated: Use set() instead. 
- * @ignore + * @ignore @internal * @deprecated */ push(k, v) { @@ -605,7 +637,7 @@ class HostMap extends events.EventEmitter{ * Returns a shallow copy of the values of the map. * @returns {Array.} */ - values() { + values(): Array { if (!this._values) { // Cache the values this._values = Object.freeze(Array.from(this._items.values())); @@ -617,8 +649,9 @@ class HostMap extends events.EventEmitter{ /** * Removes all items from the map. * @returns {Array.} The previous items + * @ignore @internal */ - clear() { + clear(): Array { const previousItems = this.values(); // Clear cache @@ -633,10 +666,12 @@ class HostMap extends events.EventEmitter{ return previousItems; } + /** @internal @ignore */ inspect() { return this._items; } + /** @internal @ignore */ toJSON() { // Node.js 10 and below don't support Object.fromEntries() if (Object.fromEntries) { @@ -652,7 +687,12 @@ class HostMap extends events.EventEmitter{ } } -module.exports = { +export { + Host, + HostMap +}; + +export default { Host, HostMap }; \ No newline at end of file diff --git a/lib/insights-client.js b/lib/insights-client.ts similarity index 88% rename from lib/insights-client.js rename to lib/insights-client.ts index 4c9207d4d..837c810c6 100644 --- a/lib/insights-client.js +++ b/lib/insights-client.ts @@ -14,19 +14,20 @@ * limitations under the License. */ -'use strict'; - -const os = require('os'); -const path = require('path'); -const fs = require('fs'); -const utils = require('./utils'); -const promiseUtils = require('./promise-utils'); -const types = require('./types'); -const requests = require('./requests'); -const { ExecutionOptions } = require('./execution-options'); -const packageInfo = require('../package.json'); -const VersionNumber = require('./types/version-number'); -const { NoAuthProvider } = require('./auth'); + +import fs from "fs"; +import os from "os"; +import path from "path"; +import packageInfo from "../package.json"; +import { NoAuthProvider } from './auth/index'; +import type Client from './client'; +import { type ClientOptions, type QueryOptions } from './client'; +import { ExecutionOptions } from "./execution-options"; +import promiseUtils from "./promise-utils"; +import requests from "./requests"; +import types from "./types/index"; +import VersionNumber from "./types/version-number"; +import utils from "./utils"; let kerberosModule; @@ -34,7 +35,7 @@ try { // eslint-disable-next-line kerberosModule = require('kerberos'); } -catch (err) { +catch (_err) { // Kerberos is an optional dependency } @@ -46,8 +47,19 @@ const maxStatusErrorLogs = 5; /** * Contains methods and functionality to send events to DSE Insights. + * @internal + * @ignore */ class InsightsClient { + private _client: Client; + private _sessionId: any; + private _enabled: boolean; + private _closed: boolean; + private _firstTimeout: NodeJS.Timeout; + private _recurrentTimeout: NodeJS.Timeout; + private _statusErrorLogs: number; + private _statusEventDelay: number; + private _errorCallback: Function; /** * Creates a new instance of the {@link InsightsClient} using the driver {@link Client}. @@ -56,7 +68,7 @@ class InsightsClient { * @param {Number} [options.statusEventDelay] * @param {Function} [options.errorCallback] */ - constructor(client, options) { + constructor(client: Client, options?: { statusEventDelay?: number; errorCallback?: Function; }) { this._client = client; this._sessionId = types.Uuid.random().toString(); this._enabled = false; @@ -76,7 +88,7 @@ class InsightsClient { * regular intervals. 
* @returns {undefined} */ - init() { + init(): undefined { this._enabled = this._client.options.monitorReporting.enabled && this._dseSupportsInsights(); if (!this._enabled) { return; @@ -120,7 +132,7 @@ class InsightsClient { * @returns {Promise} * @private */ - async _sendStartupEvent() { + async _sendStartupEvent(): Promise { const message = await this._getStartupMessage(); const request = new requests.QueryRequest(rpc, [message], ExecutionOptions.empty()); await this._client.controlConnection.query(request, false); @@ -131,7 +143,7 @@ class InsightsClient { * @returns {Promise} A promise that is never rejected. * @private */ - async _sendStatusEvent() { + async _sendStatusEvent(): Promise { const request = new requests.QueryRequest(rpc, [ this._getStatusEvent() ], ExecutionOptions.empty()); try { @@ -171,7 +183,7 @@ class InsightsClient { return false; } - const version = new VersionNumber(...versionArr); + const version = new VersionNumber(...(versionArr as [number, number, number])); return version.compare(minDse6Version) >= 0 || (version.compare(dse600Version) < 0 && version.compare(minDse51Version) >= 0); @@ -183,7 +195,7 @@ class InsightsClient { * @returns {Promise} Returns a json string with the startup message. * @private */ - async _getStartupMessage() { + async _getStartupMessage(): Promise { const cc = this._client.controlConnection; const options = this._client.options; @@ -257,7 +269,7 @@ class InsightsClient { _getConfigAntiPatterns() { const options = this._client.options; - const result = {}; + const result : {sslWithoutCertValidation?: string}= {}; if (options.sslOptions && !options.sslOptions.rejectUnauthorized) { result.sslWithoutCertValidation = @@ -274,7 +286,7 @@ class InsightsClient { * @returns {Array} * @private */ - _getDataCenters() { + _getDataCenters(): Array { const remoteConnectionsLength = this._client.options.pooling.coreConnectionsPerHost[types.distance.remote]; const dataCenters = new Set(); @@ -290,11 +302,11 @@ class InsightsClient { /** * Tries to obtain the application name and version from - * @param {DseClientOptions} options + * @param {ClientOptions} options * @returns {Promise} * @private */ - async _getAppInfo(options) { + async _getAppInfo(options: ClientOptions): Promise { if (typeof options.applicationName === 'string') { return Promise.resolve({ applicationName: options.applicationName, @@ -303,7 +315,7 @@ class InsightsClient { }); } - let readPromise = Promise.resolve(); + let readPromise : Promise= Promise.resolve(); if (require.main && require.main.filename) { const packageInfoPath = path.dirname(require.main.filename); @@ -322,7 +334,7 @@ class InsightsClient { applicationVersion = packageInfo.version; } } - catch (err) { + catch (_err) { // The package.json file could not be parsed // Use the default name } @@ -339,7 +351,7 @@ class InsightsClient { * @private * @returns {Promise} A Promise that will never be rejected */ - _readPackageInfoFile(packageInfoPath) { + _readPackageInfoFile(packageInfoPath): Promise { return new Promise(resolve => { fs.readFile(path.join(packageInfoPath, 'package.json'), 'utf8', (err, data) => { // Swallow error @@ -352,7 +364,7 @@ class InsightsClient { * @returns {String} Returns a json string with the startup message. 
* @private */ - _getStatusEvent() { + _getStatusEvent(): string { const cc = this._client.controlConnection; const options = this._client.options; const state = this._client.getState(); @@ -404,7 +416,7 @@ class InsightsClient { } } -module.exports = InsightsClient; +export default InsightsClient; function mapToObject(map) { const result = {}; @@ -451,7 +463,7 @@ function getExecutionProfiles(client) { } function setExecutionProfileProperties(client, parent, profile, defaultProfile) { - const output = parent[profile.name] = {}; + const output : QueryOptions = parent[profile.name] = {}; setExecutionProfileItem(output, profile, defaultProfile, 'readTimeout'); setExecutionProfileItem(output, profile, defaultProfile, 'loadBalancing', getPolicyInfo); setExecutionProfileItem(output, profile, defaultProfile, 'retry', getPolicyInfo); @@ -461,7 +473,7 @@ function setExecutionProfileProperties(client, parent, profile, defaultProfile) if (profile === defaultProfile) { // Speculative execution policy is included in the profiles as some drivers support // different spec exec policy per profile, in this case is fixed for all profiles - output.speculativeExecution = getPolicyInfo(client.options.policies.speculativeExecution); + output["speculativeExecution"] = getPolicyInfo(client.options.policies.speculativeExecution); } if (profile.graphOptions) { @@ -482,7 +494,7 @@ function setExecutionProfileProperties(client, parent, profile, defaultProfile) } } -function setExecutionProfileItem(output, profile, defaultProfile, prop, valueGetter) { +function setExecutionProfileItem(output, profile, defaultProfile, prop, valueGetter?) { const value = profile[prop]; valueGetter = valueGetter || (x => x); diff --git a/lib/mapping/cache.js b/lib/mapping/cache.ts similarity index 87% rename from lib/mapping/cache.js rename to lib/mapping/cache.ts index ffc4777da..98a678fc5 100644 --- a/lib/mapping/cache.js +++ b/lib/mapping/cache.ts @@ -13,16 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -'use strict'; - -const qModule = require('./q'); -const QueryOperator = qModule.QueryOperator; -const QueryAssignment = qModule.QueryAssignment; +import type { FindDocInfo, InsertDocInfo, RemoveDocInfo, UpdateDocInfo } from "."; +import { QueryAssignment, QueryOperator } from "./q"; /** * Provides utility methods for obtaining a caching keys based on the specifics of the Mapper methods. 
- * @ignore + * @ignore @internal */ class Cache { /** @@ -32,7 +28,7 @@ class Cache { * @param {{fields, limit, orderBy}} docInfo * @returns {Iterator} */ - static *getSelectKey(docKeys, doc, docInfo) { + static *getSelectKey(docKeys: Array, doc: object, docInfo: FindDocInfo): Iterator { yield* Cache._yieldKeyAndOperators(docKeys, doc); yield* Cache._getSelectDocInfo(docInfo); @@ -42,7 +38,7 @@ class Cache { * @param {{fields, limit, orderBy}} docInfo * @returns {Iterator} */ - static *getSelectAllKey(docInfo) { + static *getSelectAllKey(docInfo: FindDocInfo): Iterator { yield 'root'; yield* Cache._getSelectDocInfo(docInfo); @@ -53,7 +49,7 @@ class Cache { * @param {{fields, limit, orderBy}} docInfo * @private */ - static *_getSelectDocInfo(docInfo) { + private static *_getSelectDocInfo(docInfo: FindDocInfo): IterableIterator { if (docInfo) { if (docInfo.fields && docInfo.fields.length > 0) { // Use a separator from properties @@ -85,7 +81,7 @@ class Cache { * @param {{ifNotExists, ttl, fields}} docInfo * @returns {Iterator} */ - static *getInsertKey(docKeys, docInfo) { + static *getInsertKey(docKeys: Array, docInfo: InsertDocInfo): Iterator { // No operator supported on INSERT values yield* docKeys; @@ -112,7 +108,7 @@ class Cache { * @param {Object} doc * @param {{ifExists, when, ttl, fields}} docInfo */ - static *getUpdateKey(docKeys, doc, docInfo) { + static *getUpdateKey(docKeys: Array, doc: object, docInfo: UpdateDocInfo): Iterator { yield* Cache._yieldKeyAndAllQs(docKeys, doc); if (docInfo) { @@ -143,7 +139,7 @@ class Cache { * @param {{ifExists, when, fields, deleteOnlyColumns}} docInfo * @returns {Iterator} */ - static *getRemoveKey(docKeys, doc, docInfo) { + static *getRemoveKey(docKeys: Array, doc: object, docInfo: RemoveDocInfo): Iterator{ yield* Cache._yieldKeyAndOperators(docKeys, doc); if (docInfo) { @@ -204,4 +200,4 @@ class Cache { } } -module.exports = Cache; \ No newline at end of file +export default Cache; \ No newline at end of file diff --git a/lib/mapping/doc-info-adapter.js b/lib/mapping/doc-info-adapter.ts similarity index 85% rename from lib/mapping/doc-info-adapter.js rename to lib/mapping/doc-info-adapter.ts index 32deead54..66f490e85 100644 --- a/lib/mapping/doc-info-adapter.js +++ b/lib/mapping/doc-info-adapter.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import type { MappingExecutionOptions } from "."; +import errors from "../errors"; +import utils from "../utils"; +import type ModelMappingInfo from "./model-mapping-info"; -'use strict'; - -const errors = require('../errors'); -const utils = require('../utils'); /** * Provides utility methods to adapt and map user provided docInfo and executionOptions to a predictable object format. 
- * @ignore + * @ignore @internal */ class DocInfoAdapter { /** @@ -34,7 +34,7 @@ class DocInfoAdapter { * @param {ModelMappingInfo} mappingInfo * @returns {Array} */ - static getPropertiesInfo(docKeys, docInfo, doc, mappingInfo) { + static getPropertiesInfo(docKeys: Array, docInfo: { fields?; }, doc: object, mappingInfo: ModelMappingInfo): Array { let propertyKeys = docKeys; if (docInfo && docInfo.fields && docInfo.fields.length > 0) { propertyKeys = docInfo.fields; @@ -53,9 +53,9 @@ class DocInfoAdapter { * @param {ModelMappingInfo} mappingInfo * @returns {Array} */ - static adaptOrderBy(docInfo, mappingInfo){ + static adaptOrderBy(docInfo: { orderBy?; }, mappingInfo: ModelMappingInfo): Array>{ if (!docInfo || !docInfo.orderBy) { - return utils.emptyArray; + return utils.emptyArray as any[]; } return Object.keys(docInfo.orderBy).map(key => { const value = docInfo.orderBy[key]; @@ -72,7 +72,7 @@ class DocInfoAdapter { * @param {Object|String|undefined} executionOptions * @param {Boolean} isIdempotent */ - static adaptOptions(executionOptions, isIdempotent) { + static adaptOptions(executionOptions: MappingExecutionOptions, isIdempotent: boolean) { const options = { prepare: true, executionProfile: undefined, @@ -99,7 +99,7 @@ class DocInfoAdapter { * @param {Object|String|undefined} executionOptions * @param {Boolean} [overrideIdempotency] */ - static adaptAllOptions(executionOptions, overrideIdempotency) { + static adaptAllOptions(executionOptions: MappingExecutionOptions, overrideIdempotency?: boolean) { const options = { prepare: true, executionProfile: undefined, @@ -133,7 +133,7 @@ class DocInfoAdapter { * @param {Boolean} isIdempotent * @param {Boolean} isCounter */ - static adaptBatchOptions(executionOptions, isIdempotent, isCounter) { + static adaptBatchOptions(executionOptions: string | MappingExecutionOptions, isIdempotent: boolean, isCounter: boolean) { const options = { prepare: true, executionProfile: undefined, @@ -159,4 +159,4 @@ class DocInfoAdapter { } } -module.exports = DocInfoAdapter; \ No newline at end of file +export default DocInfoAdapter; \ No newline at end of file diff --git a/lib/mapping/index.d.ts b/lib/mapping/index.d.ts deleted file mode 100644 index 694e789cf..000000000 --- a/lib/mapping/index.d.ts +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { types } from '../types'; -import { Client } from '../../'; -import Long = types.Long; - -export namespace mapping { - interface TableMappings { - getColumnName(propName: string): string; - - getPropertyName(columnName: string): string; - - newObjectInstance(): any; - } - - class DefaultTableMappings implements TableMappings { - getColumnName(propName: string): string; - - getPropertyName(columnName: string): string; - - newObjectInstance(): any; - } - - class UnderscoreCqlToCamelCaseMappings implements TableMappings { - getColumnName(propName: string): string; - - getPropertyName(columnName: string): string; - - newObjectInstance(): any; - } - - interface Result extends Iterator { - wasApplied(): boolean; - - first(): T | null; - - forEach(callback: (currentValue: T, index: number) => void, thisArg?: any): void; - - toArray(): T[]; - } - - type MappingExecutionOptions = { - executionProfile?: string; - isIdempotent?: boolean; - logged?: boolean; - timestamp?: number | Long; - fetchSize?: number; - pageState?: number; - } - - interface ModelTables { - name: string; - isView: boolean; - } - - class Mapper { - constructor(client: Client, options?: MappingOptions); - - batch(items: ModelBatchItem[], executionOptions?: string | MappingExecutionOptions): Promise; - - forModel(name: string): ModelMapper; - } - - type MappingOptions = { - models: { [key: string]: ModelOptions }; - } - - type FindDocInfo = { - fields?: string[]; - orderBy?: { [key: string]: string }; - limit?: number; - } - - type InsertDocInfo = { - fields?: string[]; - ttl?: number; - ifNotExists?: boolean; - } - - type UpdateDocInfo = { - fields?: string[]; - ttl?: number; - ifExists?: boolean; - when?: { [key: string]: any }; - orderBy?: { [key: string]: string }; - limit?: number; - deleteOnlyColumns?: boolean; - } - - type RemoveDocInfo = { - fields?: string[]; - ttl?: number; - ifExists?: boolean; - when?: { [key: string]: any }; - deleteOnlyColumns?: boolean; - } - - type ModelOptions = { - tables?: string[] | ModelTables[]; - mappings?: TableMappings; - columns?: { [key: string]: string|ModelColumnOptions }; - keyspace?: string; - } - - type ModelColumnOptions = { - name: string; - toModel?: (columnValue: any) => any; - fromModel?: (modelValue: any) => any; - }; - - interface ModelBatchItem { - - } - - interface ModelBatchMapper { - insert(doc: any, docInfo?: InsertDocInfo): ModelBatchItem; - - remove(doc: any, docInfo?: RemoveDocInfo): ModelBatchItem; - - update(doc: any, docInfo?: UpdateDocInfo): ModelBatchItem; - } - - interface ModelMapper { - name: string; - batching: ModelBatchMapper; - - get(doc: { [key: string]: any }, docInfo?: { fields?: string[] }, executionOptions?: string | MappingExecutionOptions): Promise; - - find(doc: { [key: string]: any }, docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; - - findAll(docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; - - insert(doc: { [key: string]: any }, docInfo?: InsertDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; - - update(doc: { [key: string]: any }, docInfo?: UpdateDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; - - remove(doc: { [key: string]: any }, docInfo?: RemoveDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>; - - mapWithQuery( - query: string, - paramsHandler: (doc: any) => any[], - executionOptions?: string | MappingExecutionOptions - ): (doc: any, executionOptions?: string | 
MappingExecutionOptions) => Promise>; - } - - namespace q { - interface QueryOperator { - - } - - function in_(arr: any): QueryOperator; - - function gt(value: any): QueryOperator; - - function gte(value: any): QueryOperator; - - function lt(value: any): QueryOperator; - - function lte(value: any): QueryOperator; - - function notEq(value: any): QueryOperator; - - function and(condition1: any, condition2: any): QueryOperator; - - function incr(value: any): QueryOperator; - - function decr(value: any): QueryOperator; - - function append(value: any): QueryOperator; - - function prepend(value: any): QueryOperator; - - function remove(value: any): QueryOperator; - } -} \ No newline at end of file diff --git a/lib/mapping/index.js b/lib/mapping/index.js deleted file mode 100644 index e1de6c93b..000000000 --- a/lib/mapping/index.js +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -/** - * Module containing classes and fields related to the Mapper. - * @module mapping - */ - -exports.Mapper = require('./mapper'); -exports.ModelMapper = require('./model-mapper'); -exports.ModelBatchMapper = require('./model-batch-mapper'); -exports.ModelBatchItem = require('./model-batch-item').ModelBatchItem; -exports.Result = require('./result'); -const tableMappingsModule = require('./table-mappings'); -exports.TableMappings = tableMappingsModule.TableMappings; -exports.DefaultTableMappings = tableMappingsModule.DefaultTableMappings; -exports.UnderscoreCqlToCamelCaseMappings = tableMappingsModule.UnderscoreCqlToCamelCaseMappings; -exports.q = require('./q').q; \ No newline at end of file diff --git a/lib/mapping/index.ts b/lib/mapping/index.ts new file mode 100644 index 000000000..16d945cff --- /dev/null +++ b/lib/mapping/index.ts @@ -0,0 +1,109 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import Long from 'long'; +import Mapper from './mapper'; +import { ModelBatchItem } from './model-batch-item'; +import ModelBatchMapper from "./model-batch-mapper"; +import ModelMapper from "./model-mapper"; +import { q } from "./q"; +import Result from "./result"; +import { DefaultTableMappings, TableMappings, UnderscoreCqlToCamelCaseMappings } from "./table-mappings"; + +type MappingOptions = { + models: { [key: string]: ModelOptions }; +} + +type ModelOptions = { + tables?: string[] | ModelTables[]; + mappings?: TableMappings; + columns?: { [key: string]: string|ModelColumnOptions }; + keyspace?: string; +} + +type ModelColumnOptions = { + name: string; + toModel?: (columnValue: any) => any; + fromModel?: (modelValue: any) => any; +}; + +interface ModelTables { + name: string; + isView: boolean; +} + +type MappingExecutionOptions = { + executionProfile?: string; + isIdempotent?: boolean; + logged?: boolean; + timestamp?: number | Long; + fetchSize?: number; + pageState?: number; +} + +type FindDocInfo = { + fields?: string[]; + orderBy?: { [key: string]: string }; + limit?: number; +} + +type InsertDocInfo = { + fields?: string[]; + ttl?: number; + ifNotExists?: boolean; +} + +type UpdateDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { [key: string]: any }; + orderBy?: { [key: string]: string }; + limit?: number; + deleteOnlyColumns?: boolean; +} + +type RemoveDocInfo = { + fields?: string[]; + ttl?: number; + ifExists?: boolean; + when?: { [key: string]: any }; + deleteOnlyColumns?: boolean; +} + + +/** + * Module containing classes and fields related to the Mapper. + * @module mapping + */ + +export { + DefaultTableMappings, Mapper, ModelBatchItem, ModelBatchMapper, ModelColumnOptions, ModelMapper, ModelTables, q, Result, + TableMappings, UnderscoreCqlToCamelCaseMappings, type FindDocInfo, + type InsertDocInfo, type MappingExecutionOptions, type MappingOptions, + type ModelOptions, type RemoveDocInfo, type UpdateDocInfo +}; + +export default { + Mapper, + ModelMapper, + ModelBatchMapper, + ModelBatchItem, + Result, + TableMappings, + DefaultTableMappings, + UnderscoreCqlToCamelCaseMappings, + q +}; \ No newline at end of file diff --git a/lib/mapping/mapper.js b/lib/mapping/mapper.ts similarity index 80% rename from lib/mapping/mapper.js rename to lib/mapping/mapper.ts index 329c87f1c..323e5245f 100644 --- a/lib/mapping/mapper.js +++ b/lib/mapping/mapper.ts @@ -13,17 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import type { MappingExecutionOptions, MappingOptions } from "."; +import type Client from "../client"; +import errors from "../errors"; +import DocInfoAdapter from "./doc-info-adapter"; +import MappingHandler from "./mapping-handler"; +import { ModelBatchItem } from "./model-batch-item"; +import ModelMapper from "./model-mapper"; +import ModelMappingInfo from "./model-mapping-info"; +import Result from "./result"; +import ResultMapper from "./result-mapper"; -'use strict'; -const ModelMapper = require('./model-mapper'); -const MappingHandler = require('./mapping-handler'); -const DocInfoAdapter = require('./doc-info-adapter'); -const errors = require('../errors'); -const Result = require('./result'); -const ResultMapper = require('./result-mapper'); -const ModelMappingInfo = require('./model-mapping-info'); -const { ModelBatchItem } = require('./model-batch-item'); /** * Represents an object mapper for Apache Cassandra and DataStax Enterprise. 
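// Illustrative sketch of how the MappingOptions/ModelOptions types consolidated above are
// consumed. The model name, keyspace, table and column names are invented for the example,
// and the import assumes the published 'cassandra-driver' entry point.
import { Client, mapping } from 'cassandra-driver';

const client = new Client({ contactPoints: ['127.0.0.1'], localDataCenter: 'dc1' });

const mappingOptions: mapping.MappingOptions = {
  models: {
    Video: {
      tables: ['videos'],
      keyspace: 'examples',
      // Translates snake_case column names (added_date) to camelCase properties (addedDate).
      mappings: new mapping.UnderscoreCqlToCamelCaseMappings(),
      columns: { userid: 'userId' }
    }
  }
};

const mapper = new mapping.Mapper(client, mappingOptions);
const videoMapper = mapper.forModel('Video');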
@@ -57,13 +58,16 @@ const { ModelBatchItem } = require('./model-batch-item'); * const mapper = new Mapper(client, mappingOptions); */ class Mapper { + private client: Client; + private _modelMappingInfos: Map; + private _modelMappers: Map; /** * Creates a new instance of Mapper. * @param {Client} client The Client instance to use to execute the queries and fetch the metadata. * @param {MappingOptions} [options] The [MappingOptions]{@link module:mapping~MappingOptions} containing the * information of the models and table mappings. */ - constructor(client, options) { + constructor(client: Client, options?: MappingOptions) { if (!client) { throw new Error('client must be defined'); } @@ -84,7 +88,7 @@ class Mapper { * @param {String} name The name to identify the model. Note that the name is case-sensitive. * @returns {ModelMapper} A [ModelMapper]{@link module:mapping~ModelMapper} instance. */ - forModel(name) { + forModel(name: string): ModelMapper { let modelMapper = this._modelMappers.get(name); if (modelMapper === undefined) { @@ -134,7 +138,7 @@ class Mapper { * unix epoch (00:00:00, January 1st, 1970). * @returns {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result}. */ - batch(items, executionOptions) { + batch(items: Array, executionOptions: string | MappingExecutionOptions): Promise { if (!Array.isArray(items) || !(items.length > 0)) { return Promise.reject( new errors.ArgumentError('First parameter items should be an Array with 1 or more ModelBatchItem instances')); @@ -171,23 +175,4 @@ class Mapper { } } -/** - * Represents the mapping options. - * @typedef {Object} module:mapping~MappingOptions - * @property {Object} models An associative array containing the - * name of the model as key and the table and column information as value. - */ - -/** - * Represents a set of options that applies to a certain model. - * @typedef {Object} module:mapping~ModelOptions - * @property {Array|Array<{name, isView}>} tables An Array containing the name of the tables or An Array - * containing the name and isView property to describe the table. - * @property {TableMappings} mappings The TableMappings implementation instance that is used to convert from column - * names to property names and the other way around. - * @property {Object.} [columnNames] An associative array containing the name of the columns and - * properties that doesn't follow the convention defined in the TableMappings. - * @property {String} [keyspace] The name of the keyspace. Only mandatory when the Client is not using a keyspace. - */ - -module.exports = Mapper; \ No newline at end of file +export default Mapper; \ No newline at end of file diff --git a/lib/mapping/mapping-handler.js b/lib/mapping/mapping-handler.ts similarity index 87% rename from lib/mapping/mapping-handler.js rename to lib/mapping/mapping-handler.ts index f5bd83177..ae4dcda31 100644 --- a/lib/mapping/mapping-handler.js +++ b/lib/mapping/mapping-handler.ts @@ -13,29 +13,33 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import type { FindDocInfo, InsertDocInfo, RemoveDocInfo, UpdateDocInfo } from "."; +import type Client from "../client"; +import utils from "../utils"; +import Cache from "./cache"; +import DocInfoAdapter from "./doc-info-adapter"; +import type ModelMappingInfo from "./model-mapping-info"; +import ObjectSelector from "./object-selector"; +import QueryGenerator from "./query-generator"; +import Result from "./result"; +import ResultMapper from "./result-mapper"; +import Tree from "./tree"; -'use strict'; - -const utils = require('../utils'); -const QueryGenerator = require('./query-generator'); -const ResultMapper = require('./result-mapper'); -const Result = require('./result'); -const Cache = require('./cache'); -const Tree = require('./tree'); -const ObjectSelector = require('./object-selector'); -const DocInfoAdapter = require('./doc-info-adapter'); const cacheHighWaterMark = 100; /** - * @ignore + * @ignore @internal */ class MappingHandler { + private _client: Client; + private _cache: { select: Tree; selectAll: Tree; insert: Tree; update: Tree; remove: Tree; customQueries: Map; }; + info: ModelMappingInfo; /** * @param {Client} client * @param {ModelMappingInfo} mappingInfo */ - constructor(client, mappingInfo) { + constructor(client: Client, mappingInfo: ModelMappingInfo) { this._client = client; this._cache = { select: new Tree().on('add', length => this._validateCacheLength(length)), @@ -61,7 +65,7 @@ class MappingHandler { * be valid. * @return {Promise} */ - getSelectExecutor(doc, docInfo, allPKsDefined) { + getSelectExecutor(doc: object, docInfo: FindDocInfo, allPKsDefined: boolean): Promise { const docKeys = Object.keys(doc); if (docKeys.length === 0) { return Promise.reject(new Error('Expected object with keys')); @@ -77,7 +81,7 @@ class MappingHandler { } const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, null, doc, this.info); - const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray, docInfo, doc, this.info); + const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray as any[], docInfo, doc, this.info); const orderByColumns = DocInfoAdapter.adaptOrderBy(docInfo, this.info); const limit = docInfo && docInfo.limit; @@ -107,7 +111,7 @@ class MappingHandler { return cacheItem.executor; } - const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray, docInfo, utils.emptyObject, this.info); + const fieldsInfo = DocInfoAdapter.getPropertiesInfo(utils.emptyArray as any[], docInfo, utils.emptyObject, this.info); const orderByColumns = DocInfoAdapter.adaptOrderBy(docInfo, this.info); const limit = docInfo && docInfo.limit; @@ -115,7 +119,7 @@ class MappingHandler { // Part of the closure const query = QueryGenerator.getSelect( - tableName, this.info.keyspace, utils.emptyArray, fieldsInfo, orderByColumns, limit); + tableName, this.info.keyspace, utils.emptyArray as any[], fieldsInfo, orderByColumns, limit); const paramsGetter = QueryGenerator.selectParamsGetter(utils.emptyArray, limit); const self = this; @@ -131,7 +135,7 @@ class MappingHandler { * When a result adapter is not yet created, it gets a new one and caches it. 
* @private */ - _executeSelect(query, paramsGetter, doc, docInfo, executionOptions, cacheItem) { + private _executeSelect(query, paramsGetter, doc, docInfo, executionOptions, cacheItem) { const options = DocInfoAdapter.adaptAllOptions(executionOptions, true); return this._client.execute(query, paramsGetter(doc, docInfo, this.info), options) @@ -149,7 +153,7 @@ class MappingHandler { * @param {{ifNotExists, ttl, fields}} docInfo * @return {Promise} */ - getInsertExecutor(doc, docInfo) { + getInsertExecutor(doc: object, docInfo: InsertDocInfo): Promise { const docKeys = Object.keys(doc); if (docKeys.length === 0) { return Promise.reject(new Error('Expected object with keys')); @@ -179,7 +183,7 @@ class MappingHandler { * @param {{ifNotExists, ttl, fields}} docInfo * @returns {Promise>} */ - createInsertQueries(docKeys, doc, docInfo) { + createInsertQueries(docKeys: Array, doc: object, docInfo: InsertDocInfo): Promise> { const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); const ifNotExists = docInfo && docInfo.ifNotExists; @@ -204,7 +208,7 @@ class MappingHandler { * @param {{ifExists, when, ttl, fields}} docInfo * @return {Promise} */ - getUpdateExecutor(doc, docInfo) { + getUpdateExecutor(doc: object, docInfo: UpdateDocInfo): Promise { const docKeys = Object.keys(doc); if (docKeys.length === 0) { return Promise.reject(new Error('Expected object with keys')); @@ -234,7 +238,7 @@ class MappingHandler { * @param {Object} docInfo * @returns {Promise>} */ - createUpdateQueries(docKeys, doc, docInfo) { + createUpdateQueries(docKeys: Array, doc: object, docInfo: UpdateDocInfo): Promise> { const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); const ifExists = docInfo && docInfo.ifExists; const when = docInfo && docInfo.when @@ -247,7 +251,7 @@ class MappingHandler { // Get all the tables affected return this._client.connect() - .then(() => ObjectSelector.getForUpdate(this._client, this.info, propertiesInfo, when)) + .then(() => ObjectSelector.getForUpdate(this._client, this.info, propertiesInfo, when as any[])) .then(tables => { if (tables.length > 1 && (when.length > 0 || ifExists)) { @@ -256,7 +260,7 @@ class MappingHandler { // For each table affected, Generate query and parameter getters return tables.map(table => - QueryGenerator.getUpdate(table, this.info.keyspace, propertiesInfo, docInfo, when, ifExists)); + QueryGenerator.getUpdate(table, this.info.keyspace, propertiesInfo, docInfo, when as any[], ifExists)); }); } @@ -266,7 +270,7 @@ class MappingHandler { * @param {{when, ifExists, fields, deleteOnlyColumns}} docInfo * @return {Promise} */ - getDeleteExecutor(doc, docInfo) { + getDeleteExecutor(doc: object, docInfo: RemoveDocInfo): Promise { const docKeys = Object.keys(doc); if (docKeys.length === 0) { return Promise.reject(new Error('Expected object with keys')); @@ -296,7 +300,7 @@ class MappingHandler { * @param {{when, ifExists, fields, deleteOnlyColumns}} docInfo * @returns {Promise>} */ - createDeleteQueries(docKeys, doc, docInfo) { + createDeleteQueries(docKeys: Array, doc: object, docInfo: RemoveDocInfo): Promise> { const propertiesInfo = DocInfoAdapter.getPropertiesInfo(docKeys, docInfo, doc, this.info); const ifExists = docInfo && docInfo.ifExists; const when = docInfo && docInfo.when @@ -309,7 +313,7 @@ class MappingHandler { // Get all the tables affected return this._client.connect() - .then(() => ObjectSelector.getForDelete(this._client, this.info, propertiesInfo, when)) + .then(() => 
ObjectSelector.getForDelete(this._client, this.info, propertiesInfo, when as any[])) .then(tables => { if (tables.length > 1 && (when.length > 0 || ifExists)) { @@ -318,7 +322,7 @@ class MappingHandler { // For each tables affected, Generate query and parameter getters return tables.map(table => - QueryGenerator.getDelete(table, this.info.keyspace, propertiesInfo, docInfo, when, ifExists)); + QueryGenerator.getDelete(table, this.info.keyspace, propertiesInfo, docInfo, when as any[], ifExists)); }); } @@ -360,7 +364,7 @@ class MappingHandler { }); } - _setSingleExecutor(cacheItem, queryInfo) { + private _setSingleExecutor(cacheItem, queryInfo) { // Parameters and this instance are part of the closure const self = this; @@ -375,7 +379,7 @@ class MappingHandler { return cacheItem.executor; } - _setBatchExecutor(cacheItem, queries) { + private _setBatchExecutor(cacheItem, queries) { // Parameters and the following fields are part of the closure const self = this; const isIdempotent = queries.reduce((acc, q) => acc && q.isIdempotent, true); @@ -398,7 +402,7 @@ class MappingHandler { return cacheItem.executor; } - _validateCacheLength(length) { + private _validateCacheLength(length) { if (length !== cacheHighWaterMark) { return; } @@ -409,4 +413,4 @@ class MappingHandler { } } -module.exports = MappingHandler; \ No newline at end of file +export default MappingHandler; \ No newline at end of file diff --git a/lib/mapping/model-batch-item.js b/lib/mapping/model-batch-item.ts similarity index 77% rename from lib/mapping/model-batch-item.js rename to lib/mapping/model-batch-item.ts index f04528e50..823f2d5dc 100644 --- a/lib/mapping/model-batch-item.js +++ b/lib/mapping/model-batch-item.ts @@ -13,23 +13,34 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import type { FindDocInfo, InsertDocInfo, RemoveDocInfo, UpdateDocInfo } from "."; +import Cache from "./cache"; +import type MappingHandler from "./mapping-handler"; +import type Tree from "./tree"; -'use strict'; - -const Cache = require('./cache'); +type DocInfo = FindDocInfo | UpdateDocInfo | InsertDocInfo | RemoveDocInfo /** * Represents a query or a set of queries used to perform a mutation in a batch. * @alias module:mapping~ModelBatchItem */ class ModelBatchItem { + /** @internal */ + doc: object; + /** @internal */ + docInfo: DocInfo; + /** @internal */ + handler: MappingHandler; + /** @internal */ + cache: Tree; /** + * @internal * @param {Object} doc * @param {Object} docInfo * @param {MappingHandler} handler * @param {Tree} cache */ - constructor(doc, docInfo, handler, cache) { + constructor(doc: object, docInfo: DocInfo, handler: MappingHandler, cache: Tree) { this.doc = doc; this.docInfo = docInfo; this.handler = handler; @@ -37,7 +48,7 @@ class ModelBatchItem { } /** - * @ignore + * @ignore @internal * @returns > */ getQueries() { @@ -54,20 +65,24 @@ class ModelBatchItem { /** * Gets the cache key for this item. * @abstract + * @internal * @param {Array} docKeys * @returns {Iterator} */ - getCacheKey(docKeys) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getCacheKey(docKeys: Array): Iterator { throw new Error('getCacheKey must be implemented'); } /** * Gets the Promise to create the queries. 
* @abstract + * @internal * @param {Array} docKeys * @returns {Promise} */ - createQueries(docKeys) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + createQueries(docKeys: Array): Promise> { throw new Error('getCacheKey must be implemented'); } @@ -78,7 +93,7 @@ class ModelBatchItem { * @param {Array} arr * @return {Promise<{isIdempotent, isCounter}>} */ - pushQueries(arr) { + pushQueries(arr: Array): Promise<{ isIdempotent; isCounter; }> { let isIdempotent = true; let isCounter; @@ -119,7 +134,7 @@ class InsertModelBatchItem extends ModelBatchItem { * @param {MappingHandler} handler * @param {Tree} cache */ - constructor(doc, docInfo, handler, cache) { + constructor(doc: object, docInfo: InsertDocInfo, handler: MappingHandler, cache: Tree) { super(doc, docInfo, handler, cache); } @@ -146,7 +161,7 @@ class UpdateModelBatchItem extends ModelBatchItem { * @param {MappingHandler} handler * @param {Tree} cache */ - constructor(doc, docInfo, handler, cache) { + constructor(doc: object, docInfo: UpdateDocInfo, handler: MappingHandler, cache: Tree) { super(doc, docInfo, handler, cache); } @@ -173,7 +188,7 @@ class RemoveModelBatchItem extends ModelBatchItem { * @param {MappingHandler} handler * @param {Tree} cache */ - constructor(doc, docInfo, handler, cache) { + constructor(doc: object, docInfo: RemoveDocInfo, handler: MappingHandler, cache: Tree) { super(doc, docInfo, handler, cache); } @@ -188,4 +203,4 @@ class RemoveModelBatchItem extends ModelBatchItem { } } -module.exports = { ModelBatchItem, InsertModelBatchItem, UpdateModelBatchItem, RemoveModelBatchItem }; \ No newline at end of file +export { InsertModelBatchItem, ModelBatchItem, RemoveModelBatchItem, UpdateModelBatchItem }; diff --git a/lib/mapping/model-batch-mapper.js b/lib/mapping/model-batch-mapper.ts similarity index 90% rename from lib/mapping/model-batch-mapper.js rename to lib/mapping/model-batch-mapper.ts index 4928072c8..23f60ebd6 100644 --- a/lib/mapping/model-batch-mapper.js +++ b/lib/mapping/model-batch-mapper.ts @@ -13,20 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -'use strict'; - -const Tree = require('./tree'); -const moduleBatchItemModule = require('./model-batch-item'); -const InsertModelBatchItem = moduleBatchItemModule.InsertModelBatchItem; -const UpdateModelBatchItem = moduleBatchItemModule.UpdateModelBatchItem; -const RemoveModelBatchItem = moduleBatchItemModule.RemoveModelBatchItem; +import type { InsertDocInfo, RemoveDocInfo, UpdateDocInfo } from "."; +import type MappingHandler from "./mapping-handler"; +import { InsertModelBatchItem, ModelBatchItem, RemoveModelBatchItem, UpdateModelBatchItem } from "./model-batch-item"; +import Tree from "./tree"; /** * Provides utility methods to group multiple mutations on a single batch. * @alias module:mapping~ModelBatchMapper */ class ModelBatchMapper { + private _handler: MappingHandler; + private _cache: { insert: Tree; update: Tree; remove: Tree; }; /** * Creates a new instance of model batch mapper. *

@@ -35,9 +33,9 @@ class ModelBatchMapper { * constructor. *

* @param {MappingHandler} handler - * @ignore + * @ignore @internal */ - constructor(handler) { + constructor(handler: MappingHandler) { this._handler = handler; this._cache = { insert: new Tree(), @@ -60,7 +58,7 @@ class ModelBatchMapper { * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query * or a set of queries to be included in a batch. */ - insert(doc, docInfo) { + insert(doc: object, docInfo: InsertDocInfo): ModelBatchItem { return new InsertModelBatchItem(doc, docInfo, this._handler, this._cache.insert); } @@ -86,7 +84,7 @@ class ModelBatchMapper { * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query * or a set of queries to be included in a batch. */ - update(doc, docInfo) { + update(doc: object, docInfo: UpdateDocInfo): ModelBatchItem { return new UpdateModelBatchItem(doc, docInfo, this._handler, this._cache.update); } @@ -117,9 +115,9 @@ class ModelBatchMapper { * @returns {ModelBatchItem} A [ModelBatchItem]{@link module:mapping~ModelBatchItem} instance representing a query * or a set of queries to be included in a batch. */ - remove(doc, docInfo) { + remove(doc: object, docInfo: RemoveDocInfo): ModelBatchItem { return new RemoveModelBatchItem(doc, docInfo, this._handler, this._cache.update); } } -module.exports = ModelBatchMapper; \ No newline at end of file +export default ModelBatchMapper; \ No newline at end of file diff --git a/lib/mapping/model-mapper.js b/lib/mapping/model-mapper.ts similarity index 91% rename from lib/mapping/model-mapper.js rename to lib/mapping/model-mapper.ts index 8a4d04194..b9aae5c5d 100644 --- a/lib/mapping/model-mapper.js +++ b/lib/mapping/model-mapper.ts @@ -13,28 +13,32 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import type { FindDocInfo, InsertDocInfo, MappingExecutionOptions, RemoveDocInfo, Result, UpdateDocInfo } from "."; +import ModelBatchMapper from "./model-batch-mapper"; -'use strict'; -const ModelBatchMapper = require('./model-batch-mapper'); /** * Represents an object mapper for a specific model. * @alias module:mapping~ModelMapper */ -class ModelMapper { +class ModelMapper { + /** + * Gets the name identifier of the model. + * @type {String} + */ + name: string; + private _handler: any; + /** + * Gets a [ModelBatchMapper]{@link module:mapping~ModelBatchMapper} instance containing utility methods to group + * multiple doc mutations in a single batch. + * @type {ModelBatchMapper} + */ + batching: ModelBatchMapper; + /** @internal */ constructor(name, handler) { - /** - * Gets the name identifier of the model. - * @type {String} - */ this.name = name; this._handler = handler; - /** - * Gets a [ModelBatchMapper]{@link module:mapping~ModelBatchMapper} instance containing utility methods to group - * multiple doc mutations in a single batch. 
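// Illustrative sketch: batching.insert()/remove() return ModelBatchItem instances that
// Mapper#batch() executes as a single batch. The Mapper/ModelMapper arguments and the
// document fields are assumptions for the example.
import { mapping, types } from 'cassandra-driver';

async function replaceVideo(mapper: mapping.Mapper, videoMapper: mapping.ModelMapper,
                            oldId: types.Uuid, name: string) {
  const newId = types.Uuid.random();
  const result = await mapper.batch(
    [
      videoMapper.batching.insert({ id: newId, name }),
      videoMapper.batching.remove({ id: oldId })
    ],
    { logged: true }
  );
  return { newId, applied: result.wasApplied() };
}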
- * @type {ModelBatchMapper} - */ this.batching = new ModelBatchMapper(this._handler); } @@ -56,7 +60,7 @@ class ModelMapper { * @example Get a video by id, selecting specific columns * videoMapper.get({ id }, fields: ['name', 'description']) */ - get(doc, docInfo, executionOptions) { + get(doc: { [key: string]: any }, docInfo?: { fields?: string[] }, executionOptions?: string | MappingExecutionOptions): Promise { if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -94,7 +98,7 @@ class ModelMapper { * @example Get user's videos in reverse order * videoMapper.find({ userId }, { orderBy: { addedDate: 'desc' }}); */ - find(doc, docInfo, executionOptions) { + find(doc: { [key: string]: any }, docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise> { if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -125,7 +129,7 @@ class ModelMapper { *

When provided, the query will be executed starting from a given paging state.
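// Illustrative sketch of the fetchSize/pageState execution options described here: fetch a
// single page and optionally resume from a paging state captured in a previous execution.
// The ModelMapper argument and field names are assumptions; the paging state is typed
// loosely because it is treated as an opaque token.
import { mapping } from 'cassandra-driver';

async function fetchVideosPage(videoMapper: mapping.ModelMapper, previousPageState?: any) {
  return await videoMapper.findAll(
    { fields: ['id', 'name'], limit: 100 },
    { fetchSize: 100, pageState: previousPageState }
  );
}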

* @return {Promise} A Promise that resolves to a [Result]{@link module:mapping~Result} instance. */ - findAll(docInfo, executionOptions) { + findAll(docInfo?: FindDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>{ if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -166,7 +170,7 @@ class ModelMapper { * @example Insert a video * videoMapper.insert({ id, name }); */ - insert(doc, docInfo, executionOptions) { + insert(doc: { [key: string]: any }, docInfo?: InsertDocInfo, executionOptions?: string | MappingExecutionOptions): Promise> { if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -219,7 +223,7 @@ class ModelMapper { * @example Update the name of a video * videoMapper.update({ id, name }); */ - update(doc, docInfo, executionOptions) { + update(doc: { [key: string]: any }, docInfo?: UpdateDocInfo, executionOptions?: string | MappingExecutionOptions): Promise> { if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -269,7 +273,7 @@ class ModelMapper { * @example Delete a video * videoMapper.remove({ id }); */ - remove(doc, docInfo, executionOptions) { + remove(doc: { [key: string]: any }, docInfo?: RemoveDocInfo, executionOptions?: string | MappingExecutionOptions): Promise>{ if (executionOptions === undefined && typeof docInfo === 'string') { executionOptions = docInfo; docInfo = null; @@ -298,9 +302,13 @@ class ModelMapper { * @return {Function} Returns a function that takes the document and execution options as parameters and returns a * Promise the resolves to a [Result]{@link module:mapping~Result} instance. */ - mapWithQuery(query, paramsHandler, executionOptions) { + mapWithQuery( + query: string, + paramsHandler: (doc: any) => any[], + executionOptions?: string | MappingExecutionOptions + ): (doc: any, executionOptions?: string | MappingExecutionOptions) => Promise>{ return this._handler.getExecutorFromQuery(query, paramsHandler, executionOptions); } } -module.exports = ModelMapper; \ No newline at end of file +export default ModelMapper; \ No newline at end of file diff --git a/lib/mapping/model-mapping-info.js b/lib/mapping/model-mapping-info.ts similarity index 89% rename from lib/mapping/model-mapping-info.js rename to lib/mapping/model-mapping-info.ts index 23be0408d..1a374f4bd 100644 --- a/lib/mapping/model-mapping-info.js +++ b/lib/mapping/model-mapping-info.ts @@ -13,25 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -'use strict'; - -const tableMappingsModule = require('./table-mappings'); -const TableMappings = tableMappingsModule.TableMappings; -const DefaultTableMappings = tableMappingsModule.DefaultTableMappings; +import type { MappingOptions } from "."; +import { DefaultTableMappings, TableMappings } from "./table-mappings"; /** * Represents the parsed user information of the table mappings of a model. 
- * @ignore + * @ignore @internal */ class ModelMappingInfo { + keyspace: string; + tables: { name: any; isView: any; }[]; + _mappings: TableMappings; + _columns: Map; + _documentProperties: Map; /** * @param {String} keyspace * @param {Array<{name, isView}>} tables * @param {TableMappings} mappings * @param {Map} columns */ - constructor(keyspace, tables, mappings, columns) { + constructor(keyspace: string, tables: Array<{ name; isView; }>, mappings: TableMappings, columns: Map) { this.keyspace = keyspace; this.tables = tables; this._mappings = mappings; @@ -85,7 +86,7 @@ class ModelMappingInfo { * @param {String} currentKeyspace * @returns {Map} */ - static parse(options, currentKeyspace) { + static parse(options: MappingOptions, currentKeyspace: string): Map { const result = new Map(); if (!options || !options.models) { return result; @@ -160,7 +161,11 @@ class ModelMappingInfo { } class ModelColumnInfo { - constructor(columnName, propertyName, toModel, fromModel) { + columnName: any; + toModel: any; + fromModel: any; + propertyName: any; + constructor(columnName, propertyName, toModel?, fromModel?) { this.columnName = columnName; this.propertyName = propertyName; @@ -191,4 +196,4 @@ class ModelColumnInfo { } } -module.exports = ModelMappingInfo; \ No newline at end of file +export default ModelMappingInfo; \ No newline at end of file diff --git a/lib/mapping/object-selector.js b/lib/mapping/object-selector.ts similarity index 92% rename from lib/mapping/object-selector.js rename to lib/mapping/object-selector.ts index de25df627..259fd75fb 100644 --- a/lib/mapping/object-selector.js +++ b/lib/mapping/object-selector.ts @@ -14,7 +14,11 @@ * limitations under the License. */ -'use strict'; +import type Client from "../client"; +import type TableMetadata from "../metadata/table-metadata"; +import type ModelMappingInfo from "./model-mapping-info"; + + const keyMatches = { all: 1, @@ -24,7 +28,7 @@ const keyMatches = { /** * Provides utility methods to choose the correct tables and views that should be included in a statement. - * @ignore + * @ignore @internal */ class ObjectSelector { /** @@ -37,7 +41,7 @@ class ObjectSelector { * @param {Array>} orderByColumns * @return {Promise} A promise that resolves to a table names. */ - static getForSelect(client, info, allPKsDefined, propertiesInfo, fieldsInfo, orderByColumns) { + static getForSelect(client: Client, info: ModelMappingInfo, allPKsDefined: boolean, propertiesInfo: Array, fieldsInfo: Array, orderByColumns: Array>): Promise { return Promise.all( info.tables.map(t => { if (t.isView) { @@ -138,7 +142,7 @@ class ObjectSelector { * @param {Array} propertiesInfo * @return {Promise>} A promise that resolves to an Array of tables. */ - static getForInsert(client, info, propertiesInfo) { + static getForInsert(client: Client, info: ModelMappingInfo, propertiesInfo: Array): Promise> { return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) .then(tables => { const filteredTables = tables @@ -184,7 +188,7 @@ class ObjectSelector { * @param {Array} when * @return {Promise>} A promise that resolves to an Array of tables. 
*/ - static getForUpdate(client, info, propertiesInfo, when) { + static getForUpdate(client: Client, info: ModelMappingInfo, propertiesInfo: Array, when: Array): Promise> { return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) .then(tables => { const filteredTables = tables @@ -246,7 +250,7 @@ class ObjectSelector { * @param {Array} when * @return {Promise>} A promise that resolves to an Array of tables. */ - static getForDelete(client, info, propertiesInfo, when) { + static getForDelete(client: Client, info: ModelMappingInfo, propertiesInfo: Array, when: Array): Promise> { return Promise.all(info.tables.filter(t => !t.isView).map(t => client.metadata.getTable(info.keyspace, t.name))) .then(tables => { const filteredTables = tables @@ -295,7 +299,7 @@ function contains(arr, fn) { * @param {Array} keys * @param {Array} propertiesInfo */ -function keysAreIncluded(keys, propertiesInfo) { +function keysAreIncluded(keys: Array, propertiesInfo: Array) { if (keys.length === 0) { return keyMatches.all; } @@ -318,4 +322,4 @@ function staticColumnCount(table) { return table.columns.reduce((acc, column) => acc + (column.isStatic ? 1 : 0), 0); } -module.exports = ObjectSelector; +export default ObjectSelector; diff --git a/lib/mapping/q.js b/lib/mapping/q.ts similarity index 75% rename from lib/mapping/q.js rename to lib/mapping/q.ts index 7e4c8ce2d..44688fbf5 100644 --- a/lib/mapping/q.js +++ b/lib/mapping/q.ts @@ -13,24 +13,32 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import errors from "../errors"; -'use strict'; -const errors = require('../errors'); /** * Represents a CQL query operator, like >=, IN, <, ... * @ignore */ class QueryOperator { + /** @internal */ + key: string; + /** @internal */ + value: any; + /** @internal */ + hasChildValues: any; + /** @internal */ + isInOperator: any; /** * Creates a new instance of QueryOperator. + * @internal * @param {String} key * @param value * @param [hasChildValues] * @param [isInOperator] */ - constructor(key, value, hasChildValues, isInOperator) { + constructor(key: string, value, hasChildValues?, isInOperator?) { /** * The CQL key representing the operator * @type {string} @@ -59,7 +67,14 @@ class QueryOperator { * @ignore */ class QueryAssignment { - constructor(sign, value, inverted) { + /** @internal */ + sign: any; + /** @internal */ + value: any; + /** @internal */ + inverted: boolean; + /** @internal */ + constructor(sign, value, inverted?) { /** * Gets the sign of the assignment operation. 
*/ @@ -97,58 +112,68 @@ class QueryAssignment { * @property {function} remove Represents the CQL remove assignment used for collections, e.g: "col = col - x" */ const q = { - in_: function in_(arr) { + in_: function in_(arr:any):QueryOperator { if (!Array.isArray(arr)) { throw new errors.ArgumentError('IN operator supports only Array values'); } return new QueryOperator('IN', arr, false, true); }, - gt: function gt(value) { + gt: function gt(value:any):QueryOperator { return new QueryOperator('>', value); }, - gte: function gte(value) { + gte: function gte(value:any):QueryOperator { return new QueryOperator('>=', value); }, - lt: function lt(value) { + lt: function lt(value:any):QueryOperator { return new QueryOperator('<', value); }, - lte: function lte(value) { + lte: function lte(value:any):QueryOperator { return new QueryOperator('<=', value); }, - notEq: function notEq(value) { + notEq: function notEq(value:any):QueryOperator { return new QueryOperator('!=', value); }, - and: function (condition1, condition2) { + and: function (condition1:any, condition2:any):QueryOperator { return new QueryOperator('AND', [ condition1, condition2 ], true); }, - incr: function incr(value) { + //TODO: these had a return type of QueryOperator, but they are clearly returning QueryAssignment + // and QueryAssignment clearly does not extend QueryOperator + incr: function incr(value:any):QueryAssignment { return new QueryAssignment('+', value); }, - decr: function decr(value) { + decr: function decr(value:any):QueryAssignment { return new QueryAssignment('-', value); }, - append: function append(value) { + append: function append(value:any):QueryAssignment { return new QueryAssignment('+', value); }, - prepend: function prepend(value) { + prepend: function prepend(value:any):QueryAssignment { return new QueryAssignment('+', value, true); }, - remove: function remove(value) { + remove: function remove(value:any):QueryAssignment { return new QueryAssignment('-', value); } }; -exports.q = q; -exports.QueryAssignment = QueryAssignment; -exports.QueryOperator = QueryOperator; \ No newline at end of file +export default { + q, + QueryAssignment, + QueryOperator, +}; + +export { + q, + QueryAssignment, + QueryOperator +}; diff --git a/lib/mapping/query-generator.js b/lib/mapping/query-generator.ts similarity index 86% rename from lib/mapping/query-generator.js rename to lib/mapping/query-generator.ts index 83a105d03..b24db7822 100644 --- a/lib/mapping/query-generator.js +++ b/lib/mapping/query-generator.ts @@ -13,21 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import vm from "vm"; +import type { InsertDocInfo, RemoveDocInfo, UpdateDocInfo } from "."; +import type TableMetadata from "../metadata/table-metadata"; +import types from "../types/index"; +import qModule from "./q"; -'use strict'; -const vm = require('vm'); -const qModule = require('./q'); const QueryOperator = qModule.QueryOperator; const QueryAssignment = qModule.QueryAssignment; -const types = require('../types'); const dataTypes = types.dataTypes; const vmFileName = 'gen-param-getter.js'; /** * Provides methods to generate a query and parameter handlers. 
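// Usage sketch (illustrative only): how the q operators and assignments defined above are
// typically consumed through the Mapper API. `videoMapper` is an assumed ModelMapper instance
// and the document property names are hypothetical.
import { q } from "./q";

async function findRecentVideos(videoMapper: any, userId: string) {
  // Plain values translate to equality predicates; the q helpers wrap a value in a QueryOperator.
  const result = await videoMapper.find({
    userId,
    addedDate: q.gte(new Date("2020-01-01")), // generates "added_date >= ?"
    name: q.in_(["intro", "trailer"])         // generates "name IN ?"
  });

  // QueryAssignment helpers are used on update, e.g. for counter columns: "views = views + ?"
  await videoMapper.update({ userId, views: q.incr(1) });

  return result.toArray();
}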
- * @ignore + * @ignore @internal */ class QueryGenerator { /** @@ -40,7 +41,7 @@ class QueryGenerator { * @param {Number|undefined} limit * @return {string} */ - static getSelect(tableName, keyspace, propertiesInfo, fieldsInfo, orderByColumns, limit) { + static getSelect(tableName: string, keyspace: string, propertiesInfo: Array, fieldsInfo: Array, orderByColumns: Array, limit: number | undefined): string { let query = 'SELECT '; query += fieldsInfo.length > 0 ? fieldsInfo.map(p => `"${p.columnName}"`).join(', ') : '*'; query += ` FROM ${keyspace}.${tableName}`; @@ -91,7 +92,7 @@ class QueryGenerator { * @param {Boolean|undefined} ifNotExists * @return {{query: String, paramsGetter: Function, isIdempotent: Boolean}} */ - static getInsert(table, keyspace, propertiesInfo, docInfo, ifNotExists) { + static getInsert(table: TableMetadata, keyspace: string, propertiesInfo: Array, docInfo: InsertDocInfo, ifNotExists: boolean | undefined): { query: string; paramsGetter: Function; isIdempotent: boolean; } { const ttl = docInfo && docInfo.ttl; // Not all columns are contained in the table @@ -114,7 +115,7 @@ class QueryGenerator { * @param {Number|undefined} ttl * @return {String} */ - static _getInsertQuery(tableName, keyspace, propertiesInfo, ifNotExists, ttl) { + static _getInsertQuery(tableName: string, keyspace: string, propertiesInfo: Array, ifNotExists: boolean, ttl: number | undefined): string { let query = `INSERT INTO ${keyspace}.${tableName} (`; query += propertiesInfo.map(pInfo => `"${pInfo.columnName}"`).join(', '); query += ') VALUES ('; @@ -158,7 +159,7 @@ class QueryGenerator { * @param {Boolean|undefined} ifExists * @return {{query: String, paramsGetter: Function, isIdempotent: Boolean, isCounter}} */ - static getUpdate(table, keyspace, propertiesInfo, docInfo, when, ifExists) { + static getUpdate(table: TableMetadata, keyspace: string, propertiesInfo: Array, docInfo: UpdateDocInfo, when: Array, ifExists: boolean | undefined): { query: string; paramsGetter: Function; isIdempotent: boolean; isCounter; } { const ttl = docInfo && docInfo.ttl; const primaryKeys = new Set(table.partitionKeys.concat(table.clusteringKeys).map(c => c.name)); let isIdempotent = true; @@ -202,7 +203,7 @@ class QueryGenerator { * @param {Boolean} ifExists * @param {Number|undefined} ttl */ - static _getUpdateQuery(tableName, keyspace, primaryKeys, propertiesInfo, when, ifExists, ttl) { + static _getUpdateQuery(tableName: string, keyspace: string, primaryKeys: Set, propertiesInfo: Array, when: { [key: string]: any }, ifExists: boolean, ttl: number | undefined) { let query = `UPDATE ${keyspace}.${tableName} `; if (typeof ttl === 'number') { @@ -248,7 +249,7 @@ class QueryGenerator { * @param {Number|undefined} ttl * @returns {Function} */ - static _updateParamsGetter(primaryKeys, propertiesInfo, when, ttl) { + static _updateParamsGetter(primaryKeys: Set, propertiesInfo: Array, when: Array, ttl: number | undefined): Function { let scriptText = '(function getParametersUpdate(doc, docInfo, mappingInfo) {\n'; scriptText += ' return ['; @@ -285,7 +286,7 @@ class QueryGenerator { * @param {Boolean|undefined} ifExists * @return {{query: String, paramsGetter: Function, isIdempotent}} */ - static getDelete(table, keyspace, propertiesInfo, docInfo, when, ifExists) { + static getDelete(table: TableMetadata, keyspace: string, propertiesInfo: Array, docInfo: RemoveDocInfo, when: Array, ifExists: boolean | undefined): { query: string; paramsGetter: Function; isIdempotent; } { const deleteOnlyColumns = docInfo && 
docInfo.deleteOnlyColumns; const primaryKeys = new Set(table.partitionKeys.concat(table.clusteringKeys).map(c => c.name)); @@ -313,7 +314,7 @@ class QueryGenerator { * @private * @return {String} */ - static _getDeleteQuery(tableName, keyspace, primaryKeys, propertiesInfo, when, ifExists, deleteOnlyColumns) { + static _getDeleteQuery(tableName: string, keyspace: string, primaryKeys: Set, propertiesInfo: Array, when: Array, ifExists: boolean, deleteOnlyColumns: boolean): string { let query = 'DELETE'; if (deleteOnlyColumns) { @@ -345,7 +346,7 @@ class QueryGenerator { * @param {Array} when * @returns {Function} */ - static _deleteParamsGetter(primaryKeys, propertiesInfo, when) { + static _deleteParamsGetter(primaryKeys: Set, propertiesInfo: Array, when: Array): Function { let scriptText = '(function getParametersDelete(doc, docInfo, mappingInfo) {\n'; scriptText += ' return ['; @@ -371,7 +372,7 @@ class QueryGenerator { * @return {string} * @private */ - static _valueGetterExpression(propertiesInfo, objectName) { + static _valueGetterExpression(propertiesInfo: Array, objectName?: string): string { objectName = objectName || 'doc'; return propertiesInfo @@ -407,7 +408,7 @@ class QueryGenerator { * @return {string} * @private */ - static _assignmentGetterExpression(propertiesInfo, prefix) { + static _assignmentGetterExpression(propertiesInfo: Array, prefix?: string): string { prefix = prefix || 'doc'; return propertiesInfo @@ -443,4 +444,4 @@ class QueryGenerator { } } -module.exports = QueryGenerator; \ No newline at end of file +export default QueryGenerator; \ No newline at end of file diff --git a/lib/mapping/result-mapper.js b/lib/mapping/result-mapper.ts similarity index 86% rename from lib/mapping/result-mapper.js rename to lib/mapping/result-mapper.ts index c364e5a24..52aa0ae3d 100644 --- a/lib/mapping/result-mapper.js +++ b/lib/mapping/result-mapper.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import vm from "vm"; +import types, { ResultSet } from "../types/index"; +import utils from "../utils"; +import type ModelMappingInfo from "./model-mapping-info"; -'use strict'; -const vm = require('vm'); -const utils = require('../utils'); -const types = require('../types'); /** - * @ignore + * @ignore @internal */ class ResultMapper { /** @@ -30,7 +30,7 @@ class ResultMapper { * @param {ResultSet} rs * @returns {Function} */ - static getSelectAdapter(info, rs) { + static getSelectAdapter(info: ModelMappingInfo, rs: ResultSet): Function { const columns = rs.columns; if (!columns) { throw new Error('Expected ROWS result obtained VOID'); @@ -60,7 +60,7 @@ class ResultMapper { * @param {ResultSet} rs * @returns {Function} */ - static getMutationAdapter(rs) { + static getMutationAdapter(rs: ResultSet): Function { if (rs.columns === null) { // VOID result return utils.noop; @@ -94,7 +94,7 @@ class ResultMapper { * @param {ResultSet} rs * @returns {{canCache: Boolean, fn: Function}} */ - static getCustomQueryAdapter(info, rs) { + static getCustomQueryAdapter(info: ModelMappingInfo, rs: ResultSet): { canCache: boolean; fn: Function; } { if (rs.columns === null || rs.columns.length === 0) { // VOID result return { canCache: true, fn: utils.noop }; @@ -109,4 +109,4 @@ class ResultMapper { } } -module.exports = ResultMapper; \ No newline at end of file +export default ResultMapper; \ No newline at end of file diff --git a/lib/mapping/result.js b/lib/mapping/result.ts similarity index 72% rename from lib/mapping/result.js rename to lib/mapping/result.ts index 6f1ead4d6..73c739d82 100644 --- a/lib/mapping/result.js +++ b/lib/mapping/result.ts @@ -13,25 +13,39 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import util from "util"; +import type { ResultSet } from "../types"; +import utils from "../utils"; +import type ModelMappingInfo from "./model-mapping-info"; -'use strict'; -const util = require('util'); -const utils = require('../utils'); -const inspectMethod = util.inspect.custom || 'inspect'; +// @ts-ignore +const inspectMethod : unique symbol = util.inspect.custom || 'inspect'; +//TODO: It was interface Result extends Iterator in the previous .d.ts. +// But it didn't have the method of next(); So I added it there /** * Represents the result of an execution as an iterable of objects in the Mapper. * @alias module:mapping~Result */ -class Result { +class Result implements IterableIterator { + private _rs: ResultSet; + private _info: ModelMappingInfo; + private _rowAdapter: Function; + private _isEmptyLwt: boolean; + private _iteratorIndex: number; + /** @internal */ + length: number; + /** @internal */ + pageState: string; /** * Creates a new instance of Result. * @param {ResultSet} rs * @param {ModelMappingInfo} info * @param {Function} rowAdapter + * @ignore @internal */ - constructor(rs, info, rowAdapter) { + constructor(rs: ResultSet, info: ModelMappingInfo, rowAdapter: Function) { this._rs = rs; this._info = info; this._rowAdapter = rowAdapter; @@ -63,6 +77,7 @@ class Result { * @default null */ this.pageState = rs.pageState; + this._iteratorIndex = 0; } /** @@ -75,14 +90,14 @@ class Result { * information whether it was applied or not. *

*/ - wasApplied() { + wasApplied(): boolean { return this._rs.wasApplied(); } /** * Gets the first document in this result or null when the result is empty. */ - first() { + first(): T | null { if (!this._rs.rowLength || this._isEmptyLwt) { return null; } @@ -92,7 +107,7 @@ class Result { /** * Returns a new Iterator object that contains the document values. */ - *[Symbol.iterator]() { + *[Symbol.iterator](): IterableIterator { if (this._isEmptyLwt) { // Empty iterator return; @@ -105,11 +120,11 @@ class Result { /** * Converts the current instance to an Array of documents. - * @return {Array} + * @return {Array} */ - toArray() { + toArray(): T[] { if (this._isEmptyLwt || !this._rs.rows) { - return utils.emptyArray; + return utils.emptyArray as any[]; } return this._rs.rows.map(row => this._rowAdapter(row, this._info)); @@ -120,7 +135,7 @@ class Result { * @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index. * @param {Object} [thisArg] Value to use as this when executing callback. */ - forEach(callback, thisArg) { + forEach(callback: (currentValue: T, index: number) => void, thisArg: any):void { let index = 0; thisArg = thisArg || this; for (const doc of this) { @@ -131,6 +146,16 @@ class Result { [inspectMethod]() { return this.toArray(); } + + next(): {done: boolean; value: T} { + if(this._isEmptyLwt) { + return {done: true, value: undefined}; + } + if (this._iteratorIndex >= this._rs.rows.length) { + return {done: true, value: undefined}; + } + return {done: false, value: this._rowAdapter(this._rs.rows[this._iteratorIndex++], this._info) as T}; + } } -module.exports = Result; \ No newline at end of file +export default Result; \ No newline at end of file diff --git a/lib/mapping/table-mappings.js b/lib/mapping/table-mappings.ts similarity index 82% rename from lib/mapping/table-mappings.js rename to lib/mapping/table-mappings.ts index 06010a1c0..e19f59f72 100644 --- a/lib/mapping/table-mappings.js +++ b/lib/mapping/table-mappings.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -'use strict'; + /** * Contains a set of methods to represent a row into a document and a document into a row. @@ -26,7 +26,7 @@ class TableMappings { * Method that is called by the mapper to create the instance of the document. * @return {Object} */ - newObjectInstance() { + newObjectInstance(): object { return {}; } @@ -35,7 +35,7 @@ class TableMappings { * @param {String} propName The name of the property. * @returns {String} */ - getColumnName(propName) { + getColumnName(propName: string): string { return propName; } @@ -44,7 +44,7 @@ class TableMappings { * @param {String} columnName The name of the column. * @returns {String} */ - getPropertyName(columnName) { + getPropertyName(columnName: string): string { return columnName; } } @@ -72,8 +72,8 @@ class UnderscoreCqlToCamelCaseMappings extends TableMappings { * @param {String} propName Name of the property to convert to snake case. * @return {String} */ - getColumnName(propName) { - return propName.replace(/[a-z][A-Z]/g, (match, offset) => match.charAt(0) + '_' + match.charAt(1)).toLowerCase(); + getColumnName(propName: string): string { + return propName.replace(/[a-z][A-Z]/g, (match, _offset) => match.charAt(0) + '_' + match.charAt(1)).toLowerCase(); } /** @@ -81,7 +81,7 @@ class UnderscoreCqlToCamelCaseMappings extends TableMappings { * @param {String} columnName The column name to convert to camel case. 
* @return {String} */ - getPropertyName(columnName) { + getPropertyName(columnName: string): string { return columnName.replace(/_[a-z]/g, (match, offset) => ((offset === 0) ? match : match.substr(1).toUpperCase())); } } @@ -100,23 +100,30 @@ class DefaultTableMappings extends TableMappings { } /** @override */ - getColumnName(propName) { + getColumnName(propName: string): string { return super.getColumnName(propName); } /** @override */ - getPropertyName(columnName) { + getPropertyName(columnName: string): string { return super.getPropertyName(columnName); } /** * Creates a new object instance, using object initializer. */ - newObjectInstance() { + newObjectInstance(): object { return super.newObjectInstance(); } } -exports.TableMappings = TableMappings; -exports.UnderscoreCqlToCamelCaseMappings = UnderscoreCqlToCamelCaseMappings; -exports.DefaultTableMappings = DefaultTableMappings; \ No newline at end of file +export default { + TableMappings, + UnderscoreCqlToCamelCaseMappings, + DefaultTableMappings +}; + +export { + DefaultTableMappings, TableMappings, + UnderscoreCqlToCamelCaseMappings +}; diff --git a/lib/mapping/tree.js b/lib/mapping/tree.ts similarity index 88% rename from lib/mapping/tree.js rename to lib/mapping/tree.ts index e6adece9a..5402e97c5 100644 --- a/lib/mapping/tree.js +++ b/lib/mapping/tree.ts @@ -13,23 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import EventEmitter from "events"; -'use strict'; -const EventEmitter = require('events'); /** * Represents a tree node where the key is composed by 1 or more strings. - * @ignore + * @ignore @internal */ class Node extends EventEmitter { + key: string[]; + value: object; + edges: any[]; /** * Creates a new instance of {@link Node}. * @param {Array} key * @param {Object} value * @param {Array} [edges] */ - constructor(key, value, edges) { + constructor(key: Array, value: object, edges?: Array) { super(); this.key = key; this.value = value; @@ -39,9 +41,10 @@ class Node extends EventEmitter { /** * A radix tree where each node contains a key, a value and edges. 
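// Usage sketch (illustrative only): the TableMappings implementations above differ solely in how
// they translate names between CQL columns and document properties.
import { DefaultTableMappings, UnderscoreCqlToCamelCaseMappings } from "./table-mappings";

const camelCase = new UnderscoreCqlToCamelCaseMappings();
console.log(camelCase.getColumnName("addedDate"));    // "added_date"
console.log(camelCase.getPropertyName("added_date")); // "addedDate"

const identity = new DefaultTableMappings();
console.log(identity.getColumnName("added_date"));    // "added_date" (names passed through unchanged)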
- * @ignore + * @ignore @internal */ class Tree extends Node { + length: number; constructor() { super([], null); this.length = 0; @@ -53,11 +56,11 @@ class Tree extends Node { * @param {Function} valueHandler * @return {Object} */ - getOrCreate(keyIterator, valueHandler) { + getOrCreate(keyIterator: Iterator, valueHandler: () => T ): T { if (typeof keyIterator.next !== 'function') { keyIterator = keyIterator[Symbol.iterator](); } - let node = this; + let node : Tree = this; let isMatch = false; let item = keyIterator.next(); while (true) { @@ -112,10 +115,10 @@ class Tree extends Node { if (node.value === null && node.edges.length > 0) { node.value = valueHandler(); } - return node.value; + return node.value as T; } - _createBranch(node, index, useNewValue, valueHandler) { + private _createBranch(node, index, useNewValue, valueHandler) { const newBranch = new Node(node.key.slice(index), node.value, node.edges); node.key = node.key.slice(0, index); node.edges = [ newBranch ]; @@ -148,4 +151,4 @@ function iteratorToArray(value, iterator) { return values; } -module.exports = Tree; \ No newline at end of file +export default Tree; \ No newline at end of file diff --git a/lib/metadata/aggregate.js b/lib/metadata/aggregate.ts similarity index 60% rename from lib/metadata/aggregate.js rename to lib/metadata/aggregate.ts index 6677f5f6c..43f190bf1 100644 --- a/lib/metadata/aggregate.js +++ b/lib/metadata/aggregate.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; + /** * Creates a new Aggregate. @@ -21,58 +21,76 @@ * @alias module:metadata~Aggregate * @constructor */ -function Aggregate() { +class Aggregate { /** * Name of the aggregate. * @type {String} + * @internal */ - this.name = null; + name: string; /** * Name of the keyspace where the aggregate is declared. */ - this.keyspaceName = null; + keyspaceName: string; /** * Signature of the aggregate. * @type {Array.} */ - this.signature = null; + signature: Array; /** * List of the CQL aggregate argument types. * @type {Array.<{code, info}>} */ - this.argumentTypes = null; + argumentTypes: Array<{ code: number, info?: (object | Array | string) }>; /** * State Function. * @type {String} */ - this.stateFunction = null; + stateFunction: string; /** * State Type. * @type {{code, info}} */ - this.stateType = null; + stateType: { code: number, info?: (object | Array | string) }; /** - * Final Function. - * @type {String} - */ - this.finalFunction = null; - this.initConditionRaw = null; + * Final Function. + * @type {String} + */ + finalFunction: string; + /** @internal */ + initConditionRaw: any; /** * Initial state value of this aggregate. * @type {String} */ - this.initCondition = null; + initCondition: string; + //TODO: was exposed as a string. /** * Type of the return value. * @type {{code: number, info: (Object|Array|null)}} */ - this.returnType = null; + returnType: { code: number, info?: (object | Array | string) }; /** * Indicates whether or not this aggregate is deterministic. This means that * given a particular input, the aggregate will always produce the same output. * @type {Boolean} */ - this.deterministic = null; + //TODO: It was not exposed. I believe it should be. 
+ deterministic: boolean; + /** @internal */ + constructor() { + this.name = null; + this.keyspaceName = null; + this.signature = null; + this.argumentTypes = null; + this.stateFunction = null; + this.stateType = null; + this.finalFunction = null; + this.initConditionRaw = null; + this.initCondition = null; + this.returnType = null; + this.deterministic = null; + } } -module.exports = Aggregate; \ No newline at end of file +export default Aggregate; \ No newline at end of file diff --git a/lib/metadata/client-state.js b/lib/metadata/client-state.ts similarity index 82% rename from lib/metadata/client-state.js rename to lib/metadata/client-state.ts index 6bf7c372a..9a9f0ce7f 100644 --- a/lib/metadata/client-state.js +++ b/lib/metadata/client-state.ts @@ -13,10 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import util from "util"; +import type Client from "../client"; +import errors from "../errors"; +import type { Host } from "../host"; + -const util = require('util'); -const errors = require('../errors'); /** * Represents the state of a {@link Client}. @@ -27,14 +29,18 @@ const errors = require('../errors'); * @constructor */ class ClientState { + private _hosts: Host[]; + private _openConnections: { [key: string]: number; }; + private _inFlightQueries: { [key: string]: number; }; /** * Creates a new instance of ClientState. + * @internal * @param {Array} hosts * @param {Object.} openConnections * @param {Object.} inFlightQueries */ - constructor(hosts, openConnections, inFlightQueries) { + constructor(hosts: Array, openConnections: {[key: string]: number}, inFlightQueries: {[key: string]: number}) { this._hosts = hosts; this._openConnections = openConnections; this._inFlightQueries = inFlightQueries; @@ -44,7 +50,7 @@ class ClientState { * Get an array of hosts to which the client is connected to. * @return {Array} */ - getConnectedHosts() { + getConnectedHosts(): Array { return this._hosts; } @@ -53,7 +59,7 @@ class ClientState { * @param {Host} host * @return {Number} */ - getOpenConnections(host) { + getOpenConnections(host: Host): number { if (!host) { throw new errors.ArgumentError('Host is not defined'); } @@ -70,7 +76,7 @@ class ClientState { * @param {Host} host * @return {Number} */ - getInFlightQueries(host) { + getInFlightQueries(host: Host): number { if (!host) { throw new errors.ArgumentError('Host is not defined'); } @@ -81,7 +87,7 @@ class ClientState { /** * Returns the string representation of the instance. */ - toString() { + toString(): string { return util.format('{"hosts": %j, "openConnections": %j, "inFlightQueries": %j}', this._hosts.map(function (h) { return h.address; }), this._openConnections, this._inFlightQueries); } @@ -92,7 +98,7 @@ class ClientState { * @internal * @ignore */ - static from(client) { + static from(client: Client) { const openConnections = {}; const inFlightQueries = {}; const hostArray = []; @@ -111,4 +117,4 @@ class ClientState { } } -module.exports = ClientState; \ No newline at end of file +export default ClientState; \ No newline at end of file diff --git a/lib/metadata/data-collection.js b/lib/metadata/data-collection.js deleted file mode 100644 index 1bab680a4..000000000 --- a/lib/metadata/data-collection.js +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const events = require('events'); -/** - * Creates a new instance of DataCollection - * @param {String} name Name of the data object. - * @classdesc Describes a table or a view - * @alias module:metadata~DataCollection - * @constructor - * @abstract - */ -function DataCollection(name) { - events.EventEmitter.call(this); - this.setMaxListeners(0); - //private - Object.defineProperty(this, 'loading', { value: false, enumerable: false, writable: true }); - Object.defineProperty(this, 'loaded', { value: false, enumerable: false, writable: true }); - /** - * Name of the object - * @type {String} - */ - this.name = name; - /** - * False-positive probability for SSTable Bloom filters. - * @type {number} - */ - this.bloomFilterFalsePositiveChance = 0; - /** - * Level of caching: all, keys_only, rows_only, none - * @type {String} - */ - this.caching = null; - /** - * A human readable comment describing the table. - * @type {String} - */ - this.comment = null; - /** - * Specifies the time to wait before garbage collecting tombstones (deletion markers) - * @type {number} - */ - this.gcGraceSeconds = 0; - /** - * Compaction strategy class used for the table. - * @type {String} - */ - this.compactionClass = null; - /** - * Associative-array containing the compaction options keys and values. - * @type {Object} - */ - this.compactionOptions = null; - /** - * Associative-array containing the compression options. - * @type {Object} - */ - this.compression = null; - /** - * Specifies the probability of read repairs being invoked over all replicas in the current data center. - * @type {number} - */ - this.localReadRepairChance = 0; - /** - * Specifies the probability with which read repairs should be invoked on non-quorum reads. The value must be - * between 0 and 1. - * @type {number} - */ - this.readRepairChance = 0; - /** - * An associative Array containing extra metadata for the table. - *

- * For Apache Cassandra versions prior to 3.0.0, this method always returns null. - *

- * @type {Object} - */ - this.extensions = null; - /** - * When compression is enabled, this option defines the probability - * with which checksums for compressed blocks are checked during reads. - * The default value for this options is 1.0 (always check). - *

- * For Apache Cassandra versions prior to 3.0.0, this method always returns null. - *

- * @type {Number|null} - */ - this.crcCheckChance = null; - /** - * Whether the populate I/O cache on flush is set on this table. - * @type {Boolean} - */ - this.populateCacheOnFlush = false; - /** - * Returns the default TTL for this table. - * @type {Number} - */ - this.defaultTtl = 0; - /** - * * Returns the speculative retry option for this table. - * @type {String} - */ - this.speculativeRetry = 'NONE'; - /** - * Returns the minimum index interval option for this table. - *

- * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for - * earlier versions. - *

- * @type {Number|null} - */ - this.minIndexInterval = 128; - /** - * Returns the maximum index interval option for this table. - *

- * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for - * earlier versions. - *

- * @type {Number|null} - */ - this.maxIndexInterval = 2048; - /** - * Array describing the table columns. - * @type {Array} - */ - this.columns = null; - /** - * An associative Array of columns by name. - * @type {Object} - */ - this.columnsByName = null; - /** - * Array describing the columns that are part of the partition key. - * @type {Array} - */ - this.partitionKeys = []; - /** - * Array describing the columns that form the clustering key. - * @type {Array} - */ - this.clusteringKeys = []; - /** - * Array describing the clustering order of the columns in the same order as the clusteringKeys. - * @type {Array} - */ - this.clusteringOrder = []; - /** - * An associative Array containing nodesync options for this table. - *

- * For DSE versions prior to 6.0.0, this method always returns {@code null}. If nodesync - * was not explicitly configured for this table this method will also return {@code null}. - *

- * @type {Object} - */ - this.nodesync = null; -} - -util.inherits(DataCollection, events.EventEmitter); - -module.exports = DataCollection; \ No newline at end of file diff --git a/lib/metadata/data-collection.ts b/lib/metadata/data-collection.ts new file mode 100644 index 000000000..eac2194cc --- /dev/null +++ b/lib/metadata/data-collection.ts @@ -0,0 +1,312 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import events from "events"; +import type { ColumnInfo } from "."; + +/** + * Creates a new instance of DataCollection + * @param {String} name Name of the data object. + * @classdesc Describes a table or a view + * @alias module:metadata~DataCollection + * @constructor + * @abstract + */ +class DataCollection extends events.EventEmitter { + /** + * Name of the object + * @type {String} + */ + name: string; + /** + * False-positive probability for SSTable Bloom filters. + * @type {number} + */ + bloomFilterFalsePositiveChance: number; + /** + * Level of caching: all, keys_only, rows_only, none + * @type {String} + */ + caching: string; + /** + * A human readable comment describing the table. + * @type {String} + */ + comment: string; + /** + * Specifies the time to wait before garbage collecting tombstones (deletion markers) + * @type {number} + */ + gcGraceSeconds: number; + /** + * Compaction strategy class used for the table. + * @type {String} + */ + compactionClass: string; + /** + * Associative-array containing the compaction options keys and values. + * @type {Object} + */ + compactionOptions: { [option: string]: any; }; + /** + * Associative-array containing the compaction options keys and values. + * @type {Object} + */ + compression: { + class?: string; + [option: string]: any; + }; + /** + * Specifies the probability of read repairs being invoked over all replicas in the current data center. + * @type {number} + */ + localReadRepairChance: number; + /** + * Specifies the probability with which read repairs should be invoked on non-quorum reads. The value must be + * between 0 and 1. + * @type {number} + */ + readRepairChance: number; + /** + * An associative Array containing extra metadata for the table. + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Object} + */ + extensions: { [option: string]: any; }; + /** + * When compression is enabled, this option defines the probability + * with which checksums for compressed blocks are checked during reads. + * The default value for this options is 1.0 (always check). + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Number|null} + */ + crcCheckChance?: number; + /** + * Whether the populate I/O cache on flush is set on this table. + * @type {Boolean} + */ + populateCacheOnFlush: boolean; + /** + * Returns the default TTL for this table. + * @type {Number} + */ + defaultTtl: number; + /** + * * Returns the speculative retry option for this table. + * @type {String} + */ + speculativeRetry: string; + /** + * Returns the minimum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + minIndexInterval?: number; + /** + * Returns the maximum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + maxIndexInterval?: number ; + /** + * Array describing the table columns. + * @type {Array} + */ + columns: ColumnInfo[]; + /** + * An associative Array of columns by name. + * @type {Object} + */ + columnsByName: { [key: string]: ColumnInfo }; + /** + * Array describing the columns that are part of the partition key. + * @type {Array} + */ + partitionKeys: ColumnInfo[]; + /** + * Array describing the columns that form the clustering key. + * @type {Array} + */ + clusteringKeys: ColumnInfo[]; + /** + * Array describing the clustering order of the columns in the same order as the clusteringKeys. + * @type {Array} + */ + clusteringOrder: string[]; + //TODO: not exposed, I believe it should be + /** + * An associative Array containing nodesync options for this table. + *

+ * For DSE versions prior to 6.0.0, this method always returns {@code null}. If nodesync + * was not explicitly configured for this table this method will also return {@code null}. + *

+ * @type {Object} + */ + nodesync?: object; + /** + * Creates a new instance of DataCollection + * @internal + * @param {String} name Name of the data object. + * @constructor + */ + constructor(name: string) { + super(); + this.setMaxListeners(0); + //private + Object.defineProperty(this, 'loading', { value: false, enumerable: false, writable: true }); + Object.defineProperty(this, 'loaded', { value: false, enumerable: false, writable: true }); + /** + * Name of the object + * @type {String} + */ + this.name = name; + /** + * False-positive probability for SSTable Bloom filters. + * @type {number} + */ + this.bloomFilterFalsePositiveChance = 0; + /** + * Level of caching: all, keys_only, rows_only, none + * @type {String} + */ + this.caching = null; + /** + * A human readable comment describing the table. + * @type {String} + */ + this.comment = null; + /** + * Specifies the time to wait before garbage collecting tombstones (deletion markers) + * @type {number} + */ + this.gcGraceSeconds = 0; + /** + * Compaction strategy class used for the table. + * @type {String} + */ + this.compactionClass = null; + /** + * Associative-array containing the compaction options keys and values. + * @type {Object} + */ + this.compactionOptions = null; + /** + * Associative-array containing the compression options. + * @type {Object} + */ + this.compression = null; + /** + * Specifies the probability of read repairs being invoked over all replicas in the current data center. + * @type {number} + */ + this.localReadRepairChance = 0; + /** + * Specifies the probability with which read repairs should be invoked on non-quorum reads. The value must be + * between 0 and 1. + * @type {number} + */ + this.readRepairChance = 0; + /** + * An associative Array containing extra metadata for the table. + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Object} + */ + this.extensions = null; + /** + * When compression is enabled, this option defines the probability + * with which checksums for compressed blocks are checked during reads. + * The default value for this options is 1.0 (always check). + *

+ * For Apache Cassandra versions prior to 3.0.0, this method always returns null. + *

+ * @type {Number|null} + */ + this.crcCheckChance = null; + /** + * Whether the populate I/O cache on flush is set on this table. + * @type {Boolean} + */ + this.populateCacheOnFlush = false; + /** + * Returns the default TTL for this table. + * @type {Number} + */ + this.defaultTtl = 0; + /** + * * Returns the speculative retry option for this table. + * @type {String} + */ + this.speculativeRetry = 'NONE'; + /** + * Returns the minimum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + this.minIndexInterval = 128; + /** + * Returns the maximum index interval option for this table. + *

+ * Note: this option is available in Apache Cassandra 2.1 and above, and will return null for + * earlier versions. + *

+ * @type {Number|null} + */ + this.maxIndexInterval = 2048; + /** + * Array describing the table columns. + * @type {Array} + */ + this.columns = null; + /** + * An associative Array of columns by name. + * @type {Object} + */ + this.columnsByName = null; + /** + * Array describing the columns that are part of the partition key. + * @type {Array} + */ + this.partitionKeys = []; + /** + * Array describing the columns that form the clustering key. + * @type {Array} + */ + this.clusteringKeys = []; + /** + * Array describing the clustering order of the columns in the same order as the clusteringKeys. + * @type {Array} + */ + this.clusteringOrder = []; + this.nodesync = null; + } +} + +export default DataCollection; \ No newline at end of file diff --git a/lib/metadata/event-debouncer.js b/lib/metadata/event-debouncer.ts similarity index 83% rename from lib/metadata/event-debouncer.js rename to lib/metadata/event-debouncer.ts index 4ea8ddc2e..2a86ec29c 100644 --- a/lib/metadata/event-debouncer.js +++ b/lib/metadata/event-debouncer.ts @@ -13,27 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import util from "util"; +import promiseUtils from "../promise-utils"; +import utils from "../utils"; -const util = require('util'); -const utils = require('../utils'); -const promiseUtils = require('../promise-utils'); const _queueOverflowThreshold = 1000; /** * Debounce protocol events by acting on those events with a sliding delay. - * @ignore + * @ignore @internal * @constructor */ class EventDebouncer { + private _delay: number; + private _logger: Function; + private _queue: { callbacks: undefined[]; keyspaces: object; mainEvent?: {handler: Function} }; + private _timeout: NodeJS.Timeout; /** * Creates a new instance of the event debouncer. * @param {Number} delay * @param {Function} logger */ - constructor(delay, logger) { + constructor(delay: number, logger: Function) { this._delay = delay; this._logger = logger; this._queue = null; @@ -47,9 +50,12 @@ class EventDebouncer { * @param {Boolean} processNow * @returns {Promise} */ - eventReceived(event, processNow) { + eventReceived(event: { + handler: Function; all?: boolean; keyspace?: string; + cqlObject?: string; + }, processNow: boolean): Promise { return new Promise((resolve, reject) => { - event.callback = promiseUtils.getCallback(resolve, reject); + event["callback"] = promiseUtils.getCallback(resolve, reject); this._queue = this._queue || { callbacks: [], keyspaces: {} }; const delay = !processNow ? this._delay : 0; if (event.all) { @@ -61,7 +67,7 @@ class EventDebouncer { // warn once this._logger('warn', util.format('Event debouncer queue exceeded %d events', _queueOverflowThreshold)); } - this._queue.callbacks.push(event.callback); + this._queue.callbacks.push(event["callback"]); if (this._queue.mainEvent) { // a full refresh is scheduled and the callback was added, nothing else to do. 
return this._slideDelay(delay); @@ -84,7 +90,7 @@ class EventDebouncer { * @param {Number} delay * @private * */ - _slideDelay(delay) { + _slideDelay(delay: number) { const self = this; function process() { const q = self._queue; @@ -114,7 +120,7 @@ class EventDebouncer { if (!this._queue) { return; } - this._queue.callbacks.forEach(function (cb) { + this._queue.callbacks.forEach(function (cb: Function) { cb(); }); this._queue = null; @@ -127,7 +133,7 @@ class EventDebouncer { * @param {{callbacks: Array, keyspaces: Object, mainEvent: Object}} q * @private */ -function processQueue (q) { +function processQueue (q: { callbacks: Array; keyspaces: object; mainEvent?: {handler: Function}; }) { if (q.mainEvent) { // refresh all by invoking 1 handler and invoke all pending callbacks return promiseUtils.toCallback(q.mainEvent.handler(), (err) => { @@ -161,4 +167,4 @@ function processQueue (q) { }); } -module.exports = EventDebouncer; \ No newline at end of file +export default EventDebouncer; \ No newline at end of file diff --git a/lib/metadata/index.d.ts b/lib/metadata/index.d.ts deleted file mode 100644 index 80c0d9673..000000000 --- a/lib/metadata/index.d.ts +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { types } from '../types'; -import { EmptyCallback, Host, token, ValueCallback } from '../../'; -import dataTypes = types.dataTypes; -import Uuid = types.Uuid; -import InetAddress = types.InetAddress; - -export namespace metadata { - - interface Aggregate { - argumentTypes: Array<{ code: dataTypes, info: any }>; - finalFunction: string; - initCondition: string; - keyspaceName: string; - returnType: string; - signature: string[]; - stateFunction: string; - stateType: string; - } - - interface ClientState { - getConnectedHosts(): Host[]; - - getInFlightQueries(host: Host): number; - - getOpenConnections(host: Host): number; - - toString(): string; - } - - interface DataTypeInfo { - code: dataTypes; - info: string | DataTypeInfo | DataTypeInfo[]; - options: { - frozen: boolean; - reversed: boolean; - }; - } - - interface ColumnInfo { - name: string; - type: DataTypeInfo; - } - - enum IndexKind { - custom = 0, - keys, - composites - } - - interface Index { - kind: IndexKind; - name: string; - options: object; - target: string; - - isCompositesKind(): boolean; - - isCustomKind(): boolean; - - isKeysKind(): boolean; - } - - interface DataCollection { - bloomFilterFalsePositiveChance: number; - caching: string; - clusteringKeys: ColumnInfo[]; - clusteringOrder: string[]; - columns: ColumnInfo[]; - columnsByName: { [key: string]: ColumnInfo }; - comment: string; - compactionClass: string; - compactionOptions: { [option: string]: any; }; - compression: { - class?: string; - [option: string]: any; - }; - crcCheckChange?: number; - defaultTtl: number; - extensions: { [option: string]: any; }; - gcGraceSeconds: number; - localReadRepairChance: number; - maxIndexInterval?: number; - minIndexInterval?: number; - name: string; - partitionKeys: ColumnInfo[]; - populateCacheOnFlush: boolean; - readRepairChance: number; - speculativeRetry: string; - } - - interface MaterializedView extends DataCollection { - tableName: string; - whereClause: string; - includeAllColumns: boolean; - } - - interface TableMetadata extends DataCollection { - indexes: Index[]; - indexInterval?: number; - isCompact: boolean; - memtableFlushPeriod: number; - replicateOnWrite: boolean; - cdc?: boolean; - virtual: boolean; - } - - interface QueryTrace { - requestType: string; - coordinator: InetAddress; - parameters: { [key: string]: any }; - startedAt: number | types.Long; - duration: number; - clientAddress: string; - events: Array<{ id: Uuid; activity: any; source: any; elapsed: any; thread: any }>; - } - - interface SchemaFunction { - argumentNames: string[]; - argumentTypes: Array<{ code: dataTypes, info: any }>; - body: string; - calledOnNullInput: boolean; - keyspaceName: string; - language: string; - name: string; - returnType: string; - signature: string[]; - } - - interface Udt { - name: string; - fields: ColumnInfo[] - } - - interface Metadata { - keyspaces: { [name: string]: { name: string, strategy: string }}; - - clearPrepared(): void; - - getAggregate(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>, callback: ValueCallback): void; - - getAggregate(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>): Promise; - - getAggregates(keyspaceName: string, name: string, callback: ValueCallback): void; - - getAggregates(keyspaceName: string, name: string): Promise; - - getFunction(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>, callback: ValueCallback): void; - - 
getFunction(keyspaceName: string, name: string, signature: string[] | Array<{ code: number, info: any }>): Promise; - - getFunctions(keyspaceName: string, name: string, callback: ValueCallback): void; - - getFunctions(keyspaceName: string, name: string): Promise; - - getMaterializedView(keyspaceName: string, name: string, callback: ValueCallback): void; - - getMaterializedView(keyspaceName: string, name: string, callback: EmptyCallback): Promise; - - getReplicas(keyspaceName: string, token: Buffer | token.Token | token.TokenRange): Host[]; - - getTable(keyspaceName: string, name: string, callback: ValueCallback): void; - - getTable(keyspaceName: string, name: string): Promise; - - getTokenRanges(): Set; - - getTokenRangesForHost(keyspaceName: string, host: Host): Set | null; - - getTrace(traceId: Uuid, consistency: types.consistencies, callback: ValueCallback): void; - - getTrace(traceId: Uuid, consistency: types.consistencies): Promise; - - getTrace(traceId: Uuid, callback: ValueCallback): void; - - getTrace(traceId: Uuid): Promise; - - getUdt(keyspaceName: string, name: string, callback: ValueCallback): void; - - getUdt(keyspaceName: string, name: string): Promise; - - newToken(components: Buffer[] | Buffer | string): token.Token; - - newTokenRange(start: token.Token, end: token.Token): token.TokenRange; - - refreshKeyspace(name: string, callback: EmptyCallback): void; - - refreshKeyspace(name: string): Promise; - - refreshKeyspaces(waitReconnect: boolean, callback: EmptyCallback): void; - - refreshKeyspaces(waitReconnect?: boolean): Promise; - - refreshKeyspaces(callback: EmptyCallback): void; - } -} \ No newline at end of file diff --git a/lib/metadata/index.js b/lib/metadata/index.ts similarity index 76% rename from lib/metadata/index.js rename to lib/metadata/index.ts index a34c45708..a818d3fc2 100644 --- a/lib/metadata/index.js +++ b/lib/metadata/index.ts @@ -13,26 +13,51 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import events from "events"; +import { EventEmitter } from "stream"; +import util from "util"; +import type { ClientOptions } from "../client"; +import Connection from "../connection"; +import type ControlConnection from "../control-connection"; +import type { DataTypeInfo } from "../encoder"; +import errors from "../errors"; +import { ExecutionOptions } from "../execution-options"; +import { Host, HostMap } from "../host"; +import promiseUtils from "../promise-utils"; +import requests from "../requests"; +import { Token, TokenRange } from "../token"; +import t, { Tokenizer } from "../tokenizer"; +import types, { type consistencies, type InetAddress, type Long, Uuid } from "../types/index"; +import utils, { type EmptyCallback, type HashSet, type ValueCallback } from "../utils"; +import Aggregate from "./aggregate"; +import type ClientState from "./client-state"; +import DataCollection from "./data-collection"; +import MaterializedView from "./materialized-view"; +import SchemaFunction from "./schema-function"; +import type Index from "./schema-index"; +import { IndexKind } from "./schema-index"; +import schemaParserFactory, { type Keyspace, SchemaParser } from "./schema-parser"; +import type TableMetadata from "./table-metadata"; + +interface ColumnInfo { + name: string; + type: DataTypeInfo; +} -'use strict'; - -const events = require('events'); -const util = require('util'); - -/** - * Module containing classes and fields related to metadata. 
- * @module metadata - */ +interface QueryTrace { + requestType: string; + coordinator: InetAddress; + parameters: { [key: string]: any }; + startedAt: number | Long; + duration: number; + clientAddress: string; + events: Array<{ id: Uuid; activity: any; source: any; elapsed: any; thread: any }>; +} -const t = require('../tokenizer'); -const utils = require('../utils'); -const errors = require('../errors'); -const types = require('../types'); -const requests = require('../requests'); -const schemaParserFactory = require('./schema-parser'); -const promiseUtils = require('../promise-utils'); -const { TokenRange } = require('../token'); -const { ExecutionOptions } = require('../execution-options'); +interface Udt { + name: string; + fields: ColumnInfo[] +} /** * @const @@ -65,18 +90,44 @@ const _traceMaxAttemps = 5; */ const _traceAttemptDelay = 400; +//TODO: I enhanced the types from {code: number, info: any} to DataTypeInfo in some methods' signature in this class. +//TODO: Also enhanced { name: string, strategy: string } to interface Keyspace /** * Represents cluster and schema information. * The metadata class acts as a internal state of the driver. */ class Metadata { + keyspaces: { [name: string]: Keyspace}; + /** @internal */ + initialized: boolean; + private _isDbaas: boolean; + private _schemaParser: SchemaParser; + /** @internal */ + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; + private _preparedQueries: PreparedQueries; + /** @internal */ + tokenizer: Tokenizer; + /** @internal */ + primaryReplicas: Record; + /** @internal */ + ring: any[]; + /** @internal */ + tokenRanges: Set; + /** @internal */ + ringTokensAsStrings: any[]; + /** @internal */ + datacenters: { [datacenter: string]: { hostLength: number; racks: HashSet } }; + private options: ClientOptions; + private controlConnection: ControlConnection; + /** * Creates a new instance of {@link Metadata}. + * @internal * @param {ClientOptions} options * @param {ControlConnection} controlConnection Control connection used to retrieve information. */ - constructor(options, controlConnection) { + constructor(options: ClientOptions, controlConnection: ControlConnection) { if (!options) { throw new errors.ArgumentError('Options are not defined'); } @@ -87,8 +138,8 @@ class Metadata { this.initialized = false; this._isDbaas = false; this._schemaParser = schemaParserFactory.getByVersion(options, controlConnection, this.getUdt.bind(this)); - this.log = utils.log; - this._preparedQueries = new PreparedQueries(options.maxPrepared, (...args) => this.log(...args)); + this.log = utils.log.bind(this); + this._preparedQueries = new PreparedQueries(options.maxPrepared, (...args) => this.log(...args as [string, string, any?, any?])); } /** @@ -97,17 +148,18 @@ class Metadata { * @ignore * @param {Array.} version */ - setCassandraVersion(version) { + setCassandraVersion(version: Array) { this._schemaParser = schemaParserFactory.getByVersion( this.options, this.controlConnection, this.getUdt.bind(this), version, this._schemaParser); } + //TODO: not exposed. I believe it should. /** * Determines whether the cluster is provided as a service. * @returns {boolean} true when the cluster is provided as a service (DataStax Astra), false when it's a * different deployment (on-prem). 
*/ - isDbaas() { + isDbaas(): boolean { return this._isDbaas; } @@ -121,10 +173,10 @@ class Metadata { } /** - * @ignore + * @ignore @internal * @param {String} partitionerName */ - setPartitioner(partitionerName) { + setPartitioner(partitionerName: string) { if (/RandomPartitioner$/.test(partitionerName)) { return this.tokenizer = new t.RandomTokenizer(); } @@ -136,10 +188,10 @@ class Metadata { /** * Populates the information regarding primary replica per token, datacenters (+ racks) and sorted token ring. - * @ignore + * @ignore @internal * @param {HostMap} hosts */ - buildTokens(hosts) { + buildTokens(hosts: HostMap) { if (!this.tokenizer) { return this.log('error', 'Tokenizer could not be determined'); } @@ -175,7 +227,7 @@ class Metadata { //All the tokens in ring order this.ring = allSorted; // Build TokenRanges. - const tokenRanges = new Set(); + const tokenRanges = new Set(); if (this.ring.length === 1) { // If there is only one token, return the range ]minToken, minToken] const min = this.tokenizer.minToken(); @@ -207,7 +259,9 @@ class Metadata { * @param {String} name Name of the keyspace. * @param {Function} [callback] Optional callback. */ - refreshKeyspace(name, callback) { + refreshKeyspace(name: string, callback: EmptyCallback): void; + refreshKeyspace(name: string): Promise; + refreshKeyspace(name: string, callback?: EmptyCallback): void | Promise { return promiseUtils.optionalCallback(this._refreshKeyspace(name), callback); } @@ -215,7 +269,7 @@ class Metadata { * @param {String} name * @private */ - async _refreshKeyspace(name) { + private async _refreshKeyspace(name: string) { if (!this.initialized) { throw this._uninitializedError(); } @@ -247,7 +301,10 @@ class Metadata { * connected at the moment. Default: true. * @param {Function} [callback] Optional callback. */ - refreshKeyspaces(waitReconnect, callback) { + refreshKeyspaces(waitReconnect: boolean, callback: EmptyCallback): void; + refreshKeyspaces(waitReconnect?: boolean): Promise; + refreshKeyspaces(callback: EmptyCallback): void; + refreshKeyspaces(waitReconnect?: boolean | EmptyCallback, callback?: EmptyCallback): void | Promise { if (typeof waitReconnect === 'function' || typeof waitReconnect === 'undefined') { callback = waitReconnect; waitReconnect = true; @@ -268,7 +325,7 @@ class Metadata { * @ignore * @internal */ - async refreshKeyspacesInternal(waitReconnect) { + async refreshKeyspacesInternal(waitReconnect: boolean): Promise<{ [s: string]: object; }> { this.log('info', 'Retrieving keyspaces metadata'); try { this.keyspaces = await this._schemaParser.getKeyspaces(waitReconnect); @@ -280,7 +337,7 @@ class Metadata { } } - _getKeyspaceReplicas(keyspace) { + private _getKeyspaceReplicas(keyspace) { if (!keyspace.replicas) { //Calculate replicas the first time for the keyspace keyspace.replicas = @@ -299,7 +356,7 @@ class Metadata { * @param {Buffer|Token|TokenRange} token Can be Buffer (serialized partition key), Token or TokenRange * @returns {Array} */ - getReplicas(keyspaceName, token) { + getReplicas(keyspaceName: string, token: Buffer | Token | TokenRange): Array { if (!this.ring) { return null; } @@ -338,7 +395,7 @@ class Metadata { * * @returns {Set} The ranges of the ring or empty set if schema metadata is not enabled. */ - getTokenRanges() { + getTokenRanges(): Set { return this.tokenRanges; } @@ -350,7 +407,7 @@ class Metadata { * @param {Host} host The host. * @returns {Set|null} Ranges for the keyspace on this host or null if keyspace isn't found or hasn't been loaded. 
*/ - getTokenRangesForHost(keyspaceName, host) { + getTokenRangesForHost(keyspaceName: string, host: Host): Set | null { if (!this.ring) { return null; } @@ -367,7 +424,7 @@ class Metadata { return this.getTokenRanges(); } const replicas = this._getKeyspaceReplicas(keyspace); - const ranges = new Set(); + const ranges = new Set(); // for each range, find replicas for end token, if replicas include host, add range. this.tokenRanges.forEach((tokenRange) => { const replicasForToken = replicas[this.tokenizer.stringify(tokenRange.end)]; @@ -384,7 +441,7 @@ class Metadata { * @param {Array|Buffer|String} components * @returns {Token} constructed token from the input buffer. */ - newToken(components) { + newToken(components: Array | Buffer | string): Token { if (!this.tokenizer) { throw new Error('Partitioner not established. This should only happen if metadata was disabled or you have not connected yet.'); } @@ -401,9 +458,9 @@ class Metadata { * Constructs a TokenRange from the given start and end tokens. * @param {Token} start * @param {Token} end - * @returns TokenRange build range spanning from start (exclusive) to end (inclusive). + * @returns {TokenRange} build range spanning from start (exclusive) to end (inclusive). */ - newTokenRange(start, end) { + newTokenRange(start: Token, end: Token): TokenRange { if (!this.tokenizer) { throw new Error('Partitioner not established. This should only happen if metadata was disabled or you have not connected yet.'); } @@ -417,7 +474,7 @@ class Metadata { * @internal * @ignore */ - getPreparedInfo(keyspaceName, query) { + getPreparedInfo(keyspaceName: string, query: string): PreparedQueryInfo { return this._preparedQueries.getOrAdd(keyspaceName, query); } @@ -425,27 +482,27 @@ class Metadata { * Clears the internal state related to the prepared statements. * Following calls to the Client using the prepare flag will re-prepare the statements. */ - clearPrepared() { + clearPrepared():void { this._preparedQueries.clear(); } - /** @ignore */ + /** @ignore @internal */ getPreparedById(id) { return this._preparedQueries.getById(id); } - /** @ignore */ + /** @ignore @internal */ setPreparedById(info) { return this._preparedQueries.setById(info); } - /** @ignore */ + /** @ignore @internal */ getAllPrepared() { return this._preparedQueries.getAll(); } - /** @ignore */ - _uninitializedError() { + /** @ignore @internal */ + private _uninitializedError() { return new Error('Metadata has not been initialized. This could only happen if you have not connected yet.'); } @@ -463,7 +520,9 @@ class Metadata { * @param {String} name Name of the UDT. * @param {Function} [callback] The callback to invoke when retrieval completes. */ - getUdt(keyspaceName, name, callback) { + getUdt(keyspaceName: string, name: string, callback: ValueCallback): void; + getUdt(keyspaceName: string, name: string): Promise; + getUdt(keyspaceName: string, name: string, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getUdt(keyspaceName, name), callback); } @@ -473,7 +532,7 @@ class Metadata { * @returns {Promise} * @private */ - async _getUdt(keyspaceName, name) { + private async _getUdt(keyspaceName: string, name: string): Promise { if (!this.initialized) { throw this._uninitializedError(); } @@ -503,7 +562,9 @@ class Metadata { * @param {Function} [callback] The callback with the err as a first parameter and the {@link TableMetadata} as * second parameter. 
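For illustration, both call styles declared in the getTable/getUdt overloads above, assuming the connected client, an async context, and placeholder objects ks1.users and the UDT ks1.address:

// Promise-based (no callback argument):
const table = await client.metadata.getTable("ks1", "users");
console.log(table.columns.map(c => c.name));
const addressUdt = await client.metadata.getUdt("ks1", "address");
// Callback-based, matching the ValueCallback overloads:
client.metadata.getTable("ks1", "users", (err, tableMeta) => {
  if (err) { return console.error(err); }
  console.log(tableMeta.partitionKeys.map(c => c.name));
});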
*/ - getTable(keyspaceName, name, callback) { + getTable(keyspaceName: string, name: string, callback: ValueCallback): void; + getTable(keyspaceName: string, name: string): Promise; + getTable(keyspaceName: string, name: string, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getTable(keyspaceName, name), callback); } @@ -512,7 +573,7 @@ class Metadata { * @param {String} name * @private */ - async _getTable(keyspaceName, name) { + private async _getTable(keyspaceName: string, name: string) { if (!this.initialized) { throw this._uninitializedError(); } @@ -544,7 +605,9 @@ class Metadata { * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link SchemaFunction} * as second parameter. */ - getFunctions(keyspaceName, name, callback) { + getFunctions(keyspaceName: string, name: string, callback: ValueCallback): void; + getFunctions(keyspaceName: string, name: string): Promise; + getFunctions(keyspaceName: string, name: string, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getFunctionsWrapper(keyspaceName, name), callback); } @@ -553,7 +616,7 @@ class Metadata { * @param {String} name * @private */ - async _getFunctionsWrapper(keyspaceName, name) { + private async _getFunctionsWrapper(keyspaceName: string, name: string) { if (!keyspaceName || !name) { throw new errors.ArgumentError('You must provide the keyspace name and cql function name to retrieve the metadata'); } @@ -577,7 +640,9 @@ class Metadata { * @param {Function} [callback] The callback with the err as a first parameter and the {@link SchemaFunction} as second * parameter. */ - getFunction(keyspaceName, name, signature, callback) { + getFunction(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + getFunction(keyspaceName: string, name: string, signature: string[] | Array): Promise; + getFunction(keyspaceName: string, name: string, signature: string[] | Array, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getSingleFunction(keyspaceName, name, signature, false), callback); } @@ -596,7 +661,9 @@ class Metadata { * @param {Function} [callback] The callback with the err as a first parameter and the array of {@link Aggregate} as * second parameter. */ - getAggregates(keyspaceName, name, callback) { + getAggregates(keyspaceName: string, name: string, callback: ValueCallback): void; + getAggregates(keyspaceName: string, name: string): Promise; + getAggregates(keyspaceName: string, name: string, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getAggregates(keyspaceName, name), callback); } @@ -605,7 +672,7 @@ class Metadata { * @param {String} name * @private */ - async _getAggregates(keyspaceName, name) { + private async _getAggregates(keyspaceName: string, name: string) { if (!keyspaceName || !name) { throw new errors.ArgumentError('You must provide the keyspace name and cql aggregate name to retrieve the metadata'); } @@ -628,7 +695,9 @@ class Metadata { * @param {Array.|Array.<{code, info}>} signature Array of types of the parameters. * @param {Function} [callback] The callback with the err as a first parameter and the {@link Aggregate} as second parameter. 
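For illustration, retrieving a single function or aggregate by name and signature as typed above, assuming the connected client and a placeholder CQL function ks1.maxof(int, int); the signature is the list of argument type names:

const fn = await client.metadata.getFunction("ks1", "maxof", ["int", "int"]);
if (fn) {
  console.log(fn.language, fn.body);
}
const aggregates = await client.metadata.getAggregates("ks1", "average"); // every overload named "average"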
*/ - getAggregate(keyspaceName, name, signature, callback) { + getAggregate(keyspaceName: string, name: string, signature: string[] | Array, callback: ValueCallback): void; + getAggregate(keyspaceName: string, name: string, signature: string[] | Array): Promise; + getAggregate(keyspaceName: string, name: string, signature: string[] | Array, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getSingleFunction(keyspaceName, name, signature, true), callback); } @@ -648,7 +717,9 @@ class Metadata { * @param {Function} [callback] The callback with the err as a first parameter and the {@link MaterializedView} as * second parameter. */ - getMaterializedView(keyspaceName, name, callback) { + getMaterializedView(keyspaceName: string, name: string, callback: ValueCallback): void; + getMaterializedView(keyspaceName: string, name: string): Promise; //TODO: It was getMaterializedView(keyspaceName: string, name: string, callback: EmptyCallback): Promise; + getMaterializedView(keyspaceName: string, name: string, callback?: ValueCallback): void | Promise { return promiseUtils.optionalCallback(this._getMaterializedView(keyspaceName, name), callback); } @@ -658,7 +729,7 @@ class Metadata { * @returns {Promise} * @private */ - async _getMaterializedView(keyspaceName, name) { + private async _getMaterializedView(keyspaceName: string, name: string): Promise { if (!this.initialized) { throw this._uninitializedError(); } @@ -681,7 +752,7 @@ class Metadata { * @returns {Promise} * @private */ - async _getFunctions(keyspaceName, name, aggregate) { + private async _getFunctions(keyspaceName: string, name: string, aggregate: boolean): Promise> { if (!this.initialized) { throw this._uninitializedError(); } @@ -705,7 +776,7 @@ class Metadata { * @returns {Promise} * @private */ - async _getSingleFunction(keyspaceName, name, signature, aggregate) { + private async _getSingleFunction(keyspaceName: string, name: string, signature: Array, aggregate: boolean): Promise { if (!keyspaceName || !name) { throw new errors.ArgumentError('You must provide the keyspace name and cql function name to retrieve the metadata'); } @@ -735,7 +806,11 @@ class Metadata { * @param {Number} [consistency] The consistency level to obtain the trace. * @param {Function} [callback] The callback with the err as first parameter and the query trace as second parameter. 
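For illustration, obtaining a QueryTrace as typed above, assuming the connected client and an async context; the trace id comes from executing a statement with tracing enabled:

const rs = await client.execute("SELECT * FROM system.local", [], { traceQuery: true });
const trace = await client.metadata.getTrace(rs.info.traceId);
console.log(trace.coordinator.toString(), trace.duration);
trace.events.forEach(e => console.log(e.elapsed, e.activity));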
*/ - getTrace(traceId, consistency, callback) { + getTrace(traceId: Uuid, consistency: consistencies, callback: ValueCallback): void; + getTrace(traceId: Uuid, consistency: consistencies): Promise; + getTrace(traceId: Uuid, callback: ValueCallback): void; + getTrace(traceId: Uuid): Promise; + getTrace(traceId: Uuid, consistency?: consistencies | ValueCallback, callback?: ValueCallback): void | Promise { if (!callback && typeof consistency === 'function') { // Both callback and consistency are optional parameters // In this case, the second parameter is the callback @@ -743,7 +818,7 @@ class Metadata { consistency = null; } - return promiseUtils.optionalCallback(this._getTrace(traceId, consistency), callback); + return promiseUtils.optionalCallback(this._getTrace(traceId, consistency as consistencies), callback); } /** @@ -752,7 +827,7 @@ class Metadata { * @returns {Promise} * @private */ - async _getTrace(traceId, consistency) { + private async _getTrace(traceId: Uuid, consistency: consistencies): Promise { if (!this.initialized) { throw this._uninitializedError(); } @@ -803,6 +878,7 @@ class Metadata { /** * Checks whether hosts that are currently up agree on the schema definition. + * @internal *

* This method performs a one-time check only, without any form of retry; therefore * protocolOptions.maxSchemaAgreementWaitSeconds setting does not apply in this case. @@ -814,7 +890,7 @@ class Metadata { * true when all hosts agree on the schema and false when there is no agreement or when * the check could not be performed (for example, if the control connection is down). */ - checkSchemaAgreement(callback) { + checkSchemaAgreement(callback: Function): Promise { return promiseUtils.optionalCallback(this._checkSchemaAgreement(), callback); } @@ -822,7 +898,7 @@ class Metadata { * Async-only version of check schema agreement. * @private */ - async _checkSchemaAgreement() { + private async _checkSchemaAgreement() { const connection = this.controlConnection.connection; if (!connection) { return false; @@ -830,7 +906,7 @@ class Metadata { try { return await this.compareSchemaVersions(connection); } - catch (err) { + catch (_err) { return false; } } @@ -842,7 +918,7 @@ class Metadata { * @internal * @ignore */ - async adaptUserHints(keyspace, hints) { + async adaptUserHints(keyspace: string, hints: Array) { if (!Array.isArray(hints)) { return; } @@ -855,7 +931,7 @@ class Metadata { } const type = types.dataTypes.getByName(hint); - this._checkUdtTypes(udts, type, keyspace); + this._checkUdtTypes(udts, type as {code, info}, keyspace); hints[i] = type; } @@ -874,7 +950,7 @@ class Metadata { * @param {string} keyspace * @private */ - _checkUdtTypes(udts, type, keyspace) { + private _checkUdtTypes(udts: Array, type: {code; info}, keyspace: string) { if (type.code === types.dataTypes.udt) { const udtName = type.info.split('.'); type.info = { @@ -911,7 +987,7 @@ class Metadata { * @internal * @ignore */ - async compareSchemaVersions(connection) { + async compareSchemaVersions(connection: Connection) { const versions = new Set(); const response1 = await connection.send(new requests.QueryRequest(_selectSchemaVersionLocal), null); if (response1 && response1.rows && response1.rows.length === 1) { @@ -931,17 +1007,30 @@ class Metadata { } } +type PreparedQueryInfo = { + queryId?: Buffer; + preparing?: boolean; + query: string; + keyspace: string; + meta?: DataCollection; +} & EventEmitter; + /** * Allows to store prepared queries and retrieval by query or query id. 
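For illustration, the schema agreement check declared earlier in this hunk, assuming the connected client; it performs a single check without retries:

const inAgreement = await client.metadata.checkSchemaAgreement();
if (!inAgreement) {
  console.warn("Hosts do not agree on the schema version yet");
}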
- * @ignore + * @ignore @internal */ class PreparedQueries { + length: number; + _maxPrepared: number; + _mapByKey: Map; + _mapById: Map; + _logger: Function; /** * @param {Number} maxPrepared * @param {Function} logger */ - constructor(maxPrepared, logger) { + constructor(maxPrepared: number, logger: Function) { this.length = 0; this._maxPrepared = maxPrepared; this._mapByKey = new Map(); @@ -953,7 +1042,7 @@ class PreparedQueries { return (keyspace || '') + query; } - getOrAdd(keyspace, query) { + getOrAdd(keyspace, query): PreparedQueryInfo { const key = this._getKey(keyspace, query); let info = this._mapByKey.get(key); if (info) { @@ -962,6 +1051,7 @@ class PreparedQueries { this._validateOverflow(); + // @ts-ignore info = new events.EventEmitter(); info.setMaxListeners(0); info.query = query; @@ -1021,4 +1111,12 @@ class PreparedQueries { } } -module.exports = Metadata; +export default Metadata; + +export { + Metadata, + type Aggregate, type ClientState, type ColumnInfo, type DataCollection, type DataTypeInfo, type Index, type MaterializedView, type PreparedQueryInfo, + type QueryTrace, type SchemaFunction, type TableMetadata, type IndexKind, + type Udt +}; + diff --git a/lib/metadata/materialized-view.js b/lib/metadata/materialized-view.ts similarity index 65% rename from lib/metadata/materialized-view.js rename to lib/metadata/materialized-view.ts index 455a66a0e..7a2e7afdb 100644 --- a/lib/metadata/materialized-view.js +++ b/lib/metadata/materialized-view.ts @@ -13,36 +13,47 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const util = require('util'); -const DataCollection = require('./data-collection'); + +import DataCollection from "./data-collection"; + + + /** - * Creates a new MaterializedView. - * @param {String} name Name of the View. * @classdesc Describes a CQL materialized view. * @alias module:metadata~MaterializedView * @augments {module:metadata~DataCollection} * @constructor */ -function MaterializedView(name) { - DataCollection.call(this, name); +class MaterializedView extends DataCollection { /** * Name of the table. * @type {String} */ - this.tableName = null; + tableName: string; /** * View where clause. * @type {String} */ - this.whereClause = null; + whereClause: string; /** * Determines if all the table columns where are included in the view. * @type {boolean} */ - this.includeAllColumns = false; -} + includeAllColumns: boolean; + /** + * Creates a new MaterializedView. + * @internal + * @param {String} name Name of the View. + * @augments {module:metadata~DataCollection} + * @constructor + */ + constructor(name: string) { + super(name); -util.inherits(MaterializedView, DataCollection); + this.tableName = null; + this.whereClause = null; + this.includeAllColumns = false; + } +} -module.exports = MaterializedView; \ No newline at end of file +export default MaterializedView; \ No newline at end of file diff --git a/lib/metadata/schema-function.js b/lib/metadata/schema-function.js deleted file mode 100644 index 40105c07f..000000000 --- a/lib/metadata/schema-function.js +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -/** - * Creates a new SchemaFunction. - * @classdesc Describes a CQL function. - * @alias module:metadata~SchemaFunction - * @constructor - */ -function SchemaFunction() { - /** - * Name of the cql function. - * @type {String} - */ - this.name = null; - /** - * Name of the keyspace where the cql function is declared. - */ - this.keyspaceName = null; - /** - * Signature of the function. - * @type {Array.} - */ - this.signature = null; - /** - * List of the function argument names. - * @type {Array.} - */ - this.argumentNames = null; - /** - * List of the function argument types. - * @type {Array.<{code, info}>} - */ - this.argumentTypes = null; - /** - * Body of the function. - * @type {String} - */ - this.body = null; - /** - * Determines if the function is called when the input is null. - * @type {Boolean} - */ - this.calledOnNullInput = null; - /** - * Name of the programming language, for example: java, javascript, ... - * @type {String} - */ - this.language = null; - /** - * Type of the return value. - * @type {{code: number, info: (Object|Array|null)}} - */ - this.returnType = null; - /** - * Indicates whether or not this function is deterministic. This means that - * given a particular input, the function will always produce the same output. - * @type {Boolean} - */ - this.deterministic = null; - /** - * Indicates whether or not this function is monotonic on all of its - * arguments. This means that it is either entirely non-increasing or - * non-decreasing. Even if the function is not monotonic on all of its - * arguments, it's possible to specify that it is monotonic on one of - * its arguments, meaning that partial applications of the function over - * that argument will be monotonic. - * - * Monotonicity is required to use the function in a GROUP BY clause. - * @type {Boolean} - */ - this.monotonic = null; - /** - * The argument names that the function is monotonic on. - * - * If {@link monotonic} is true, this will return all argument names. - * Otherwise, this will return either one argument or an empty array. - * @type {Array.} - */ - this.monotonicOn = null; -} - -module.exports = SchemaFunction; \ No newline at end of file diff --git a/lib/metadata/schema-function.ts b/lib/metadata/schema-function.ts new file mode 100644 index 000000000..ee5f79a09 --- /dev/null +++ b/lib/metadata/schema-function.ts @@ -0,0 +1,181 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { DataTypeInfo } from "../encoder"; + + +/** + * @classdesc Describes a CQL function. 
+ * @alias module:metadata~SchemaFunction +*/ +class SchemaFunction { + /** + * Name of the cql function. + * @type {String} + */ + name: string; + /** + * Name of the keyspace where the cql function is declared. + */ + keyspaceName: string; + /** + * Signature of the function. + * @type {Array.} + */ + signature: Array; + /** + * List of the function argument names. + * @type {Array.} + */ + argumentNames: Array; + /** + * List of the function argument types. + * @type {Array.<{code, info}>} + */ + argumentTypes: Array; + /** + * Body of the function. + * @type {String} + */ + body: string; + /** + * Determines if the function is called when the input is null. + * @type {Boolean} + */ + calledOnNullInput: boolean; + /** + * Name of the programming language, for example: java, javascript, ... + * @type {String} + */ + language: string; + + // Was exposed as string, but I believe it should be DataTypeInfo + /** + * Type of the return value. + * @type {DataTypeInfo} + */ + returnType: DataTypeInfo; + + //TODO: not exposed. I believe it should be. + /** + * Indicates whether or not this function is deterministic. This means that + * given a particular input, the function will always produce the same output. + * @type {Boolean} + */ + deterministic: boolean; + //TODO: not exposed. I believe it should be. + /** + * Indicates whether or not this function is monotonic on all of its + * arguments. This means that it is either entirely non-increasing or + * non-decreasing. Even if the function is not monotonic on all of its + * arguments, it's possible to specify that it is monotonic on one of + * its arguments, meaning that partial applications of the function over + * that argument will be monotonic. + * + * Monotonicity is required to use the function in a GROUP BY clause. + * @type {Boolean} + */ + monotonic: boolean; + //TODO: not exposed. I believe it should be. + /** + * The argument names that the function is monotonic on. + * + * If {@link monotonic} is true, this will return all argument names. + * Otherwise, this will return either one argument or an empty array. + * @type {Array.} + */ + monotonicOn: Array; + /** + * Creates a new SchemaFunction. + * @internal + * @alias module:metadata~SchemaFunction + * @constructor + */ + constructor() { + /** + * Name of the cql function. + * @type {String} + */ + this.name = null; + /** + * Name of the keyspace where the cql function is declared. + */ + this.keyspaceName = null; + /** + * Signature of the function. + * @type {Array.} + */ + this.signature = null; + /** + * List of the function argument names. + * @type {Array.} + */ + this.argumentNames = null; + /** + * List of the function argument types. + * @type {Array.<{code, info}>} + */ + this.argumentTypes = null; + /** + * Body of the function. + * @type {String} + */ + this.body = null; + /** + * Determines if the function is called when the input is null. + * @type {Boolean} + */ + this.calledOnNullInput = null; + /** + * Name of the programming language, for example: java, javascript, ... + * @type {String} + */ + this.language = null; + /** + * Type of the return value. + * @type {{code: number, info: (Object|Array|null)}} + */ + this.returnType = null; + /** + * Indicates whether or not this function is deterministic. This means that + * given a particular input, the function will always produce the same output. + * @type {Boolean} + */ + this.deterministic = null; + /** + * Indicates whether or not this function is monotonic on all of its + * arguments. 
This means that it is either entirely non-increasing or + * non-decreasing. Even if the function is not monotonic on all of its + * arguments, it's possible to specify that it is monotonic on one of + * its arguments, meaning that partial applications of the function over + * that argument will be monotonic. + * + * Monotonicity is required to use the function in a GROUP BY clause. + * @type {Boolean} + */ + this.monotonic = null; + /** + * The argument names that the function is monotonic on. + * + * If {@link monotonic} is true, this will return all argument names. + * Otherwise, this will return either one argument or an empty array. + * @type {Array.} + */ + this.monotonicOn = null; + } +} + +export default SchemaFunction; \ No newline at end of file diff --git a/lib/metadata/schema-index.js b/lib/metadata/schema-index.js deleted file mode 100644 index 8787bad27..000000000 --- a/lib/metadata/schema-index.js +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const utils = require('../utils'); -const types = require('../types'); - -/** @private */ -const kind = { - custom: 0, - keys: 1, - composites: 2 -}; -/** - * Creates a new Index instance. - * @classdesc Describes a CQL index. - * @param {String} name - * @param {String} target - * @param {Number|String} kind - * @param {Object} options - * @alias module:metadata~Index - * @constructor - */ -function Index(name, target, kind, options) { - /** - * Name of the index. - * @type {String} - */ - this.name = name; - /** - * Target of the index. - * @type {String} - */ - this.target = target; - /** - * A numeric value representing index kind (0: custom, 1: keys, 2: composite); - * @type {Number} - */ - this.kind = typeof kind === 'string' ? getKindByName(kind) : kind; - /** - * An associative array containing the index options - * @type {Object} - */ - this.options = options; -} - -/** - * Determines if the index is of composites kind - * @returns {Boolean} - */ -Index.prototype.isCompositesKind = function () { - return this.kind === kind.composites; -}; - -/** - * Determines if the index is of keys kind - * @returns {Boolean} - */ -Index.prototype.isKeysKind = function () { - return this.kind === kind.keys; -}; - -/** - * Determines if the index is of custom kind - * @returns {Boolean} - */ -Index.prototype.isCustomKind = function () { - return this.kind === kind.custom; -}; - -/** - * Parses Index information from rows in the 'system_schema.indexes' table - * @deprecated It will be removed in the next major version. 
- * @param {Array.} indexRows - * @returns {Array.} - */ -Index.fromRows = function (indexRows) { - if (!indexRows || indexRows.length === 0) { - return utils.emptyArray; - } - return indexRows.map(function (row) { - const options = row['options']; - return new Index(row['index_name'], options['target'], getKindByName(row['kind']), options); - }); -}; - -/** - * Parses Index information from rows in the legacy 'system.schema_columns' table. - * @deprecated It will be removed in the next major version. - * @param {Array.} columnRows - * @param {Object.} columnsByName - * @returns {Array.} - */ -Index.fromColumnRows = function (columnRows, columnsByName) { - const result = []; - for (let i = 0; i < columnRows.length; i++) { - const row = columnRows[i]; - const indexName = row['index_name']; - if (!indexName) { - continue; - } - const c = columnsByName[row['column_name']]; - let target; - const options = JSON.parse(row['index_options']); - if (options !== null && options['index_keys'] !== undefined) { - target = util.format("keys(%s)", c.name); - } - else if (options !== null && options['index_keys_and_values'] !== undefined) { - target = util.format("entries(%s)", c.name); - } - else if (c.type.options.frozen && (c.type.code === types.dataTypes.map || c.type.code === types.dataTypes.list || - c.type.code === types.dataTypes.set)) { - target = util.format("full(%s)", c.name); - } - else { - target = c.name; - } - result.push(new Index(indexName, target, getKindByName(row['index_type']), options)); - } - return result; -}; - -/** - * Gets the number representing the kind based on the name - * @param {String} name - * @returns {Number} - * @private - */ -function getKindByName(name) { - if (!name) { - return kind.custom; - } - return kind[name.toLowerCase()]; -} - -module.exports = Index; \ No newline at end of file diff --git a/lib/metadata/schema-index.ts b/lib/metadata/schema-index.ts new file mode 100644 index 000000000..9f1af926e --- /dev/null +++ b/lib/metadata/schema-index.ts @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import util from "util"; +import types, { Row } from "../types/index"; +import utils from "../utils"; + + +enum IndexKind { + custom = 0, + keys = 1, + composites = 2 +} + +/** + * @classdesc Describes a CQL index. + * @alias module:metadata~Index + */ +class Index { + /** + * Name of the index. + * @type {String} + */ + name: string; + /** + * Target of the index. + * @type {String} + */ + target: string; + /** + * A numeric value representing index kind (0: custom, 1: keys, 2: composite); + * @type {IndexKind} + */ + kind: IndexKind; + /** + * An associative array containing the index options + * @type {Object} + */ + options: object; + /** + * Creates a new Index instance. + * @classdesc Describes a CQL index. 
+ * @internal + * @param {String} name + * @param {String} target + * @param {Number|String} kind + * @param {Object} options + * @constructor + */ + constructor(name: string, target: string, kind: number | string, options: object) { + /** + * Name of the index. + * @type {String} + */ + this.name = name; + /** + * Target of the index. + * @type {String} + */ + this.target = target; + /** + * A numeric value representing index kind (0: custom, 1: keys, 2: composite); + * @type {Number} + */ + this.kind = typeof kind === 'string' ? getKindByName(kind) : kind; + /** + * An associative array containing the index options + * @type {Object} + */ + this.options = options; + } + /** + * Parses Index information from rows in the 'system_schema.indexes' table + * @deprecated It will be removed in the next major version. + * @internal + * @param {Array.} indexRows + * @returns {Array.} + */ + static fromRows(indexRows: Array): Array { + if (!indexRows || indexRows.length === 0) { + return utils.emptyArray as Array; + } + return indexRows.map(function (row) { + const options = row['options']; + return new Index(row['index_name'], options['target'], getKindByName(row['kind']), options); + }); + } + /** + * Parses Index information from rows in the legacy 'system.schema_columns' table. + * @deprecated It will be removed in the next major version. + * @internal + * @param {Array.} columnRows + * @param {Object.} columnsByName + * @returns {Array.} + */ + static fromColumnRows(columnRows: Array, columnsByName: {[key: string]: {name; type}}): Array { + const result = []; + for (let i = 0; i < columnRows.length; i++) { + const row = columnRows[i]; + const indexName = row['index_name']; + if (!indexName) { + continue; + } + const c = columnsByName[row['column_name']]; + let target; + const options = JSON.parse(row['index_options']); + if (options !== null && options['index_keys'] !== undefined) { + target = util.format("keys(%s)", c.name); + } + else if (options !== null && options['index_keys_and_values'] !== undefined) { + target = util.format("entries(%s)", c.name); + } + else if (c.type.options.frozen && (c.type.code === types.dataTypes.map || c.type.code === types.dataTypes.list || + c.type.code === types.dataTypes.set)) { + target = util.format("full(%s)", c.name); + } + else { + target = c.name; + } + result.push(new Index(indexName, target, getKindByName(row['index_type']), options)); + } + return result; + } + /** + * Determines if the index is of composites kind + * @returns {Boolean} + */ + isCompositesKind(): boolean { + return this.kind === IndexKind.composites; + } + /** + * Determines if the index is of keys kind + * @returns {Boolean} + */ + isKeysKind(): boolean { + return this.kind === IndexKind.keys; + } + /** + * Determines if the index is of custom kind + * @returns {Boolean} + */ + isCustomKind(): boolean { + return this.kind === IndexKind.custom; + } +} + +/** + * Gets the number representing the kind based on the name + * @param {String} name + * @returns {Number} + * @private + */ +function getKindByName(name: string): IndexKind { + if (!name) { + return IndexKind.custom; + } + return IndexKind[name.toLowerCase()]; +} + +export default Index; + +export { + Index, + IndexKind +}; diff --git a/lib/metadata/schema-parser.js b/lib/metadata/schema-parser.ts similarity index 90% rename from lib/metadata/schema-parser.js rename to lib/metadata/schema-parser.ts index af05a3b69..f6a4f6830 100644 --- a/lib/metadata/schema-parser.js +++ b/lib/metadata/schema-parser.ts @@ -13,24 +13,27 @@ * See 
the License for the specific language governing permissions and * limitations under the License. */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import events from "events"; +import util from "util"; +import type { ClientOptions } from "../client"; +import type ControlConnection from "../control-connection"; +import errors from "../errors"; +import promiseUtils from "../promise-utils"; +import types, { Row } from "../types/index"; +import utils from "../utils"; +import Aggregate from "./aggregate"; +import MaterializedView from "./materialized-view"; +import SchemaFunction from "./schema-function"; +import Index from "./schema-index"; +import TableMetadata from "./table-metadata"; + -'use strict'; -const util = require('util'); -const events = require('events'); -const types = require('../types'); -const utils = require('../utils'); -const errors = require('../errors'); -const promiseUtils = require('../promise-utils'); -const TableMetadata = require('./table-metadata'); -const Aggregate = require('./aggregate'); -const SchemaFunction = require('./schema-function'); -const Index = require('./schema-index'); -const MaterializedView = require('./materialized-view'); const { format } = util; /** * @module metadata/schemaParser - * @ignore + * @ignore @internal */ const _selectAllKeyspacesV1 = "SELECT * FROM system.schema_keyspaces"; @@ -58,13 +61,25 @@ const _selectVirtualColumns = "SELECT * FROM system_virtual_schema.columns where /** * @abstract - * @param {ClientOptions} options The client options - * @param {ControlConnection} cc - * @constructor - * @ignore + * @ignore @internal */ -class SchemaParser { - constructor(options, cc) { +abstract class SchemaParser { + cc: ControlConnection; + encodingOptions: { map?: Function; set?: Function; copyBuffer?: boolean; useUndefinedAsUnset?: boolean; useBigIntAsLong?: boolean; useBigIntAsVarint?: boolean; }; + selectTable: string; + selectColumns: string; + selectIndexes: string; + selectUdt: string; + selectAggregates: string; + selectFunctions: string; + supportsVirtual: boolean; + /** + * @param {ClientOptions} options The client options + * @param {ControlConnection} cc + * @constructor + * @ignore @internal + */ + constructor(options: ClientOptions, cc: ControlConnection) { this.cc = cc; this.encodingOptions = options.encoding; this.selectTable = null; @@ -85,7 +100,8 @@ class SchemaParser { * @returns {{name, durableWrites, strategy, strategyOptions, tokenToReplica, udts, tables, functions, aggregates}} * @protected */ - _createKeyspace(name, durableWrites, strategy, strategyOptions, virtual) { + protected _createKeyspace(name, durableWrites, strategy, strategyOptions, virtual?): + {name, durableWrites, strategy, strategyOptions, tokenToReplica, udts, tables, functions, aggregates, virtual, views, graphEngine} { return { name, durableWrites, @@ -107,16 +123,14 @@ class SchemaParser { * @param {String} name * @returns {Promise} */ - getKeyspace(name) { - } + abstract getKeyspace(name: string): Promise; /** * @abstract * @param {Boolean} waitReconnect * @returns {Promise>} */ - getKeyspaces(waitReconnect) { - } + abstract getKeyspaces(waitReconnect: boolean): Promise<{ [name: string]: Keyspace}>; /** * @param {String} keyspaceName @@ -125,7 +139,7 @@ class SchemaParser { * @param {Boolean} virtual * @returns {Promise} */ - async getTable(keyspaceName, name, cache, virtual) { + async getTable(keyspaceName: string, name: string, cache: object, virtual: boolean): Promise { let tableInfo = cache && cache[name]; if (!tableInfo) { 
tableInfo = new TableMetadata(name); @@ -188,12 +202,12 @@ class SchemaParser { } } - async _getFirstRow(query) { + protected async _getFirstRow(query) { const rows = await this._getRows(query); return rows[0]; } - async _getRows(query) { + protected async _getRows(query) { const response = await this.cc.query(query); return response.rows; } @@ -204,7 +218,7 @@ class SchemaParser { * @param {Object} cache * @returns {Promise} */ - async getUdt(keyspaceName, name, cache) { + async getUdt(keyspaceName: string, name: string, cache: object): Promise { let udtInfo = cache && cache[name]; if (!udtInfo) { udtInfo = new events.EventEmitter(); @@ -221,7 +235,7 @@ class SchemaParser { return udtInfo; } if (udtInfo.loading) { - return promiseUtils.fromEvent(udtInfo, 'load'); + return promiseUtils.fromEvent(udtInfo, 'load') as Promise; } udtInfo.loading = true; const query = format(this.selectUdt, keyspaceName, name); @@ -252,8 +266,7 @@ class SchemaParser { * @returns {Promise} * @abstract */ - _parseUdt(udtInfo, row) { - } + protected abstract _parseUdt(udtInfo, row: Row): Promise; /** * Builds the metadata based on the table and column rows @@ -266,7 +279,7 @@ class SchemaParser { * @returns {Promise} * @throws {Error} */ - async _parseTableOrView(tableInfo, tableRow, columnRows, indexRows, virtual) { + protected async _parseTableOrView(tableInfo: TableMetadata, tableRow: Row, columnRows: Array, indexRows: Array, virtual: boolean): Promise { } /** @@ -276,8 +289,7 @@ class SchemaParser { * @param {Object} cache * @returns {Promise} */ - getMaterializedView(keyspaceName, name, cache) { - } + abstract getMaterializedView(keyspaceName: string, name: string, cache: object): Promise; /** * @param {String} keyspaceName @@ -286,9 +298,9 @@ class SchemaParser { * @param {Object} cache * @returns {Promise} */ - async getFunctions(keyspaceName, name, aggregate, cache) { + async getFunctions(keyspaceName: string, name: string, aggregate: boolean, cache: object): Promise> { /** @type {String} */ - let query = this.selectFunctions; + let query: string = this.selectFunctions; let parser = row => this._parseFunction(row); if (aggregate) { query = this.selectAggregates; @@ -338,36 +350,34 @@ class SchemaParser { * @param {Row} row * @returns {Promise} */ - _parseAggregate(row) { - } + protected abstract _parseAggregate(row: Row): Promise; /** * @abstract * @param {Row} row * @returns {Promise} */ - _parseFunction(row) { - } + protected abstract _parseFunction(row: Row): Promise; /** @returns {Map} */ - _asMap(obj) { + protected _asMap(obj): Map { if (!obj) { return new Map(); } if (this.encodingOptions.map && obj instanceof this.encodingOptions.map) { // Its already a Map or a polyfill of a Map - return obj; + return obj as Map; } return new Map(Object.keys(obj).map(k => [k, obj[k]])); } - _mapAsObject(map) { + protected _mapAsObject(map) { if (!map) { return map; } if (this.encodingOptions.map && map instanceof this.encodingOptions.map) { const result = {}; - map.forEach((value, key) => result[key] = value); + (map as Map).forEach((value, key) => result[key] = value); return result; } return map; @@ -376,7 +386,7 @@ class SchemaParser { /** * Used to parse schema information for Cassandra versions 1.2.x, and 2.x - * @ignore + * @ignore @internal */ class SchemaParserV1 extends SchemaParser { @@ -384,7 +394,7 @@ class SchemaParserV1 extends SchemaParser { * @param {ClientOptions} options * @param {ControlConnection} cc */ - constructor(options, cc) { + constructor(options: ClientOptions, cc: ControlConnection) { 
super(options, cc); this.selectTable = _selectTableV1; this.selectColumns = _selectColumnsV1; @@ -561,7 +571,7 @@ class SchemaParserV1 extends SchemaParser { aggregate.initConditionRaw = row['initcond']; aggregate.argumentTypes = (row['argument_types'] || utils.emptyArray).map(name => encoder.parseFqTypeName(name)); aggregate.stateType = encoder.parseFqTypeName(row['state_type']); - const initConditionValue = encoder.decode(aggregate.initConditionRaw, aggregate.stateType); + const initConditionValue = encoder.decode(aggregate.initConditionRaw, aggregate.stateType as any); if (initConditionValue !== null && typeof initConditionValue !== 'undefined') { aggregate.initCondition = initConditionValue.toString(); } @@ -608,16 +618,17 @@ class SchemaParserV1 extends SchemaParser { * @param {ClientOptions} options The client options * @param {ControlConnection} cc The control connection to be used * @param {Function} udtResolver The function to be used to retrieve the udts. - * @ignore + * @ignore @internal */ class SchemaParserV2 extends SchemaParser { + udtResolver: Function; /** * @param {ClientOptions} options The client options * @param {ControlConnection} cc The control connection to be used * @param {Function} udtResolver The function to be used to retrieve the udts. */ - constructor(options, cc, udtResolver) { + constructor(options: ClientOptions, cc: ControlConnection, udtResolver: Function) { super(options, cc); this.udtResolver = udtResolver; this.selectTable = _selectTableV2; @@ -683,7 +694,7 @@ class SchemaParserV2 extends SchemaParser { } } - _parseKeyspace(row, virtual) { + _parseKeyspace(row, virtual?) { const replication = row['replication']; let strategy; let strategyOptions; @@ -866,7 +877,7 @@ class SchemaParserV2 extends SchemaParser { * * This parser similar to [SchemaParserV2] expect it also parses virtual * keyspaces. - * @ignore + * @ignore @internal */ class SchemaParserV3 extends SchemaParserV2 { /** @@ -874,7 +885,7 @@ class SchemaParserV3 extends SchemaParserV2 { * @param {ControlConnection} cc The control connection to be used * @param {Function} udtResolver The function to be used to retrieve the udts. */ - constructor(options, cc, udtResolver) { + constructor(options: ClientOptions, cc: ControlConnection, udtResolver: Function) { super(options, cc, udtResolver); this.supportsVirtual = true; } @@ -945,7 +956,7 @@ class SchemaParserV3 extends SchemaParserV2 { * We also need to remove the static keyword for all other columns in the table. * @param {module:metadata~TableMetadata} tableInfo */ -function pruneStaticCompactTableColumns(tableInfo) { +function pruneStaticCompactTableColumns(tableInfo: TableMetadata) { let i; let c; //remove "column1 text" clustering column @@ -955,8 +966,8 @@ function pruneStaticCompactTableColumns(tableInfo) { tableInfo.columns.splice(index, 1); delete tableInfo.columnsByName[c.name]; } - tableInfo.clusteringKeys = utils.emptyArray; - tableInfo.clusteringOrder = utils.emptyArray; + tableInfo.clusteringKeys = utils.emptyArray as any[]; + tableInfo.clusteringOrder = utils.emptyArray as any[]; //remove regular columns and set the static columns to non-static i = tableInfo.columns.length; while (i--) { @@ -976,17 +987,17 @@ function pruneStaticCompactTableColumns(tableInfo) { * This column shouldn't be exposed to the user but is currently returned by C*. 
* @param {module:metadata~TableMetadata} tableInfo */ -function pruneDenseTableColumns(tableInfo) { +function pruneDenseTableColumns(tableInfo: TableMetadata) { let i = tableInfo.columns.length; while (i--) { const c = tableInfo.columns[i]; - if (!c.isStatic && c.type.code === types.dataTypes.custom && c.type.info === 'empty') { + if (!c["isStatic"] && c.type.code === types.dataTypes.custom && c.type.info === 'empty') { // remove "value blob" regular column tableInfo.columns.splice(i, 1); delete tableInfo.columnsByName[c.name]; continue; } - c.isStatic = false; + c["isStatic"] = false; } } @@ -1017,7 +1028,7 @@ function getTokenToReplicaMapper(strategy, strategyOptions) { * @param {Number} replicationFactor * @returns {function} */ -function getTokenToReplicaSimpleMapper(replicationFactor) { +function getTokenToReplicaSimpleMapper(replicationFactor: number): Function { return (function tokenSimpleStrategy(tokenizer, ringTokensAsStrings, primaryReplicas) { const ringLength = ringTokensAsStrings.length; const rf = Math.min(replicationFactor, ringLength); @@ -1048,7 +1059,7 @@ function getTokenToReplicaSimpleMapper(replicationFactor) { * @returns {Function} * @private */ -function getTokenToReplicaNetworkMapper(replicationFactors) { +function getTokenToReplicaNetworkMapper(replicationFactors: object): Function { // A(DC1) // // H B(DC2) @@ -1125,7 +1136,7 @@ function getTokenToReplicaNetworkMapper(replicationFactors) { /** * @returns {Number} The number of skipped hosts added. */ -function addSkippedHosts(dcRf, dcReplicas, tokenReplicas, skippedHosts) { +function addSkippedHosts(dcRf, dcReplicas, tokenReplicas, skippedHosts): number { let i; for (i = 0; i < dcRf - dcReplicas && i < skippedHosts.length; i++) { tokenReplicas.push(skippedHosts[i]); @@ -1160,8 +1171,8 @@ function isDoneForToken(replicationFactors, datacenters, replicasByDc) { * @param {SchemaParser} [currentInstance] The current instance * @returns {SchemaParser} */ -function getByVersion(options, cc, udtResolver, version, currentInstance) { - let parserConstructor = SchemaParserV1; +function getByVersion(options: ClientOptions, cc: ControlConnection, udtResolver: Function, version?: Array, currentInstance?: SchemaParser): SchemaParser { + let parserConstructor : any = SchemaParserV1; if (version && version[0] === 3) { parserConstructor = SchemaParserV2; } else if (version && version[0] >= 4) { @@ -1173,5 +1184,17 @@ function getByVersion(options, cc, udtResolver, version, currentInstance) { return currentInstance; } -exports.getByVersion = getByVersion; -exports.isDoneForToken = isDoneForToken; \ No newline at end of file +interface Keyspace {name, durableWrites, strategy, strategyOptions, tokenToReplica, udts, tables, functions, aggregates, virtual, views, graphEngine} + +export default { + getByVersion, + isDoneForToken, + SchemaParser +}; + +export { + getByVersion, + isDoneForToken, + SchemaParser, + type Keyspace +}; diff --git a/lib/metadata/table-metadata.js b/lib/metadata/table-metadata.js deleted file mode 100644 index 87e171ef2..000000000 --- a/lib/metadata/table-metadata.js +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const util = require('util'); -const DataCollection = require('./data-collection'); -/** - * Creates a new instance of TableMetadata - * @classdesc Describes a table - * @param {String} name Name of the Table - * @augments {module:metadata~DataCollection} - * @alias module:metadata~TableMetadata - * @constructor - */ -function TableMetadata(name) { - DataCollection.call(this, name); - /** - * Applies only to counter tables. - * When set to true, replicates writes to all affected replicas regardless of the consistency level specified by - * the client for a write request. For counter tables, this should always be set to true. - * @type {Boolean} - */ - this.replicateOnWrite = true; - /** - * Returns the memtable flush period (in milliseconds) option for this table. - * @type {Number} - */ - this.memtableFlushPeriod = 0; - /** - * Returns the index interval option for this table. - *

- * Note: this option is only available in Apache Cassandra 2.0. It is deprecated in Apache Cassandra 2.1 and - * above, and will therefore return null for 2.1 nodes. - *

- * @type {Number|null} - */ - this.indexInterval = null; - /** - * Determines whether the table uses the COMPACT STORAGE option. - * @type {Boolean} - */ - this.isCompact = false; - /** - * - * @type {Array.} - */ - this.indexes = null; - - /** - * Determines whether the Change Data Capture (CDC) flag is set for the table. - * @type {Boolean|null} - */ - this.cdc = null; - - /** - * Determines whether the table is a virtual table or not. - * @type {Boolean} - */ - this.virtual = false; -} - -util.inherits(TableMetadata, DataCollection); - -module.exports = TableMetadata; \ No newline at end of file diff --git a/lib/metadata/table-metadata.ts b/lib/metadata/table-metadata.ts new file mode 100644 index 000000000..f56ca1eeb --- /dev/null +++ b/lib/metadata/table-metadata.ts @@ -0,0 +1,124 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import DataCollection from "./data-collection"; +import Index from "./schema-index"; + + + +/** + * @classdesc Describes a table + * @augments {module:metadata~DataCollection} + * @alias module:metadata~TableMetadata + */ +class TableMetadata extends DataCollection { + /** + * Applies only to counter tables. + * When set to true, replicates writes to all affected replicas regardless of the consistency level specified by + * the client for a write request. For counter tables, this should always be set to true. + * @type {Boolean} + */ + replicateOnWrite: boolean; + /** + * Returns the memtable flush period (in milliseconds) option for this table. + * @type {Number} + */ + memtableFlushPeriod: number; + /** + * Returns the index interval option for this table. + *

+ * Note: this option is only available in Apache Cassandra 2.0. It is deprecated in Apache Cassandra 2.1 and + * above, and will therefore return null for 2.1 nodes. + *

+ * @type {Number|null} + */ + indexInterval?: number; + /** + * Determines whether the table uses the COMPACT STORAGE option. + * @type {Boolean} + */ + isCompact: boolean; + /** + * + * @type {Array.} + */ + indexes: Array; + + /** + * Determines whether the Change Data Capture (CDC) flag is set for the table. + * @type {Boolean|null} + */ + cdc?: boolean; + + /** + * Determines whether the table is a virtual table or not. + * @type {Boolean} + */ + virtual: boolean; + /** + * Creates a new instance of TableMetadata + * @internal + * @param {String} name Name of the Table + * @constructor + */ + constructor(name: string) { + super(name); + /** + * Applies only to counter tables. + * When set to true, replicates writes to all affected replicas regardless of the consistency level specified by + * the client for a write request. For counter tables, this should always be set to true. + * @type {Boolean} + */ + this.replicateOnWrite = true; + /** + * Returns the memtable flush period (in milliseconds) option for this table. + * @type {Number} + */ + this.memtableFlushPeriod = 0; + /** + * Returns the index interval option for this table. + *

+ * Note: this option is only available in Apache Cassandra 2.0. It is deprecated in Apache Cassandra 2.1 and + * above, and will therefore return null for 2.1 nodes. + *

+ * @type {Number|null} + */ + this.indexInterval = null; + /** + * Determines whether the table uses the COMPACT STORAGE option. + * @type {Boolean} + */ + this.isCompact = false; + /** + * + * @type {Array.} + */ + this.indexes = null; + + /** + * Determines whether the Change Data Capture (CDC) flag is set for the table. + * @type {Boolean|null} + */ + this.cdc = null; + + /** + * Determines whether the table is a virtual table or not. + * @type {Boolean} + */ + this.virtual = false; + } +} + +export default TableMetadata; \ No newline at end of file diff --git a/lib/metrics/client-metrics.js b/lib/metrics/client-metrics.ts similarity index 83% rename from lib/metrics/client-metrics.js rename to lib/metrics/client-metrics.ts index 88fe073e2..c829fcf24 100644 --- a/lib/metrics/client-metrics.js +++ b/lib/metrics/client-metrics.ts @@ -13,8 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +import { AuthenticationError, OperationTimedOutError, ResponseError } from "../errors"; + -'use strict'; /** * Represents a base class that is used to measure events from the server and the client as seen by the driver. @@ -26,82 +29,82 @@ class ClientMetrics { * Method invoked when an authentication error is obtained from the server. * @param {AuthenticationError|Error} e The error encountered. */ - onAuthenticationError(e) {} + onAuthenticationError(e: AuthenticationError | Error) {} /** * Method invoked when an error (different than a server or client timeout, authentication or connection error) is * encountered when executing a request. * @param {OperationTimedOutError} e The timeout error. */ - onClientTimeoutError(e) {} + onClientTimeoutError(e: OperationTimedOutError) {} /** * Method invoked when there is a connection error. * @param {Error} e The error encountered. */ - onConnectionError(e) {} + onConnectionError(e: Error) {} /** * Method invoked when an error (different than a server or client timeout, authentication or connection error) is * encountered when executing a request. * @param {Error} e The error encountered. */ - onOtherError(e) {} + onOtherError(e: Error) {} /** * Method invoked when a read timeout error is obtained from the server. * @param {ResponseError} e The error encountered. */ - onReadTimeoutError(e) {} + onReadTimeoutError(e: ResponseError) {} /** * Method invoked when a write timeout error is obtained from the server. * @param {ResponseError} e The error encountered. */ - onWriteTimeoutError(e) {} + onWriteTimeoutError(e: ResponseError) {} /** * Method invoked when an unavailable error is obtained from the server. * @param {ResponseError} e The error encountered. */ - onUnavailableError(e) {} + onUnavailableError(e: ResponseError) {} /** * Method invoked when an execution is retried as a result of a client-level timeout. * @param {Error} e The error that caused the retry. */ - onClientTimeoutRetry(e) {} + onClientTimeoutRetry(e: Error) {} /** * Method invoked when an error (other than a server or client timeout) is retried. * @param {Error} e The error that caused the retry. */ - onOtherErrorRetry(e) {} + onOtherErrorRetry(e: Error) {} /** * Method invoked when an execution is retried as a result of a read timeout from the server (coordinator to replica). * @param {Error} e The error that caused the retry. 
*/ - onReadTimeoutRetry(e) {} + onReadTimeoutRetry(e: Error) {} /** * Method invoked when an execution is retried as a result of an unavailable error from the server. * @param {Error} e The error that caused the retry. */ - onUnavailableRetry(e) {} + onUnavailableRetry(e: Error) {} /** * Method invoked when an execution is retried as a result of a write timeout from the server (coordinator to * replica). * @param {Error} e The error that caused the retry. */ - onWriteTimeoutRetry(e) {} + onWriteTimeoutRetry(e: Error) {} /** * Method invoked when an error is marked as ignored by the retry policy. * @param {Error} e The error that was ignored by the retry policy. */ - onIgnoreError(e) {} + onIgnoreError(e: Error) {} /** * Method invoked when a speculative execution is started. @@ -113,7 +116,7 @@ class ClientMetrics { * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. */ - onSuccessfulResponse(latency) {} + onSuccessfulResponse(latency: Array) {} /** * Method invoked when any response is obtained, the response can be the result of a successful execution or a @@ -121,9 +124,9 @@ class ClientMetrics { * @param {Array} latency The latency represented in a [seconds, nanoseconds] tuple * Array, where nanoseconds is the remaining part of the real time that can't be represented in second precision. */ - onResponse(latency) { + onResponse(latency: Array) { } } -module.exports = ClientMetrics; \ No newline at end of file +export default ClientMetrics; \ No newline at end of file diff --git a/lib/metrics/default-metrics.js b/lib/metrics/default-metrics.ts similarity index 80% rename from lib/metrics/default-metrics.js rename to lib/metrics/default-metrics.ts index 1df5dfaec..dc8eee1fb 100644 --- a/lib/metrics/default-metrics.js +++ b/lib/metrics/default-metrics.ts @@ -13,12 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import EventEmitter from "events"; +import type { AuthenticationError, OperationTimedOutError, ResponseError } from "../errors"; +import ClientMetrics from "./client-metrics"; -'use strict'; -const ClientMetrics = require('./client-metrics'); -const EventEmitter = require('events'); +//TODO: The fields like errors were not exposed. I believe we should. /** * A default implementation of [ClientMetrics]{@link module:metrics~ClientMetrics} that exposes the driver events as * Node.js events. @@ -35,6 +36,29 @@ const EventEmitter = require('events'); * defaultMetrics.responses.on('increment', latency => myHistogram.record(latency)); */ class DefaultMetrics extends ClientMetrics { + errors: EventEmitter & { + authentication: EventEmitter; + clientTimeout: EventEmitter; + connection: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + writeTimeout: EventEmitter; + }; + retries: EventEmitter & { + clientTimeout: EventEmitter; + other: EventEmitter; + readTimeout: EventEmitter; + unavailable: EventEmitter; + writeTimeout: EventEmitter; + }; + speculativeExecutions: EventEmitter & { + increment: EventEmitter; + }; + ignoredErrors: EventEmitter; + responses: EventEmitter & { + success: EventEmitter; + }; /** * Creates a new instance of [DefaultMetrics]{@link module:metrics~DefaultMetrics}. 
*/ @@ -53,6 +77,7 @@ class DefaultMetrics extends ClientMetrics { * @property {EventEmitter} unavailable Emits the unavailable error events obtained from the server. * @property {EventEmitter} writeTimeout Emits the write timeout error events obtained from the server */ + // @ts-ignore this.errors = new EventEmitter(); this.errors.authentication = new EventEmitter(); this.errors.clientTimeout = new EventEmitter(); @@ -75,6 +100,7 @@ class DefaultMetrics extends ClientMetrics { * @property {EventEmitter} writeTimeout Emits an execution is retried as a result of a write timeout error from the * server (coordinator to replica). */ + // @ts-ignore this.retries = new EventEmitter(); this.retries.clientTimeout = new EventEmitter(); this.retries.other = new EventEmitter(); @@ -86,12 +112,14 @@ class DefaultMetrics extends ClientMetrics { * Emits events when a speculative execution is started. * @type {EventEmitter} */ + // @ts-ignore this.speculativeExecutions = new EventEmitter(); /** * Emits events when an error is ignored by the retry policy. * @type {EventEmitter} */ + // @ts-ignore this.ignoredErrors = new EventEmitter(); /** @@ -99,83 +127,84 @@ class DefaultMetrics extends ClientMetrics { * @type {EventEmitter} * @property {EventEmitter} success Emits when a response was obtained as the result of a successful execution. */ + // @ts-ignore this.responses = new EventEmitter(); this.responses.success = new EventEmitter(); } /** @override */ - onAuthenticationError(e) { + onAuthenticationError(e: Error | AuthenticationError) { this.errors.authentication.emit('increment', e); this.errors.emit('increment', e);} /** @override */ - onConnectionError(e) { + onConnectionError(e: Error) { this.errors.connection.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onReadTimeoutError(e) { + onReadTimeoutError(e : ResponseError) { this.errors.readTimeout.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onWriteTimeoutError(e) { + onWriteTimeoutError(e: ResponseError) { this.errors.writeTimeout.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onUnavailableError(e) { + onUnavailableError(e: Error) { this.errors.unavailable.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onClientTimeoutError(e) { + onClientTimeoutError(e: OperationTimedOutError) { this.errors.clientTimeout.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onOtherError(e) { + onOtherError(e: Error) { this.errors.other.emit('increment', e); this.errors.emit('increment', e); } /** @override */ - onClientTimeoutRetry(e) { + onClientTimeoutRetry(e: Error) { this.retries.clientTimeout.emit('increment', e); this.retries.emit('increment', e); } /** @override */ - onOtherErrorRetry(e) { + onOtherErrorRetry(e: Error) { this.retries.other.emit('increment', e); this.retries.emit('increment', e); } /** @override */ - onReadTimeoutRetry(e) { + onReadTimeoutRetry(e: Error) { this.retries.readTimeout.emit('increment', e); this.retries.emit('increment', e); } /** @override */ - onUnavailableRetry(e) { + onUnavailableRetry(e: Error) { this.retries.unavailable.emit('increment', e); this.retries.emit('increment', e); } /** @override */ - onWriteTimeoutRetry(e) { + onWriteTimeoutRetry(e: Error) { this.retries.writeTimeout.emit('increment', e); this.retries.emit('increment', e); } /** @override */ - onIgnoreError(e) { + onIgnoreError(e: Error) { this.ignoredErrors.emit('increment', e); } @@ -185,14 +214,14 @@ class 
DefaultMetrics extends ClientMetrics { } /** @override */ - onSuccessfulResponse(latency) { + onSuccessfulResponse(latency: number[]) { this.responses.success.emit('increment', latency); } /** @override */ - onResponse(latency) { + onResponse(latency: number[]) { this.responses.emit('increment', latency); } } -module.exports = DefaultMetrics; \ No newline at end of file +export default DefaultMetrics; \ No newline at end of file diff --git a/lib/metrics/index.d.ts b/lib/metrics/index.d.ts deleted file mode 100644 index 4ad80054c..000000000 --- a/lib/metrics/index.d.ts +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { errors } from '../../'; - -export namespace metrics { - interface ClientMetrics { - onAuthenticationError(e: Error | errors.AuthenticationError): void; - - onClientTimeoutError(e: errors.OperationTimedOutError): void; - - onClientTimeoutRetry(e: Error): void; - - onConnectionError(e: Error): void; - - onIgnoreError(e: Error): void; - - onOtherError(e: Error): void; - - onOtherErrorRetry(e: Error): void; - - onReadTimeoutError(e: errors.ResponseError): void; - - onReadTimeoutRetry(e: Error): void; - - onResponse(latency: number[]): void; - - onSpeculativeExecution(): void; - - onSuccessfulResponse(latency: number[]): void; - - onUnavailableError(e: errors.ResponseError): void; - - onUnavailableRetry(e: Error): void; - - onWriteTimeoutError(e: errors.ResponseError): void; - - onWriteTimeoutRetry(e: Error): void; - } - - class DefaultMetrics implements ClientMetrics { - constructor(); - - onAuthenticationError(e: Error | errors.AuthenticationError): void; - - onClientTimeoutError(e: errors.OperationTimedOutError): void; - - onClientTimeoutRetry(e: Error): void; - - onConnectionError(e: Error): void; - - onIgnoreError(e: Error): void; - - onOtherError(e: Error): void; - - onOtherErrorRetry(e: Error): void; - - onReadTimeoutError(e: errors.ResponseError): void; - - onReadTimeoutRetry(e: Error): void; - - onResponse(latency: number[]): void; - - onSpeculativeExecution(): void; - - onSuccessfulResponse(latency: number[]): void; - - onUnavailableError(e: errors.ResponseError): void; - - onUnavailableRetry(e: Error): void; - - onWriteTimeoutError(e: errors.ResponseError): void; - - onWriteTimeoutRetry(e: Error): void; - } -} \ No newline at end of file diff --git a/lib/metrics/index.js b/lib/metrics/index.js deleted file mode 100644 index 9afb03a05..000000000 --- a/lib/metrics/index.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -const ClientMetrics = require('./client-metrics'); -const DefaultMetrics = require('./default-metrics'); - -/** - * The metrics module contains interfaces and implementations used by the driver to expose - * measurements of its internal behavior and of the server as seen from the driver side. - * @module metrics - */ - -module.exports = { ClientMetrics, DefaultMetrics }; \ No newline at end of file diff --git a/lib/datastax/index.d.ts b/lib/metrics/index.ts similarity index 74% rename from lib/datastax/index.d.ts rename to lib/metrics/index.ts index e41483f8e..aabcb0a1c 100644 --- a/lib/datastax/index.d.ts +++ b/lib/metrics/index.ts @@ -13,12 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import ClientMetrics from "./client-metrics"; +import DefaultMetrics from "./default-metrics"; -import * as graphModule from './graph'; -import * as searchModule from './search'; -export namespace datastax { - export import graph = graphModule.graph; - export import search = searchModule.search; -} \ No newline at end of file +export { ClientMetrics, DefaultMetrics }; +export default { ClientMetrics, DefaultMetrics }; \ No newline at end of file diff --git a/lib/operation-state.js b/lib/operation-state.ts similarity index 82% rename from lib/operation-state.js rename to lib/operation-state.ts index 8fe623d1d..1d92ac2d6 100644 --- a/lib/operation-state.js +++ b/lib/operation-state.ts @@ -13,12 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import util from "util"; +import errors from "./errors"; +import type { ExecutionOptions } from "./execution-options"; +import type { Request } from "./requests"; +import requests from "./requests"; +import utils from "./utils"; + -const util = require('util'); -const utils = require('./utils'); -const errors = require('./errors'); -const requests = require('./requests'); const ExecuteRequest = requests.ExecuteRequest; const QueryRequest = requests.QueryRequest; @@ -27,19 +29,27 @@ const state = { completed: 1, timedOut: 2, cancelled: 3 -}; +} as const; /** * Maintains the state information of a request inside a Connection. + * @ignore @internal */ class OperationState { + streamId: number; + request: Request; + private _rowCallback: Function; + private _callback: Function; + private _timeout: NodeJS.Timeout; + private _state: number; + private _rowIndex: number; /** * Creates a new instance of OperationState. * @param {Request} request * @param {Function} rowCallback * @param {Function} callback */ - constructor(request, rowCallback, callback) { + constructor(request: Request, rowCallback: Function, callback: Function) { this.request = request; this._rowCallback = rowCallback; this._callback = callback; @@ -78,7 +88,7 @@ class OperationState { * Determines if the response is going to be yielded by row. * @return {boolean} */ - isByRow() { + isByRow(): boolean { return this._rowCallback && (this.request instanceof ExecuteRequest || this.request instanceof QueryRequest); } @@ -90,7 +100,7 @@ class OperationState { * @param {Function} onTimeout The callback to be invoked when it times out. * @param {Function} onResponse The callback to be invoked if a response is obtained after it timed out. 
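The timeout handling sketched in this file boils down to a small state machine: a timer is only armed while the operation is still in its initial state, and a response that arrives after the timeout fired is silently dropped. A standalone illustration of that pattern (not driver API, just the shape OperationState implements):

```ts
// Standalone sketch of the guard OperationState applies around its request timer.
type OpState = "init" | "completed" | "timedOut";

class TimedOperation {
  private state: OpState = "init";
  private timer?: NodeJS.Timeout;

  constructor(private readonly onTimeout: () => void) {}

  // Mirrors setRequestTimeout: only arm the timer while the operation is still pending.
  armTimeout(millis: number): void {
    if (this.state !== "init") {
      return;
    }
    this.timer = setTimeout(() => {
      this.state = "timedOut";
      this.onTimeout();
    }, millis);
  }

  // Mirrors setResult: a response that arrives after the timeout fired is ignored.
  complete(): void {
    if (this.state !== "init") {
      return;
    }
    this.state = "completed";
    if (this.timer !== undefined) {
      clearTimeout(this.timer);
    }
  }
}
```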
*/ - setRequestTimeout(execOptions, defaultReadTimeout, address, onTimeout, onResponse) { + setRequestTimeout(execOptions: ExecutionOptions, defaultReadTimeout: number, address: string, onTimeout: Function, onResponse: Function) { if (this._state !== state.init) { // No need to set the timeout return; @@ -125,7 +135,7 @@ class OperationState { * @param {Function} onResponse * @private */ - _markAsTimedOut(err, onResponse) { + _markAsTimedOut(err: Error, onResponse: Function) { if (this._state !== state.init) { return; } @@ -149,16 +159,16 @@ class OperationState { * @param {Object} [result] * @param {Number} [length] */ - setResult(err, result, length) { + setResult(err: Error, result?: object, length?: number) { this._markAsCompleted(); this._swapCallbackAndInvoke(err, result, length); } - _swapCallbackAndInvoke(err, result, length, newCallback) { + _swapCallbackAndInvoke(err, result, length, newCallback?) { const callback = this._callback; this._callback = newCallback || utils.noop; callback(err, result, length); } } -module.exports = OperationState; \ No newline at end of file +export default OperationState; \ No newline at end of file diff --git a/lib/policies/address-resolution.js b/lib/policies/address-resolution.js deleted file mode 100644 index e0a0fc0be..000000000 --- a/lib/policies/address-resolution.js +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const dns = require('dns'); -const util = require('util'); -const utils = require('../utils'); -/** @module policies/addressResolution */ -/** - * @class - * @classdesc - * Translates IP addresses received from Cassandra nodes into locally queryable - * addresses. - *

- * The driver auto-detects new Cassandra nodes added to the cluster through server - * side pushed notifications and through checking the system tables. For each - * node, the address received will correspond to the address set as - * rpc_address in the node yaml file. In most case, this is the correct - * address to use by the driver and that is what is used by default. However, - * sometimes the addresses received through this mechanism will either not be - * reachable directly by the driver or should not be the preferred address to use - * to reach the node (for instance, the rpc_address set on Cassandra nodes - * might be a private IP, but some clients may have to use a public IP, or - * pass by a router to reach that node). This interface allows to deal with - * such cases, by allowing to translate an address as sent by a Cassandra node - * to another address to be used by the driver for connection. - *

- * Please note that the contact points addresses provided while creating the - * {@link Client} instance are not "translated", only IP address retrieve from or sent - * by Cassandra nodes to the driver are. - * @constructor - */ -function AddressTranslator() { - -} - -/** - * Translates a Cassandra rpc_address to another address if necessary. - * @param {String} address the address of a node as returned by Cassandra. - *

- * Note that if the rpc_address of a node has been configured to 0.0.0.0 - * server side, then the provided address will be the node listen_address, - * *not* 0.0.0.0. - *

- * @param {Number} port The port number, as specified in the [protocolOptions]{@link ClientOptions} at Client instance creation (9042 by default). - * @param {Function} callback Callback to invoke with endpoint as first parameter. - * The endpoint is an string composed of the IP address and the port number in the format ipAddress:port. - */ -AddressTranslator.prototype.translate = function (address, port, callback) { - callback(address + ':' + port); -}; - -/** - * @class - * @classdesc - * {@link AddressTranslator} implementation for multi-region EC2 deployments where clients are also deployed in EC2. - *

- * Its distinctive feature is that it translates addresses according to the location of the Cassandra host: - *

- *
- *  • addresses in different EC2 regions (than the client) are unchanged
- *  • addresses in the same EC2 region are translated to private IPs
- *

- * This optimizes network costs, because Amazon charges more for communication over public IPs. - *

- * @constructor - */ -function EC2MultiRegionTranslator() { - -} - -util.inherits(EC2MultiRegionTranslator, AddressTranslator); - -/** - * Addresses in the same EC2 region are translated to private IPs and addresses in - * different EC2 regions (than the client) are unchanged - */ -EC2MultiRegionTranslator.prototype.translate = function (address, port, callback) { - let newAddress = address; - const self = this; - let name; - utils.series([ - function resolve(next) { - dns.reverse(address, function (err, hostNames) { - if (err) { - return next(err); - } - if (!hostNames) { - return next(); - } - name = hostNames[0]; - next(); - }); - }, - function lookup(next) { - if (!name) { - return next(); - } - dns.lookup(name, function (err, lookupAddress) { - if (err) { - return next(err); - } - newAddress = lookupAddress; - next(); - }); - }], function (err) { - if (err) { - //there was an issue while doing dns resolution - self.logError(address, err); - } - callback(newAddress + ':' + port); - }); -}; - -/** - * Log method called to log errors that occurred while performing dns resolution. - * You can assign your own method to the class instance to do proper logging. - * @param {String} address - * @param {Error} err - */ -EC2MultiRegionTranslator.prototype.logError = function (address, err) { - //Do nothing by default -}; - -exports.AddressTranslator = AddressTranslator; -exports.EC2MultiRegionTranslator = EC2MultiRegionTranslator; \ No newline at end of file diff --git a/lib/policies/address-resolution.ts b/lib/policies/address-resolution.ts new file mode 100644 index 000000000..b009f7cd1 --- /dev/null +++ b/lib/policies/address-resolution.ts @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import dns from "dns"; +import utils from "../utils"; + +/** + * @class + * @classdesc + * Translates IP addresses received from Cassandra nodes into locally queryable + * addresses. + *

+ * The driver auto-detects new Cassandra nodes added to the cluster through server + * side pushed notifications and through checking the system tables. For each + * node, the address received will correspond to the address set as + * rpc_address in the node yaml file. In most cases, this is the correct + * address to use by the driver and that is what is used by default. However, + * sometimes the addresses received through this mechanism will either not be + * reachable directly by the driver or should not be the preferred address to use + * to reach the node (for instance, the rpc_address set on Cassandra nodes + * might be a private IP, but some clients may have to use a public IP, or + * pass by a router to reach that node). This interface allows dealing with + * such cases, by translating an address as sent by a Cassandra node + * to another address to be used by the driver for connection. + *

+ * Please note that the contact point addresses provided while creating the + * {@link Client} instance are not "translated", only IP addresses retrieved from or sent + * by Cassandra nodes to the driver are. + */ +class AddressTranslator { + /** + * Translates a Cassandra rpc_address to another address if necessary. + * @param {String} address the address of a node as returned by Cassandra. + *

+ * Note that if the rpc_address of a node has been configured to 0.0.0.0 + * server side, then the provided address will be the node listen_address, + * *not* 0.0.0.0. + *

+ * @param {Number} port The port number, as specified in the [protocolOptions]{@link ClientOptions} at Client instance creation (9042 by default). + * @param {Function} callback Callback to invoke with the endpoint as first parameter. + * The endpoint is a string composed of the IP address and the port number in the format ipAddress:port. + */ + translate(address: string, port: number, callback: Function): void { + callback(`${address}:${port}`); + } +} + +/** + * @class + * @classdesc + * {@link AddressTranslator} implementation for multi-region EC2 deployments where clients are also deployed in EC2. + *

+ * Its distinctive feature is that it translates addresses according to the location of the Cassandra host: + *

+ *
+ *  • addresses in different EC2 regions (than the client) are unchanged
+ *  • addresses in the same EC2 region are translated to private IPs
+ *

+ * This optimizes network costs, because Amazon charges more for communication over public IPs. + *

+ */ +class EC2MultiRegionTranslator extends AddressTranslator { + /** + * Addresses in the same EC2 region are translated to private IPs and addresses in + * different EC2 regions (than the client) are unchanged + * @param {string} address The address of a node as returned by Cassandra. + * @param {number} port The port number, as specified in the protocol options. + * @param {Function} callback Callback to invoke with the translated endpoint. + */ + translate(address: string, port: number, callback: Function): void { + let newAddress = address; + const self = this; + let name; + utils.series([ + function resolve(next) { + dns.reverse(address, function (err, hostNames) { + if (err) { + return next(err); + } + if (!hostNames) { + return next(); + } + name = hostNames[0]; + next(); + }); + }, + function lookup(next) { + if (!name) { + return next(); + } + dns.lookup(name, function (err, lookupAddress) { + if (err) { + return next(err); + } + newAddress = lookupAddress; + next(); + }); + }], function (err) { + if (err) { + //there was an issue while doing dns resolution + self.logError(address, err); + } + callback(newAddress + ':' + port); + }); + } + + //TODO: not exposed. I believe it should. + /** + * Log method called to log errors that occurred while performing dns resolution. + * You can assign your own method to the class instance to do proper logging. + * @param {String} address + * @param {Error} err + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + logError(address: string, err: Error): void { + //Do nothing by default + } +} + +export { AddressTranslator, EC2MultiRegionTranslator }; +export default { AddressTranslator, EC2MultiRegionTranslator }; \ No newline at end of file diff --git a/lib/policies/index.d.ts b/lib/policies/index.d.ts deleted file mode 100644 index ffae185be..000000000 --- a/lib/policies/index.d.ts +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
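With the translator now expressed as a class, a custom implementation only needs to override `translate` and honour the callback contract shown above. A sketch, assuming the class keeps being re-exported through the public `policies.addressResolution` namespace; the NAT mapping below is purely illustrative:

```ts
import { policies } from "cassandra-driver";

// Maps known private rpc_addresses to the public addresses the client can actually reach.
class StaticNatTranslator extends policies.addressResolution.AddressTranslator {
  private readonly publicByPrivate = new Map<string, string>([
    ["10.0.0.5", "203.0.113.5"],
    ["10.0.0.6", "203.0.113.6"],
  ]);

  translate(address: string, port: number, callback: Function): void {
    const translated = this.publicByPrivate.get(address) ?? address;
    // The contract is callback-based: always invoke it with 'ipAddress:port'.
    callback(`${translated}:${port}`);
  }
}
```

An instance would then typically be supplied through the `policies.addressResolution` client option, in the same way as the EC2MultiRegionTranslator shown above.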
- */ - -import { Client, EmptyCallback, ExecutionOptions, Host, HostMap } from '../../'; -import { types } from '../types'; - - -export namespace policies { - function defaultAddressTranslator(): addressResolution.AddressTranslator; - - function defaultLoadBalancingPolicy(localDc?: string): loadBalancing.LoadBalancingPolicy; - - function defaultReconnectionPolicy(): reconnection.ReconnectionPolicy; - - function defaultRetryPolicy(): retry.RetryPolicy; - - function defaultSpeculativeExecutionPolicy(): speculativeExecution.SpeculativeExecutionPolicy; - - function defaultTimestampGenerator(): timestampGeneration.TimestampGenerator; - - namespace addressResolution { - interface AddressTranslator { - translate(address: string, port: number, callback: Function): void; - } - - class EC2MultiRegionTranslator implements AddressTranslator { - translate(address: string, port: number, callback: Function): void; - } - } - - namespace loadBalancing { - abstract class LoadBalancingPolicy { - init(client: Client, hosts: HostMap, callback: EmptyCallback): void; - - getDistance(host: Host): types.distance; - - newQueryPlan( - keyspace: string, - executionOptions: ExecutionOptions, - callback: (error: Error, iterator: Iterator) => void): void; - - getOptions(): Map; - } - - class DCAwareRoundRobinPolicy extends LoadBalancingPolicy { - constructor(localDc: string); - } - - class TokenAwarePolicy extends LoadBalancingPolicy { - constructor(childPolicy: LoadBalancingPolicy); - } - - class AllowListPolicy extends LoadBalancingPolicy { - constructor(childPolicy: LoadBalancingPolicy, allowList: string[]); - } - - class WhiteListPolicy extends AllowListPolicy { - } - - class RoundRobinPolicy extends LoadBalancingPolicy { - constructor(); - } - - class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { - constructor(options?: { localDc?: string, filter?: (host: Host) => boolean }); - } - } - - namespace reconnection { - class ConstantReconnectionPolicy implements ReconnectionPolicy { - constructor(delay: number); - - getOptions(): Map; - - newSchedule(): Iterator; - - } - - class ExponentialReconnectionPolicy implements ReconnectionPolicy { - constructor(baseDelay: number, maxDelay: number, startWithNoDelay?: boolean); - - getOptions(): Map; - - newSchedule(): Iterator; - } - - interface ReconnectionPolicy { - getOptions(): Map; - - newSchedule(): Iterator; - } - } - - namespace retry { - class DecisionInfo { - decision: number; - consistency: types.consistencies; - } - - class OperationInfo { - query: string; - executionOptions: ExecutionOptions; - nbRetry: number; - } - - class IdempotenceAwareRetryPolicy extends RetryPolicy { - constructor(childPolicy: RetryPolicy); - } - - class FallthroughRetryPolicy extends RetryPolicy { - constructor(); - } - - class RetryPolicy { - onReadTimeout( - info: OperationInfo, - consistency: types.consistencies, - received: number, - blockFor: number, - isDataPresent: boolean): DecisionInfo; - - onRequestError(info: OperationInfo, consistency: types.consistencies, err: Error): DecisionInfo; - - onUnavailable( - info: OperationInfo, consistency: types.consistencies, required: number, alive: boolean): DecisionInfo; - - onWriteTimeout( - info: OperationInfo, - consistency: types.consistencies, - received: number, - blockFor: number, - writeType: string): DecisionInfo; - - rethrowResult(): DecisionInfo; - - retryResult(consistency: types.consistencies, useCurrentHost?: boolean): DecisionInfo; - } - - namespace RetryDecision { - enum retryDecision { - ignore, - rethrow, - retry - } 
- } - } - - namespace speculativeExecution { - class ConstantSpeculativeExecutionPolicy implements SpeculativeExecutionPolicy { - constructor(delay: number, maxSpeculativeExecutions: number); - - getOptions(): Map; - - init(client: Client): void; - - newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: Function }; - - shutdown(): void; - } - - class NoSpeculativeExecutionPolicy implements SpeculativeExecutionPolicy { - constructor(); - - getOptions(): Map; - - init(client: Client): void; - - newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: Function }; - - shutdown(): void; - } - - interface SpeculativeExecutionPolicy { - getOptions(): Map; - - init(client: Client): void; - - newPlan(keyspace: string, queryInfo: string|Array): { nextExecution: Function }; - - shutdown(): void; - } - } - - namespace timestampGeneration { - class MonotonicTimestampGenerator implements TimestampGenerator { - constructor(warningThreshold: number, minLogInterval: number); - - getDate(): number; - - next(client: Client): types.Long | number; - } - - interface TimestampGenerator { - next(client: Client): types.Long|number; - } - } -} \ No newline at end of file diff --git a/lib/policies/index.js b/lib/policies/index.ts similarity index 59% rename from lib/policies/index.js rename to lib/policies/index.ts index 9590d6b57..7db850bc9 100644 --- a/lib/policies/index.js +++ b/lib/policies/index.ts @@ -13,28 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -/** - * Contains driver tuning policies to determine [load balancing]{@link module:policies/loadBalancing}, - * [retrying]{@link module:policies/retry} queries, [reconnecting]{@link module:policies/reconnection} to a node, - * [address resolution]{@link module:policies/addressResolution}, - * [timestamp generation]{@link module:policies/timestampGeneration} and - * [speculative execution]{@link module:policies/speculativeExecution}. - * @module policies - */ -const addressResolution = exports.addressResolution = require('./address-resolution'); -const loadBalancing = exports.loadBalancing = require('./load-balancing'); -const reconnection = exports.reconnection = require('./reconnection'); -const retry = exports.retry = require('./retry'); -const speculativeExecution = exports.speculativeExecution = require('./speculative-execution'); -const timestampGeneration = exports.timestampGeneration = require('./timestamp-generation'); + +import addressResolution, { type AddressTranslator } from './address-resolution'; +import loadBalancing, { type LoadBalancingPolicy } from './load-balancing'; +import reconnection, { type ReconnectionPolicy } from './reconnection'; +import retry, { type RetryPolicy } from './retry'; +import speculativeExecution, { type SpeculativeExecutionPolicy } from './speculative-execution'; +import timestampGeneration, { type TimestampGenerator } from './timestamp-generation'; /** * Returns a new instance of the default address translator policy used by the driver. * @returns {AddressTranslator} */ -exports.defaultAddressTranslator = function () { +const defaultAddressTranslator = function (): AddressTranslator { return new addressResolution.AddressTranslator(); }; @@ -46,7 +38,7 @@ exports.defaultAddressTranslator = function () { * provided in the {@link ClientOptions}.

* @returns {LoadBalancingPolicy} */ -exports.defaultLoadBalancingPolicy = function (localDc) { +const defaultLoadBalancingPolicy = function (localDc?: string): LoadBalancingPolicy { return new loadBalancing.DefaultLoadBalancingPolicy(localDc); }; @@ -54,7 +46,7 @@ exports.defaultLoadBalancingPolicy = function (localDc) { * Returns a new instance of the default retry policy used by the driver. * @returns {RetryPolicy} */ -exports.defaultRetryPolicy = function () { +const defaultRetryPolicy = function (): RetryPolicy { return new retry.RetryPolicy(); }; @@ -62,7 +54,7 @@ exports.defaultRetryPolicy = function () { * Returns a new instance of the default reconnection policy used by the driver. * @returns {ReconnectionPolicy} */ -exports.defaultReconnectionPolicy = function () { +const defaultReconnectionPolicy = function (): ReconnectionPolicy { return new reconnection.ExponentialReconnectionPolicy(1000, 10 * 60 * 1000, false); }; @@ -71,7 +63,7 @@ exports.defaultReconnectionPolicy = function () { * Returns a new instance of the default speculative execution policy used by the driver. * @returns {SpeculativeExecutionPolicy} */ -exports.defaultSpeculativeExecutionPolicy = function () { +const defaultSpeculativeExecutionPolicy = function (): SpeculativeExecutionPolicy { return new speculativeExecution.NoSpeculativeExecutionPolicy(); }; @@ -79,6 +71,48 @@ exports.defaultSpeculativeExecutionPolicy = function () { * Returns a new instance of the default timestamp generator used by the driver. * @returns {TimestampGenerator} */ -exports.defaultTimestampGenerator = function () { +const defaultTimestampGenerator = function (): TimestampGenerator { return new timestampGeneration.MonotonicTimestampGenerator(); }; + +/** + * Contains driver tuning policies to determine [load balancing]{@link module:policies/loadBalancing}, + * [retrying]{@link module:policies/retry} queries, [reconnecting]{@link module:policies/reconnection} to a node, + * [address resolution]{@link module:policies/addressResolution}, + * [timestamp generation]{@link module:policies/timestampGeneration} and + * [speculative execution]{@link module:policies/speculativeExecution}. + * @module policies + */ + +export { + addressResolution, defaultAddressTranslator, + defaultLoadBalancingPolicy, defaultReconnectionPolicy, defaultRetryPolicy, defaultSpeculativeExecutionPolicy, + defaultTimestampGenerator, loadBalancing, + reconnection, + retry, + speculativeExecution, + timestampGeneration, + LoadBalancingPolicy, +}; + +export * from './address-resolution'; +export * from './load-balancing'; +export * from './reconnection'; +export * from './retry'; +export * from './speculative-execution'; +export * from './timestamp-generation'; + +export default { + addressResolution, + loadBalancing, + reconnection, + retry, + speculativeExecution, + timestampGeneration, + defaultAddressTranslator, + defaultLoadBalancingPolicy, + defaultRetryPolicy, + defaultReconnectionPolicy, + defaultSpeculativeExecutionPolicy, + defaultTimestampGenerator +}; diff --git a/lib/policies/load-balancing.js b/lib/policies/load-balancing.js deleted file mode 100644 index de56e0d5c..000000000 --- a/lib/policies/load-balancing.js +++ /dev/null @@ -1,883 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
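The `default*` factories above mirror what the driver wires up when no policies are supplied; spelling them out makes the defaults explicit and easy to swap individually. A sketch, assuming these factories remain re-exported from the package's `policies` namespace:

```ts
import { Client, policies } from "cassandra-driver";

// Equivalent to the driver defaults, written out so any single policy can be replaced.
const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "dc1",
  policies: {
    addressResolution: policies.defaultAddressTranslator(),
    loadBalancing: policies.defaultLoadBalancingPolicy("dc1"),
    reconnection: policies.defaultReconnectionPolicy(),
    retry: policies.defaultRetryPolicy(),
    speculativeExecution: policies.defaultSpeculativeExecutionPolicy(),
    timestampGeneration: policies.defaultTimestampGenerator(),
  },
});
```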
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const util = require('util'); -const types = require('../types'); -const utils = require('../utils.js'); -const errors = require('../errors.js'); - -const doneIteratorObject = Object.freeze({ done: true }); -const newlyUpInterval = 60000; - -/** @module policies/loadBalancing */ -/** - * Base class for Load Balancing Policies - * @constructor - */ -function LoadBalancingPolicy() { - -} - -/** - * Initializes the load balancing policy, called after the driver obtained the information of the cluster. - * @param {Client} client - * @param {HostMap} hosts - * @param {Function} callback - */ -LoadBalancingPolicy.prototype.init = function (client, hosts, callback) { - this.client = client; - this.hosts = hosts; - callback(); -}; - -/** - * Returns the distance assigned by this policy to the provided host. - * @param {Host} host - */ -LoadBalancingPolicy.prototype.getDistance = function (host) { - return types.distance.local; -}; - -/** - * Returns an iterator with the hosts for a new query. - * Each new query will call this method. The first host in the result will - * then be used to perform the query. - * @param {String} keyspace Name of currently logged keyspace at Client level. - * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. - * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as - * second parameter. - */ -LoadBalancingPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { - callback(new Error('You must implement a query plan for the LoadBalancingPolicy class')); -}; - -/** - * Gets an associative array containing the policy options. - */ -LoadBalancingPolicy.prototype.getOptions = function () { - return new Map(); -}; - -/** - * This policy yield nodes in a round-robin fashion. - * @extends LoadBalancingPolicy - * @constructor - */ -function RoundRobinPolicy() { - this.index = 0; -} - -util.inherits(RoundRobinPolicy, LoadBalancingPolicy); - -/** - * Returns an iterator with the hosts to be used as coordinator for a query. - * @param {String} keyspace Name of currently logged keyspace at Client level. - * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. - * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as - * second parameter. - */ -RoundRobinPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { - if (!this.hosts) { - return callback(new Error('Load balancing policy not initialized')); - } - const hosts = this.hosts.values(); - const self = this; - let counter = 0; - - let planIndex = self.index % hosts.length; - self.index += 1; - if (self.index >= utils.maxInt) { - self.index = 0; - } - - callback(null, { - next: function () { - if (++counter > hosts.length) { - return doneIteratorObject; - } - return {value: hosts[planIndex++ % hosts.length], done: false}; - } - }); -}; - -/** - * A data-center aware Round-robin load balancing policy. 
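The query plan returned by `newQueryPlan` above is a hand-rolled iterator closed over a starting index, so each plan walks every host exactly once while consecutive plans start at different offsets. A standalone sketch of that iterator shape (illustrative, not the driver's internal code):

```ts
// A plan that yields each host once, starting at a given offset and wrapping around.
function roundRobinPlan<T>(hosts: T[], startIndex: number): Iterator<T> {
  let yielded = 0;
  let index = startIndex;
  return {
    next(): IteratorResult<T> {
      if (yielded++ >= hosts.length) {
        return { done: true, value: undefined };
      }
      return { done: false, value: hosts[index++ % hosts.length] };
    },
  };
}

// Two consecutive plans start at different offsets, spreading load across hosts.
const planA = roundRobinPlan(["h1", "h2", "h3"], 0); // h1, h2, h3
const planB = roundRobinPlan(["h1", "h2", "h3"], 1); // h2, h3, h1
```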
- * This policy provides round-robin queries over the nodes of the local - * data center. - * @param {?String} [localDc] local datacenter name. This value overrides the 'localDataCenter' Client option \ - * and is useful for cases where you have multiple execution profiles that you intend on using for routing - * requests to different data centers. - * @extends {LoadBalancingPolicy} - * @constructor - */ -function DCAwareRoundRobinPolicy(localDc) { - this.localDc = localDc; - this.index = 0; - /** @type {Array} */ - this.localHostsArray = null; -} - -util.inherits(DCAwareRoundRobinPolicy, LoadBalancingPolicy); - -/** - * Initializes the load balancing policy. - * @param {Client} client - * @param {HostMap} hosts - * @param {Function} callback - */ -DCAwareRoundRobinPolicy.prototype.init = function (client, hosts, callback) { - this.client = client; - this.hosts = hosts; - hosts.on('add', this._cleanHostCache.bind(this)); - hosts.on('remove', this._cleanHostCache.bind(this)); - - try { - setLocalDc(this, client, this.hosts); - } catch (err) { - return callback(err); - } - - callback(); -}; - -/** - * Returns the distance depending on the datacenter. - * @param {Host} host - */ -DCAwareRoundRobinPolicy.prototype.getDistance = function (host) { - if (host.datacenter === this.localDc) { - return types.distance.local; - } - - return types.distance.ignored; -}; - -DCAwareRoundRobinPolicy.prototype._cleanHostCache = function () { - this.localHostsArray = null; -}; - -DCAwareRoundRobinPolicy.prototype._resolveLocalHosts = function() { - const hosts = this.hosts.values(); - if (this.localHostsArray) { - //there were already calculated - return; - } - this.localHostsArray = []; - hosts.forEach(function (h) { - if (!h.datacenter) { - //not a remote dc node - return; - } - if (h.datacenter === this.localDc) { - this.localHostsArray.push(h); - } - }, this); -}; - -/** - * It returns an iterator that yields local nodes. - * @param {String} keyspace Name of currently logged keyspace at Client level. - * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. - * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as - * second parameter. - */ -DCAwareRoundRobinPolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { - if (!this.hosts) { - return callback(new Error('Load balancing policy not initialized')); - } - this.index += 1; - if (this.index >= utils.maxInt) { - this.index = 0; - } - this._resolveLocalHosts(); - // Use a local reference of hosts - const localHostsArray = this.localHostsArray; - let planLocalIndex = this.index; - let counter = 0; - callback(null, { - next: function () { - let host; - if (counter++ < localHostsArray.length) { - host = localHostsArray[planLocalIndex++ % localHostsArray.length]; - return { value: host, done: false }; - } - return doneIteratorObject; - } - }); -}; - -/** - * Gets an associative array containing the policy options. - */ -DCAwareRoundRobinPolicy.prototype.getOptions = function () { - return new Map([ - ['localDataCenter', this.localDc ] - ]); -}; - -/** - * A wrapper load balancing policy that add token awareness to a child policy. 
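The usual composition wraps a DCAwareRoundRobinPolicy in a TokenAwarePolicy, so replicas of the routing key are tried first and the child policy supplies the remaining local nodes. A sketch, assuming both classes stay available through the public `policies.loadBalancing` namespace:

```ts
import { Client, policies } from "cassandra-driver";

// Try local replicas of the routing key first, then fall back to the child policy's
// data-center-aware round-robin plan.
const loadBalancing = new policies.loadBalancing.TokenAwarePolicy(
  new policies.loadBalancing.DCAwareRoundRobinPolicy("dc1")
);

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "dc1",
  policies: { loadBalancing },
});
```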
- * @param {LoadBalancingPolicy} childPolicy - * @extends LoadBalancingPolicy - * @constructor - */ -function TokenAwarePolicy (childPolicy) { - if (!childPolicy) { - throw new Error("You must specify a child load balancing policy"); - } - this.childPolicy = childPolicy; -} - -util.inherits(TokenAwarePolicy, LoadBalancingPolicy); - -TokenAwarePolicy.prototype.init = function (client, hosts, callback) { - this.client = client; - this.hosts = hosts; - this.childPolicy.init(client, hosts, callback); -}; - -TokenAwarePolicy.prototype.getDistance = function (host) { - return this.childPolicy.getDistance(host); -}; - -/** - * Returns the hosts to use for a new query. - * The returned plan will return local replicas first, if replicas can be determined, followed by the plan of the - * child policy. - * @param {String} keyspace Name of currently logged keyspace at Client level. - * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. - * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as - * second parameter. - */ -TokenAwarePolicy.prototype.newQueryPlan = function (keyspace, executionOptions, callback) { - let routingKey; - if (executionOptions) { - routingKey = executionOptions.getRoutingKey(); - if (executionOptions.getKeyspace()) { - keyspace = executionOptions.getKeyspace(); - } - } - let replicas; - if (routingKey) { - replicas = this.client.getReplicas(keyspace, routingKey); - } - if (!routingKey || !replicas) { - return this.childPolicy.newQueryPlan(keyspace, executionOptions, callback); - } - const iterator = new TokenAwareIterator(keyspace, executionOptions, replicas, this.childPolicy); - iterator.iterate(callback); -}; - -/** - * An iterator that holds the context for the subsequent next() calls - * @param {String} keyspace - * @param {ExecutionOptions} execOptions - * @param {Array} replicas - * @param childPolicy - * @constructor - * @ignore - */ -function TokenAwareIterator(keyspace, execOptions, replicas, childPolicy) { - this.keyspace = keyspace; - this.childPolicy = childPolicy; - this.options = execOptions; - this.localReplicas = []; - this.replicaIndex = 0; - this.replicaMap = {}; - this.childIterator = null; - // Memoize the local replicas - // The amount of local replicas should be defined before start iterating, in order to select an - // appropriate (pseudo random) startIndex - for (let i = 0; i < replicas.length; i++) { - const host = replicas[i]; - if (this.childPolicy.getDistance(host) !== types.distance.local) { - continue; - } - this.replicaMap[host.address] = true; - this.localReplicas.push(host); - } - // We use a PRNG to set the replica index - // We only care about proportional fair scheduling between replicas of a given token - // Math.random() has an extremely short permutation cycle length but we don't care about collisions - this.startIndex = Math.floor(Math.random() * this.localReplicas.length); -} - -TokenAwareIterator.prototype.iterate = function (callback) { - //Load the child policy hosts - const self = this; - this.childPolicy.newQueryPlan(this.keyspace, this.options, function (err, iterator) { - if (err) { - return callback(err); - } - //get the iterator of the child policy in case is needed - self.childIterator = iterator; - callback(null, { - next: function () { return self.computeNext(); } - }); - }); -}; - -TokenAwareIterator.prototype.computeNext = function () { - let host; - if (this.replicaIndex < this.localReplicas.length) { - host = 
this.localReplicas[(this.startIndex + (this.replicaIndex++)) % this.localReplicas.length]; - return { value: host, done: false }; - } - // Return hosts from child policy - let item; - while ((item = this.childIterator.next()) && !item.done) { - if (this.replicaMap[item.value.address]) { - // Avoid yielding local replicas from the child load balancing policy query plan - continue; - } - return item; - } - return doneIteratorObject; -}; - -/** - * Gets an associative array containing the policy options. - */ -TokenAwarePolicy.prototype.getOptions = function () { - const map = new Map([ - ['childPolicy', this.childPolicy.constructor !== undefined ? this.childPolicy.constructor.name : null ] - ]); - - if (this.childPolicy instanceof DCAwareRoundRobinPolicy) { - map.set('localDataCenter', this.childPolicy.localDc); - } - - return map; -}; - -/** - * Create a new policy that wraps the provided child policy but only "allow" hosts - * from the provided list. - * @class - * @classdesc - * A load balancing policy wrapper that ensure that only hosts from a provided - * allow list will ever be returned. - *

- * This policy wraps another load balancing policy and will delegate the choice - * of hosts to the wrapped policy with the exception that only hosts contained - * in the allow list provided when constructing this policy will ever be - * returned. Any host not in the while list will be considered ignored - * and thus will not be connected to. - *

- * This policy can be useful to ensure that the driver only connects to a - * predefined set of hosts. Keep in mind however that this policy defeats - * somewhat the host auto-detection of the driver. As such, this policy is only - * useful in a few special cases or for testing, but is not optimal in general. - * If all you want to do is limiting connections to hosts of the local - * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy - * in particular. - * @param {LoadBalancingPolicy} childPolicy the wrapped policy. - * @param {Array.} allowList The hosts address in the format ipAddress:port. - * Only hosts from this list may get connected - * to (whether they will get connected to or not depends on the child policy). - * @extends LoadBalancingPolicy - * @constructor - */ -function AllowListPolicy (childPolicy, allowList) { - if (!childPolicy) { - throw new Error("You must specify a child load balancing policy"); - } - if (!Array.isArray(allowList)) { - throw new Error("You must provide the list of allowed host addresses"); - } - - this.childPolicy = childPolicy; - this.allowList = new Map(allowList.map(address => [ address, true ])); -} - -util.inherits(AllowListPolicy, LoadBalancingPolicy); - -AllowListPolicy.prototype.init = function (client, hosts, callback) { - this.childPolicy.init(client, hosts, callback); -}; - -/** - * Uses the child policy to return the distance to the host if included in the allow list. - * Any host not in the while list will be considered ignored. - * @param host - */ -AllowListPolicy.prototype.getDistance = function (host) { - if (!this._contains(host)) { - return types.distance.ignored; - } - return this.childPolicy.getDistance(host); -}; - -/** - * @param {Host} host - * @returns {boolean} - * @private - */ -AllowListPolicy.prototype._contains = function (host) { - return !!this.allowList.get(host.address); -}; - -/** - * Returns the hosts to use for a new query filtered by the allow list. - */ -AllowListPolicy.prototype.newQueryPlan = function (keyspace, info, callback) { - const self = this; - this.childPolicy.newQueryPlan(keyspace, info, function (err, iterator) { - if (err) { - return callback(err); - } - callback(null, self._filter(iterator)); - }); -}; - -AllowListPolicy.prototype._filter = function (childIterator) { - const self = this; - return { - next: function () { - const item = childIterator.next(); - if (!item.done && !self._contains(item.value)) { - return this.next(); - } - return item; - } - }; -}; - -/** - * Gets an associative array containing the policy options. - */ -AllowListPolicy.prototype.getOptions = function () { - return new Map([ - ['childPolicy', this.childPolicy.constructor !== undefined ? this.childPolicy.constructor.name : null ], - ['allowList', Array.from(this.allowList.keys())] - ]); -}; - -/** - * Creates a new instance of the policy. - * @classdesc - * Exposed for backward-compatibility only, it's recommended that you use {@link AllowListPolicy} instead. - * @param {LoadBalancingPolicy} childPolicy the wrapped policy. - * @param {Array.} allowList The hosts address in the format ipAddress:port. - * Only hosts from this list may get connected to (whether they will get connected to or not depends on the child - * policy). - * @extends AllowListPolicy - * @deprecated Use allow-list instead. It will be removed in future major versions. 
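In practice the allow list wraps a child policy, and only addresses in the `ipAddress:port` format described above may ever be connected to. A sketch, assuming AllowListPolicy remains exported through `policies.loadBalancing`; the addresses are illustrative:

```ts
import { Client, policies } from "cassandra-driver";

// Only the two listed hosts may be connected to; every other node is treated as ignored.
const loadBalancing = new policies.loadBalancing.AllowListPolicy(
  new policies.loadBalancing.DCAwareRoundRobinPolicy("dc1"),
  ["10.0.0.5:9042", "10.0.0.6:9042"]
);

const client = new Client({
  contactPoints: ["10.0.0.5"],
  localDataCenter: "dc1",
  policies: { loadBalancing },
});
```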
- * @constructor - */ -function WhiteListPolicy(childPolicy, allowList) { - AllowListPolicy.call(this, childPolicy, allowList); -} - -util.inherits(WhiteListPolicy, AllowListPolicy); - -/** - * A load-balancing policy implementation that attempts to fairly distribute the load based on the amount of in-flight - * request per hosts. The local replicas are initially shuffled and - * between the first two nodes in the - * shuffled list, the one with fewer in-flight requests is selected as coordinator. - * - *

- * Additionally, it detects unresponsive replicas and reorders them at the back of the query plan. - *

- * - *

- * For graph analytics queries, it uses the preferred analytics graph server previously obtained by driver as first - * host in the query plan. - *

- */ -class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { - - /** - * Creates a new instance of DefaultLoadBalancingPolicy. - * @param {String|Object} [options] The local data center name or the optional policy options object. - *

- * Note that when providing the local data center name, it overrides localDataCenter option at - * Client level. - *

- * @param {String} [options.localDc] local data center name. This value overrides the 'localDataCenter' Client option - * and is useful for cases where you have multiple execution profiles that you intend on using for routing - * requests to different data centers. - * @param {Function} [options.filter] A function to apply to determine if hosts are included in the query plan. - * The function takes a Host parameter and returns a Boolean. - */ - constructor(options) { - super(); - - if (typeof options === 'string') { - options = { localDc: options }; - } else if (!options) { - options = utils.emptyObject; - } - - this._client = null; - this._hosts = null; - this._filteredHosts = null; - this._preferredHost = null; - this._index = 0; - this.localDc = options.localDc; - this._filter = options.filter || this._defaultFilter; - - // Allow some checks to be injected - if (options.isHostNewlyUp) { - this._isHostNewlyUp = options.isHostNewlyUp; - } - if (options.healthCheck) { - this._healthCheck = options.healthCheck; - } - if (options.compare) { - this._compare = options.compare; - } - if (options.getReplicas) { - this._getReplicas = options.getReplicas; - } - } - - /** - * Initializes the load balancing policy, called after the driver obtained the information of the cluster. - * @param {Client} client - * @param {HostMap} hosts - * @param {Function} callback - */ - init(client, hosts, callback) { - this._client = client; - this._hosts = hosts; - - // Clean local host cache - this._hosts.on('add', () => this._filteredHosts = null); - this._hosts.on('remove', () => this._filteredHosts = null); - - try { - setLocalDc(this, client, this._hosts); - } catch (err) { - return callback(err); - } - - callback(); - } - - /** - * Returns the distance assigned by this policy to the provided host, relatively to the client instance. - * @param {Host} host - */ - getDistance(host) { - if (this._preferredHost !== null && host === this._preferredHost) { - // Set the last preferred host as local. - // It ensures that the pool for the graph analytics host has the appropriate size - return types.distance.local; - } - - if (!this._filter(host)) { - return types.distance.ignored; - } - - return host.datacenter === this.localDc ? types.distance.local : types.distance.ignored; - } - - /** - * Returns a host iterator to be used for a query execution. 
- * @override - * @param {String} keyspace - * @param {ExecutionOptions} executionOptions - * @param {Function} callback - */ - newQueryPlan(keyspace, executionOptions, callback) { - let routingKey; - let preferredHost; - - if (executionOptions) { - routingKey = executionOptions.getRoutingKey(); - - if (executionOptions.getKeyspace()) { - keyspace = executionOptions.getKeyspace(); - } - - preferredHost = executionOptions.getPreferredHost(); - } - - let iterable; - - if (!keyspace || !routingKey) { - iterable = this._getLocalHosts(); - } else { - iterable = this._getReplicasAndLocalHosts(keyspace, routingKey); - } - - if (preferredHost) { - // Set it on an instance level field to set the distance - this._preferredHost = preferredHost; - iterable = DefaultLoadBalancingPolicy._getPreferredHostFirst(preferredHost, iterable); - } - - return callback(null, iterable); - } - - /** - * Yields the preferred host first, followed by the host in the provided iterable - * @param preferredHost - * @param iterable - * @private - */ - static *_getPreferredHostFirst(preferredHost, iterable) { - yield preferredHost; - - for (const host of iterable) { - if (host !== preferredHost) { - yield host; - } - } - } - - /** - * Yields the local hosts without the replicas already yielded - * @param {Array} [localReplicas] The local replicas that we should avoid to include again - * @private - */ - *_getLocalHosts(localReplicas) { - // Use a local reference - const hosts = this._getFilteredLocalHosts(); - const initialIndex = this._getIndex(); - - // indexOf() over an Array is a O(n) operation but given that there should be 3 to 7 replicas, - // it shouldn't be an expensive call. Additionally, this will only be executed when the local replicas - // have been exhausted in a lazy manner. - const canBeYield = localReplicas - ? 
h => localReplicas.indexOf(h) === -1 - : h => true; - - for (let i = 0; i < hosts.length; i++) { - const h = hosts[(i + initialIndex) % hosts.length]; - if (canBeYield(h) && h.isUp()) { - yield h; - } - } - } - - _getReplicasAndLocalHosts(keyspace, routingKey) { - let replicas = this._getReplicas(keyspace, routingKey); - if (replicas === null) { - return this._getLocalHosts(); - } - - const filteredReplicas = []; - let newlyUpReplica = null; - let newlyUpReplicaTimestamp = Number.MIN_SAFE_INTEGER; - let unhealthyReplicas = 0; - - // Filter by DC, predicate and UP replicas - // Use the same iteration to perform other checks: whether if its newly UP or unhealthy - // As this is part of the hot path, we use a simple loop and avoid using Array.prototype.filter() + closure - for (let i = 0; i < replicas.length; i++) { - const h = replicas[i]; - if (!this._filter(h) || h.datacenter !== this.localDc || !h.isUp()) { - continue; - } - const isUpSince = this._isHostNewlyUp(h); - if (isUpSince !== null && isUpSince > newlyUpReplicaTimestamp) { - newlyUpReplica = h; - newlyUpReplicaTimestamp = isUpSince; - } - if (newlyUpReplica === null && !this._healthCheck(h)) { - unhealthyReplicas++; - } - filteredReplicas.push(h); - } - - replicas = filteredReplicas; - - // Shuffle remaining local replicas - utils.shuffleArray(replicas); - - if (replicas.length < 3) { - // Avoid reordering replicas of a set of 2 as we could be doing more harm than good - return this.yieldReplicasFirst(replicas); - } - - let temp; - - if (newlyUpReplica === null) { - if (unhealthyReplicas > 0 && unhealthyReplicas < Math.floor(replicas.length / 2 + 1)) { - // There is one or more unhealthy replicas and there is a majority of healthy replicas - this._sendUnhealthyToTheBack(replicas, unhealthyReplicas); - } - } - else if ((newlyUpReplica === replicas[0] || newlyUpReplica === replicas[1]) && Math.random() * 4 >= 1) { - // There is a newly UP replica and the replica in first or second position is the most recent replica - // marked as UP and dice roll 1d4!=1 -> Send it to the back of the Array - const index = newlyUpReplica === replicas[0] ? 0 : 1; - temp = replicas[replicas.length - 1]; - replicas[replicas.length - 1] = replicas[index]; - replicas[index] = temp; - } - - if (this._compare(replicas[1], replicas[0]) > 0) { - // Power of two random choices - temp = replicas[0]; - replicas[0] = replicas[1]; - replicas[1] = temp; - } - - return this.yieldReplicasFirst(replicas); - } - - /** - * Yields the local replicas followed by the rest of local nodes. - * @param {Array} replicas The local replicas - */ - *yieldReplicasFirst(replicas) { - for (let i = 0; i < replicas.length; i++) { - yield replicas[i]; - } - yield* this._getLocalHosts(replicas); - } - - _isHostNewlyUp(h) { - return (h.isUpSince !== null && Date.now() - h.isUpSince < newlyUpInterval) ? h.isUpSince : null; - } - - /** - * Returns a boolean determining whether the host health is ok or not. - * A Host is considered unhealthy when there are enough items in the queue (10 items in-flight) but the - * Host is not responding to those requests. - * @param {Host} h - * @return {boolean} - * @private - */ - _healthCheck(h) { - return !(h.getInFlight() >= 10 && h.getResponseCount() <= 1); - } - - /** - * Compares to host and returns 1 if it needs to favor the first host otherwise, -1. - * @return {number} - * @private - */ - _compare(h1, h2) { - return h1.getInFlight() < h2.getInFlight() ? 
1 : -1; - } - - _getReplicas(keyspace, routingKey) { - return this._client.getReplicas(keyspace, routingKey); - } - - /** - * Returns an Array of hosts filtered by DC and predicate. - * @returns {Array} - * @private - */ - _getFilteredLocalHosts() { - if (this._filteredHosts === null) { - this._filteredHosts = this._hosts.values() - .filter(h => this._filter(h) && h.datacenter === this.localDc); - } - return this._filteredHosts; - } - - _getIndex() { - const result = this._index++; - // Overflow protection - if (this._index === 0x7fffffff) { - this._index = 0; - } - return result; - } - - _sendUnhealthyToTheBack(replicas, unhealthyReplicas) { - let counter = 0; - - // Start from the back, move backwards and stop once all unhealthy replicas are at the back - for (let i = replicas.length - 1; i >= 0 && counter < unhealthyReplicas; i--) { - const host = replicas[i]; - if (this._healthCheck(host)) { - continue; - } - - const targetIndex = replicas.length - 1 - counter; - if (targetIndex !== i) { - const temp = replicas[targetIndex]; - replicas[targetIndex] = host; - replicas[i] = temp; - } - counter++; - } - } - - _defaultFilter() { - return true; - } - - /** - * Gets an associative array containing the policy options. - */ - getOptions() { - return new Map([ - ['localDataCenter', this.localDc ], - ['filterFunction', this._filter !== this._defaultFilter ] - ]); - } -} - -/** - * Validates and sets the local data center to be used. - * @param {LoadBalancingPolicy} lbp - * @param {Client} client - * @param {HostMap} hosts - * @private - */ -function setLocalDc(lbp, client, hosts) { - if (!(lbp instanceof LoadBalancingPolicy)) { - throw new errors.DriverInternalError('LoadBalancingPolicy instance was not provided'); - } - - if (client && client.options) { - if (lbp.localDc && !client.options.localDataCenter) { - client.log('info', `Local data center '${lbp.localDc}' was provided as an argument to the load-balancing` + - ` policy. It is preferable to specify the local data center using 'localDataCenter' in Client` + - ` options instead when your application is targeting a single data center.`); - } - - // If localDc is unset, use value set in client options. - lbp.localDc = lbp.localDc || client.options.localDataCenter; - } - - const dcs = getDataCenters(hosts); - - if (!lbp.localDc) { - throw new errors.ArgumentError( - `'localDataCenter' is not defined in Client options and also was not specified in constructor.` + - ` At least one is required. Available DCs are: [${Array.from(dcs)}]`); - } - - if (!dcs.has(lbp.localDc)) { - throw new errors.ArgumentError(`Datacenter ${lbp.localDc} was not found. Available DCs are: [${Array.from(dcs)}]`); - } -} - -function getDataCenters(hosts) { - return new Set(hosts.values().map(h => h.datacenter)); -} - -module.exports = { - AllowListPolicy, - DCAwareRoundRobinPolicy, - DefaultLoadBalancingPolicy, - LoadBalancingPolicy, - RoundRobinPolicy, - TokenAwarePolicy, - // Deprecated: for backward compatibility only. - WhiteListPolicy -}; \ No newline at end of file diff --git a/lib/policies/load-balancing.ts b/lib/policies/load-balancing.ts new file mode 100644 index 000000000..96ffb54f7 --- /dev/null +++ b/lib/policies/load-balancing.ts @@ -0,0 +1,959 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type Client from "../client"; +import errors from "../errors"; +import type { ExecutionOptions } from "../execution-options"; +import type { Host, HostMap } from "../host"; +import types, { type distance } from "../types/index"; +import utils, { type EmptyCallback } from "../utils"; + +const doneIteratorObject = Object.freeze({ done: true, value: null }); +const newlyUpInterval = 60000; + +/** + * Base class for Load Balancing Policies. + */ +class LoadBalancingPolicy { + protected client: Client; + protected hosts: HostMap; + /** @internal */ + localDc: string; + + /** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {EmptyCallback} callback + */ + init(client: Client, hosts: HostMap, callback: EmptyCallback): void { + this.client = client; + this.hosts = hosts; + callback(null); + } + + /** + * Returns the distance assigned by this policy to the provided host. + * @param {Host} host + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getDistance(host: Host): distance { + return types.distance.local; + } + + /** + * Returns an iterator with the hosts for a new query. + * Each new query will call this method. The first host in the result will + * then be used to perform the query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. + */ + newQueryPlan(keyspace: string, + executionOptions: ExecutionOptions, + callback: (error: Error, iterator: Iterator) => void): void{ + callback(new Error("You must implement a query plan for the LoadBalancingPolicy class"),null); + } + + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map(); + } +} + +/** + * This policy yield nodes in a round-robin fashion. + */ +class RoundRobinPolicy extends LoadBalancingPolicy { + private index: number; + + constructor() { + super(); + this.index = 0; + } + + /** + * Returns an iterator with the hosts to be used as coordinator for a query. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. 
+ */ + newQueryPlan(keyspace: string, + executionOptions: ExecutionOptions, + callback: (error: Error, iterator: Iterator) => void): void{ + if (!this.hosts) { + return callback(new Error('Load balancing policy not initialized'), null); + } + const hosts = this.hosts.values(); + const self = this; + let counter = 0; + + let planIndex = self.index % hosts.length; + self.index += 1; + if (self.index >= utils.maxInt) { + self.index = 0; + } + + callback(null, { + next: function () { + if (++counter > hosts.length) { + return doneIteratorObject; + } + return {value: hosts[planIndex++ % hosts.length], done: false}; + }, + }); + } +} + +/** + * A data-center aware Round-robin load balancing policy. + * This policy provides round-robin queries over the nodes of the local + * data center. + */ +class DCAwareRoundRobinPolicy extends LoadBalancingPolicy { + /** @internal */ + localDc: string; + private index: number; + private localHostsArray: any; + + /** + * A data-center aware Round-robin load balancing policy. + * This policy provides round-robin queries over the nodes of the local + * data center. + * @param {String} [localDc] local datacenter name. This value overrides the 'localDataCenter' Client option \ + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @constructor + */ + constructor(localDc?: string) { + super(); + this.localDc = localDc; + this.index = 0; + this.localHostsArray = null; + } + + init(client: Client, hosts: HostMap, callback: EmptyCallback): void { + this.client = client; + this.hosts = hosts; + hosts.on("add", this._cleanHostCache.bind(this)); + hosts.on("remove", this._cleanHostCache.bind(this)); + + try { + setLocalDc(this, client, this.hosts); + } catch (err) { + return callback(err); + } + + callback(null); + } + + /** + * Returns the distance depending on the datacenter. + * @param {Host} host + */ + getDistance(host: Host): distance { + if (host.datacenter === this.localDc) { + return types.distance.local; + } + + return types.distance.ignored; + } + + private _cleanHostCache(): void { + this.localHostsArray = null; + } + + private _resolveLocalHosts(): void { + const hosts = this.hosts.values(); + if (this.localHostsArray) { + //there were already calculated + return; + } + this.localHostsArray = []; + hosts.forEach(function (h) { + if (!h.datacenter) { + //not a remote dc node + return; + } + if (h.datacenter === this.localDc) { + this.localHostsArray.push(h); + } + }, this); + } + + /** + * It returns an iterator that yields local nodes. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. 
+ */ + newQueryPlan(keyspace: string, + executionOptions: ExecutionOptions, + callback: (error: Error, iterator: Iterator) => void): void { + if (!this.hosts) { + return callback(new Error('Load balancing policy not initialized'),null); + } + this.index += 1; + if (this.index >= utils.maxInt) { + this.index = 0; + } + this._resolveLocalHosts(); + // Use a local reference of hosts + const localHostsArray = this.localHostsArray; + let planLocalIndex = this.index; + let counter = 0; + callback(null, { + next: function () { + let host; + if (counter++ < localHostsArray.length) { + host = localHostsArray[planLocalIndex++ % localHostsArray.length]; + return { value: host, done: false }; + } + return doneIteratorObject; + } + }); + } + + getOptions(): Map { + return new Map([ + ['localDataCenter', this.localDc ] + ]); + } +} + +/** + * A wrapper load balancing policy that adds token awareness to a child policy. + */ +class TokenAwarePolicy extends LoadBalancingPolicy { + private childPolicy: LoadBalancingPolicy; + + /** + * A wrapper load balancing policy that add token awareness to a child policy. + * @param {LoadBalancingPolicy} childPolicy + * @constructor + */ + constructor(childPolicy: LoadBalancingPolicy) { + super(); + if (!childPolicy) { + throw new Error("You must specify a child load balancing policy"); + } + this.childPolicy = childPolicy; + } + + init(client: Client, hosts: HostMap, callback: EmptyCallback): void { + this.client = client; + this.hosts = hosts; + this.childPolicy.init(client, hosts, callback); + } + + getDistance(host: Host): distance { + return this.childPolicy.getDistance(host); + } + + /** + * Returns the hosts to use for a new query. + * The returned plan will return local replicas first, if replicas can be determined, followed by the plan of the + * child policy. + * @param {String} keyspace Name of currently logged keyspace at Client level. + * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. + * @param {Function} callback The function to be invoked with the error as first parameter and the host iterator as + * second parameter. 
+ */ + newQueryPlan(keyspace: string, + executionOptions: ExecutionOptions, + callback: (error: Error, iterator: Iterator) => void): void { + let routingKey; + if (executionOptions) { + routingKey = executionOptions.getRoutingKey(); + if (executionOptions.getKeyspace()) { + keyspace = executionOptions.getKeyspace(); + } + } + let replicas; + if (routingKey) { + replicas = this.client.getReplicas(keyspace, routingKey); + } + if (!routingKey || !replicas) { + return this.childPolicy.newQueryPlan(keyspace, executionOptions, callback); + } + const iterator = new TokenAwareIterator(keyspace, executionOptions, replicas, this.childPolicy); + iterator.iterate(callback); + } + + getOptions(): Map { + const map = new Map([["childPolicy", this.childPolicy.constructor?.name || null]]); + if (this.childPolicy instanceof DCAwareRoundRobinPolicy) { + map.set("localDataCenter", this.childPolicy.localDc); + } + return map; + } +} + +/** + * An iterator that holds the context for the subsequent next() calls + * @ignore @internal + */ +class TokenAwareIterator { + keyspace: string; + childPolicy: LoadBalancingPolicy; + options: ExecutionOptions; + localReplicas: any[]; + replicaIndex: number; + replicaMap: Record; + childIterator: Iterator; + startIndex: number; + /** + * An iterator that holds the context for the subsequent next() calls + * @param {String} keyspace + * @param {ExecutionOptions} execOptions + * @param {Array} replicas + * @param childPolicy + * @constructor + * @ignore @internal + */ + constructor(keyspace: string, execOptions: ExecutionOptions, replicas: Array, childPolicy: LoadBalancingPolicy) { + this.keyspace = keyspace; + this.childPolicy = childPolicy; + this.options = execOptions; + this.localReplicas = []; + this.replicaIndex = 0; + this.replicaMap = {}; + this.childIterator = null; + // Memoize the local replicas + // The amount of local replicas should be defined before start iterating, in order to select an + // appropriate (pseudo random) startIndex + for (let i = 0; i < replicas.length; i++) { + const host = replicas[i]; + if (this.childPolicy.getDistance(host) !== types.distance.local) { + continue; + } + this.replicaMap[host.address] = true; + this.localReplicas.push(host); + } + // We use a PRNG to set the replica index + // We only care about proportional fair scheduling between replicas of a given token + // Math.random() has an extremely short permutation cycle length but we don't care about collisions + this.startIndex = Math.floor(Math.random() * this.localReplicas.length); + } + + iterate(callback: Function) { + //Load the child policy hosts + const self = this; + this.childPolicy.newQueryPlan(this.keyspace, this.options, function (err, iterator) { + if (err) { + return callback(err); + } + //get the iterator of the child policy in case is needed + self.childIterator = iterator; + callback(null, { + next: function () { return self.computeNext(); } + }); + }); + } + + computeNext(): object { + let host; + if (this.replicaIndex < this.localReplicas.length) { + host = this.localReplicas[(this.startIndex + (this.replicaIndex++)) % this.localReplicas.length]; + return { value: host, done: false }; + } + // Return hosts from child policy + let item; + while ((item = this.childIterator.next()) && !item.done) { + if (this.replicaMap[item.value.address]) { + // Avoid yielding local replicas from the child load balancing policy query plan + continue; + } + return item; + } + return doneIteratorObject; + } +} + +/** + * @class + * @classdesc + * A load balancing policy wrapper 
that ensures that only hosts from a provided + * allow list will ever be returned. + *

+ * This policy wraps another load balancing policy and will delegate the choice + * of hosts to the wrapped policy with the exception that only hosts contained + * in the allow list provided when constructing this policy will ever be + * returned. Any host not in the allow list will be considered ignored + * and thus will not be connected to. + *
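[Editor's note] The wrapping behaviour described above can be illustrated with a short usage sketch. The import path and host addresses are assumptions for illustration only; the constructor signatures match the code added in this patch.

import { AllowListPolicy, DCAwareRoundRobinPolicy } from "./load-balancing";

// Only these two hosts can ever appear in a query plan; the child policy still
// decides ordering and distance among the allowed hosts.
const allowListPolicy = new AllowListPolicy(
  new DCAwareRoundRobinPolicy("datacenter1"),
  ["10.0.0.1:9042", "10.0.0.2:9042"] // hypothetical addresses in ipAddress:port format
);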

+ * This policy can be useful to ensure that the driver only connects to a + * predefined set of hosts. Keep in mind however that this policy defeats + * somewhat the host auto-detection of the driver. As such, this policy is only + * useful in a few special cases or for testing, but is not optimal in general. + * If all you want to do is limiting connections to hosts of the local + * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy + * in particular. + * @extends LoadBalancingPolicy + */ +class AllowListPolicy extends LoadBalancingPolicy { + private childPolicy: LoadBalancingPolicy; + private allowList: Map; + /** + * Create a new policy that wraps the provided child policy but only "allow" hosts + * from the provided list. + * @class + * @classdesc + * A load balancing policy wrapper that ensure that only hosts from a provided + * allow list will ever be returned. + *

+ * This policy wraps another load balancing policy and will delegate the choice + * of hosts to the wrapped policy with the exception that only hosts contained + * in the allow list provided when constructing this policy will ever be + * returned. Any host not in the allow list will be considered ignored + * and thus will not be connected to. + *

+ * This policy can be useful to ensure that the driver only connects to a + * predefined set of hosts. Keep in mind however that this policy defeats + * somewhat the host auto-detection of the driver. As such, this policy is only + * useful in a few special cases or for testing, but is not optimal in general. + * If all you want to do is limiting connections to hosts of the local + * data-center then you should use DCAwareRoundRobinPolicy and *not* this policy + * in particular. + * @param {LoadBalancingPolicy} childPolicy the wrapped policy. + * @param {Array.} allowList The hosts address in the format ipAddress:port. + * Only hosts from this list may get connected + * to (whether they will get connected to or not depends on the child policy). + * @constructor + */ + constructor(childPolicy: LoadBalancingPolicy, allowList: Array) { + super(); + + if (!childPolicy) { + throw new Error("You must specify a child load balancing policy"); + } + if (!Array.isArray(allowList)) { + throw new Error("You must provide the list of allowed host addresses"); + } + + this.childPolicy = childPolicy; + this.allowList = new Map(allowList.map(address => [address, true])); + } + + init(client: Client, hosts: HostMap, callback: EmptyCallback):void { + this.childPolicy.init(client, hosts, callback); + } + + /** + * Uses the child policy to return the distance to the host if included in the allow list. + * Any host not in the while list will be considered ignored. + * @param host + */ + getDistance(host: Host): distance { + if (!this._contains(host)) { + return types.distance.ignored; + } + return this.childPolicy.getDistance(host); + } + + /** + * Checks if the host is in the allow list. + * @param {Host} host + * @returns {boolean} + * @private + */ + private _contains(host: Host): boolean { + return !!this.allowList.get(host.address); + } + + /** + * Returns the hosts to use for a new query filtered by the allow list. + * @internal + */ + newQueryPlan(keyspace: string, info: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void { + const self = this; + this.childPolicy.newQueryPlan(keyspace, info, function (err, iterator) { + if (err) { + return callback(err, null); + } + callback(null, self._filter(iterator)); + }); + } + + private _filter(childIterator: Iterator): Iterator { + const self = this; + return { + next: function () { + const item = childIterator.next(); + if (!item.done && !self._contains(item.value)) { + return this.next(); + } + return item; + } + }; + } + + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map([ + ['childPolicy', this.childPolicy.constructor !== undefined ? this.childPolicy.constructor.name : null ], + ['allowList', Array.from(this.allowList.keys())] + ]); + } +} + +/** + * @classdesc + * Exposed for backward-compatibility only, it's recommended that you use {@link AllowListPolicy} instead. + * @extends AllowListPolicy + * @deprecated Use allow-list instead. It will be removed in future major versions. + */ +class WhiteListPolicy extends AllowListPolicy { + /** + * Creates a new instance of WhiteListPolicy. + * @param {LoadBalancingPolicy} childPolicy - The wrapped policy. + * @param {Array.} allowList - The hosts address in the format ipAddress:port. + * @deprecated Use AllowListPolicy instead. It will be removed in future major versions. 
+ */ + constructor(childPolicy: LoadBalancingPolicy, allowList: Array) { + super(childPolicy, allowList); + } +} + +/** + * A load-balancing policy implementation that attempts to fairly distribute the load based on the amount of in-flight + * request per hosts. The local replicas are initially shuffled and + * between the first two nodes in the + * shuffled list, the one with fewer in-flight requests is selected as coordinator. + * + *
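[Editor's note] A minimal sketch of the "fewer in-flight requests" selection described above, written against the Host.getInFlight() method used later in this patch; it illustrates the idea and is not the driver's exact code.

// After shuffling the local replicas, only the first two candidates are compared:
// the one with fewer in-flight requests is moved to the front of the plan.
function preferLessLoaded(replicas: Array<{ getInFlight(): number }>): void {
  if (replicas.length >= 2 && replicas[1].getInFlight() < replicas[0].getInFlight()) {
    const temp = replicas[0];
    replicas[0] = replicas[1];
    replicas[1] = temp;
  }
}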

+ * Additionally, it detects unresponsive replicas and reorders them at the back of the query plan. + *

+ * + *

+ * For graph analytics queries, it uses the preferred analytics graph server previously obtained by the driver as the first + * host in the query plan. + *

+ */ +class DefaultLoadBalancingPolicy extends LoadBalancingPolicy { + private _client: Client; + private _hosts: HostMap; + private _filteredHosts: Host[]; + private _preferredHost: null; + private _index: number; + private _filter: (host: Host) => boolean ; + + /** + * Creates a new instance of DefaultLoadBalancingPolicy. + * @param {String|Object} [options] The local data center name or the optional policy options object. + *

+ * Note that when providing the local data center name, it overrides the localDataCenter option at + * Client level. + *
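[Editor's note] A sketch of the two constructor forms documented here; the data center name and the filter predicate are placeholder values.

// Form 1: pass the local data center name directly.
const lbp1 = new DefaultLoadBalancingPolicy("datacenter1");

// Form 2: pass an options object, optionally with a host filter.
const lbp2 = new DefaultLoadBalancingPolicy({
  localDc: "datacenter1",
  filter: host => host.address !== "10.0.0.5:9042" // exclude a hypothetical host
});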

+ * @param {String} [options.localDc] local data center name. This value overrides the 'localDataCenter' Client option + * and is useful for cases where you have multiple execution profiles that you intend on using for routing + * requests to different data centers. + * @param {Function} [options.filter] A function to apply to determine if hosts are included in the query plan. + * The function takes a Host parameter and returns a Boolean. + */ + constructor(options?: { localDc?: string, filter?: (host: Host) => boolean } | string) { + super(); + + if (typeof options === 'string') { + options = { localDc: options }; + } else if (!options) { + options = utils.emptyObject; + } + + this._client = null; + this._hosts = null; + this._filteredHosts = null; + this._preferredHost = null; + this._index = 0; + this.localDc = options.localDc; + this._filter = options.filter || this._defaultFilter; + + // Allow some checks to be injected + //TODO: Shouldn't we expose and document those? + // @ts-ignore + if (options.isHostNewlyUp) { + // @ts-ignore + this._isHostNewlyUp = options.isHostNewlyUp; + } + // @ts-ignore + if (options.healthCheck) { + // @ts-ignore + this._healthCheck = options.healthCheck; + } + // @ts-ignore + if (options.compare) { + // @ts-ignore + this._compare = options.compare; + } + // @ts-ignore + if (options.getReplicas) { + // @ts-ignore + this._getReplicas = options.getReplicas; + } + } + + /** + * Initializes the load balancing policy, called after the driver obtained the information of the cluster. + * @param {Client} client + * @param {HostMap} hosts + * @param {Function} callback + */ + init(client: Client, hosts: HostMap, callback: EmptyCallback):void { + this._client = client; + this._hosts = hosts; + + // Clean local host cache + this._hosts.on('add', () => this._filteredHosts = null); + this._hosts.on('remove', () => this._filteredHosts = null); + + try { + setLocalDc(this, client, this._hosts); + } catch (err) { + return callback(err); + } + + callback(null); + } + + /** + * Returns the distance assigned by this policy to the provided host, relatively to the client instance. + * @param {Host} host + */ + getDistance(host: Host) : distance{ + if (this._preferredHost !== null && host === this._preferredHost) { + // Set the last preferred host as local. + // It ensures that the pool for the graph analytics host has the appropriate size + return types.distance.local; + } + + if (!this._filter(host)) { + return types.distance.ignored; + } + + return host.datacenter === this.localDc ? types.distance.local : types.distance.ignored; + } + + /** + * Returns a host iterator to be used for a query execution. 
+ * @override + * @param {String} keyspace + * @param {ExecutionOptions} executionOptions + * @param {Function} callback + */ + newQueryPlan(keyspace: string, executionOptions: ExecutionOptions, callback: (error: Error, iterator: Iterator) => void): void { + let routingKey; + let preferredHost; + + if (executionOptions) { + routingKey = executionOptions.getRoutingKey(); + + if (executionOptions.getKeyspace()) { + keyspace = executionOptions.getKeyspace(); + } + + preferredHost = executionOptions.getPreferredHost(); + } + + let iterable; + + if (!keyspace || !routingKey) { + iterable = this._getLocalHosts(); + } else { + iterable = this._getReplicasAndLocalHosts(keyspace, routingKey); + } + + if (preferredHost) { + // Set it on an instance level field to set the distance + this._preferredHost = preferredHost; + iterable = DefaultLoadBalancingPolicy._getPreferredHostFirst(preferredHost, iterable); + } + + return callback(null, iterable); + } + + /** + * Yields the preferred host first, followed by the host in the provided iterable + * @param preferredHost + * @param iterable + * @private + */ + private static *_getPreferredHostFirst(preferredHost, iterable) { + yield preferredHost; + + for (const host of iterable) { + if (host !== preferredHost) { + yield host; + } + } + } + + /** + * Yields the local hosts without the replicas already yielded + * @param {Array} [localReplicas] The local replicas that we should avoid to include again + * @private + */ + private *_getLocalHosts(localReplicas?: Array) { + // Use a local reference + const hosts = this._getFilteredLocalHosts(); + const initialIndex = this._getIndex(); + + // indexOf() over an Array is a O(n) operation but given that there should be 3 to 7 replicas, + // it shouldn't be an expensive call. Additionally, this will only be executed when the local replicas + // have been exhausted in a lazy manner. + const canBeYield = localReplicas + ? 
h => localReplicas.indexOf(h) === -1 + : _h => true; + + for (let i = 0; i < hosts.length; i++) { + const h = hosts[(i + initialIndex) % hosts.length]; + if (canBeYield(h) && h.isUp()) { + yield h; + } + } + } + + private _getReplicasAndLocalHosts(keyspace, routingKey) { + let replicas = this._getReplicas(keyspace, routingKey); + if (replicas === null) { + return this._getLocalHosts(); + } + + const filteredReplicas = []; + let newlyUpReplica = null; + let newlyUpReplicaTimestamp = Number.MIN_SAFE_INTEGER; + let unhealthyReplicas = 0; + + // Filter by DC, predicate and UP replicas + // Use the same iteration to perform other checks: whether if its newly UP or unhealthy + // As this is part of the hot path, we use a simple loop and avoid using Array.prototype.filter() + closure + for (let i = 0; i < replicas.length; i++) { + const h = replicas[i]; + if (!this._filter(h) || h.datacenter !== this.localDc || !h.isUp()) { + continue; + } + const isUpSince = this._isHostNewlyUp(h); + if (isUpSince !== null && isUpSince > newlyUpReplicaTimestamp) { + newlyUpReplica = h; + newlyUpReplicaTimestamp = isUpSince; + } + if (newlyUpReplica === null && !this._healthCheck(h)) { + unhealthyReplicas++; + } + filteredReplicas.push(h); + } + + replicas = filteredReplicas; + + // Shuffle remaining local replicas + utils.shuffleArray(replicas); + + if (replicas.length < 3) { + // Avoid reordering replicas of a set of 2 as we could be doing more harm than good + return this.yieldReplicasFirst(replicas); + } + + let temp; + + if (newlyUpReplica === null) { + if (unhealthyReplicas > 0 && unhealthyReplicas < Math.floor(replicas.length / 2 + 1)) { + // There is one or more unhealthy replicas and there is a majority of healthy replicas + this._sendUnhealthyToTheBack(replicas, unhealthyReplicas); + } + } + else if ((newlyUpReplica === replicas[0] || newlyUpReplica === replicas[1]) && Math.random() * 4 >= 1) { + // There is a newly UP replica and the replica in first or second position is the most recent replica + // marked as UP and dice roll 1d4!=1 -> Send it to the back of the Array + const index = newlyUpReplica === replicas[0] ? 0 : 1; + temp = replicas[replicas.length - 1]; + replicas[replicas.length - 1] = replicas[index]; + replicas[index] = temp; + } + + if (this._compare(replicas[1], replicas[0]) > 0) { + // Power of two random choices + temp = replicas[0]; + replicas[0] = replicas[1]; + replicas[1] = temp; + } + + return this.yieldReplicasFirst(replicas); + } + + /** + * Yields the local replicas followed by the rest of local nodes. + * @param {Array} replicas The local replicas + */ + private *yieldReplicasFirst(replicas: Array) { + for (let i = 0; i < replicas.length; i++) { + yield replicas[i]; + } + yield* this._getLocalHosts(replicas); + } + + private _isHostNewlyUp(h: Host): number { + return (h.isUpSince !== null && Date.now() - h.isUpSince < newlyUpInterval) ? h.isUpSince : null; + } + + /** + * Returns a boolean determining whether the host health is ok or not. + * A Host is considered unhealthy when there are enough items in the queue (10 items in-flight) but the + * Host is not responding to those requests. + * @param {Host} h + * @return {boolean} + * @private + */ + private _healthCheck(h: Host): boolean { + return !(h.getInFlight() >= 10 && h.getResponseCount() <= 1); + } + + /** + * Compares to host and returns 1 if it needs to favor the first host otherwise, -1. + * @return {number} + * @private + */ + private _compare(h1, h2): number { + return h1.getInFlight() < h2.getInFlight() ? 
1 : -1; + } + + private _getReplicas(keyspace, routingKey) { + return this._client.getReplicas(keyspace, routingKey); + } + + /** + * Returns an Array of hosts filtered by DC and predicate. + * @returns {Array} + * @private + */ + private _getFilteredLocalHosts(): Array { + if (this._filteredHosts === null) { + this._filteredHosts = this._hosts.values() + .filter(h => this._filter(h) && h.datacenter === this.localDc); + } + return this._filteredHosts; + } + + private _getIndex() { + const result = this._index++; + // Overflow protection + if (this._index === 0x7fffffff) { + this._index = 0; + } + return result; + } + + private _sendUnhealthyToTheBack(replicas, unhealthyReplicas) { + let counter = 0; + + // Start from the back, move backwards and stop once all unhealthy replicas are at the back + for (let i = replicas.length - 1; i >= 0 && counter < unhealthyReplicas; i--) { + const host = replicas[i]; + if (this._healthCheck(host)) { + continue; + } + + const targetIndex = replicas.length - 1 - counter; + if (targetIndex !== i) { + const temp = replicas[targetIndex]; + replicas[targetIndex] = host; + replicas[i] = temp; + } + counter++; + } + } + + private _defaultFilter() { + return true; + } + + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map([ + ['localDataCenter', this.localDc ], + ['filterFunction', this._filter !== this._defaultFilter ] + ]); + } +} + +/** + * Validates and sets the local data center to be used. + * @param {LoadBalancingPolicy} lbp + * @param {Client} client + * @param {HostMap} hosts + * @private + */ +function setLocalDc(lbp: LoadBalancingPolicy, client: Client, hosts: HostMap) { + if (!(lbp instanceof LoadBalancingPolicy)) { + throw new errors.DriverInternalError('LoadBalancingPolicy instance was not provided'); + } + + if (client && client.options) { + if (lbp.localDc && !client.options.localDataCenter) { + client.log('info', `Local data center '${lbp.localDc}' was provided as an argument to the load-balancing` + + ` policy. It is preferable to specify the local data center using 'localDataCenter' in Client` + + ` options instead when your application is targeting a single data center.`); + } + + // If localDc is unset, use value set in client options. + lbp.localDc = lbp.localDc || client.options.localDataCenter; + } + + const dcs = getDataCenters(hosts); + + if (!lbp.localDc) { + throw new errors.ArgumentError( + `'localDataCenter' is not defined in Client options and also was not specified in constructor.` + + ` At least one is required. Available DCs are: [${Array.from(dcs)}]`); + } + + if (!dcs.has(lbp.localDc)) { + throw new errors.ArgumentError(`Datacenter ${lbp.localDc} was not found. Available DCs are: [${Array.from(dcs)}]`); + } +} + +function getDataCenters(hosts) { + return new Set(hosts.values().map(h => h.datacenter)); +} + +export { + AllowListPolicy, + DCAwareRoundRobinPolicy, + DefaultLoadBalancingPolicy, + LoadBalancingPolicy, + RoundRobinPolicy, + TokenAwarePolicy, + // Deprecated: for backward compatibility only. + WhiteListPolicy +}; + +export default { + AllowListPolicy, + DCAwareRoundRobinPolicy, + DefaultLoadBalancingPolicy, + LoadBalancingPolicy, + RoundRobinPolicy, + TokenAwarePolicy, + // Deprecated: for backward compatibility only. 
+ WhiteListPolicy +}; \ No newline at end of file diff --git a/lib/policies/reconnection.js b/lib/policies/reconnection.js deleted file mode 100644 index fa6a899ad..000000000 --- a/lib/policies/reconnection.js +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); - -/** @module policies/reconnection */ -/** - * Base class for Reconnection Policies - * @constructor - */ -function ReconnectionPolicy() { - -} - -/** - * A new reconnection schedule. - * @returns {{next: function}} An infinite iterator - */ -ReconnectionPolicy.prototype.newSchedule = function () { - throw new Error('You must implement a new schedule for the Reconnection class'); -}; - -/** - * Gets an associative array containing the policy options. - */ -ReconnectionPolicy.prototype.getOptions = function () { - return new Map(); -}; - -/** - * A reconnection policy that waits a constant time between each reconnection attempt. - * @param {Number} delay Delay in ms - * @constructor - */ -function ConstantReconnectionPolicy(delay) { - this.delay = delay; -} - -util.inherits(ConstantReconnectionPolicy, ReconnectionPolicy); - -/** - * A new reconnection schedule that returns the same next delay value - * @returns {{next: Function}} An infinite iterator - */ -ConstantReconnectionPolicy.prototype.newSchedule = function () { - const self = this; - return { - next: function () { - return {value: self.delay, done: false}; - } - }; -}; - -/** - * Gets an associative array containing the policy options. - */ -ConstantReconnectionPolicy.prototype.getOptions = function () { - return new Map([['delay', this.delay ]]); -}; - -/** - * A reconnection policy that waits exponentially longer between each - * reconnection attempt (but keeps a constant delay once a maximum delay is reached). - *

- * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations - * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the - * delay to be less than the base delay, or more than the max delay. - *

- * @param {Number} baseDelay The base delay in milliseconds to use for the schedules created by this policy. - * @param {Number} maxDelay The maximum delay in milliseconds to wait between two reconnection attempt. - * @param {Boolean} startWithNoDelay Determines if the first attempt should be zero delay - * @constructor - */ -function ExponentialReconnectionPolicy(baseDelay, maxDelay, startWithNoDelay) { - this.baseDelay = baseDelay; - this.maxDelay = maxDelay; - this.startWithNoDelay = startWithNoDelay; -} - -util.inherits(ExponentialReconnectionPolicy, ReconnectionPolicy); - -/** - * A new schedule that uses an exponentially growing delay between reconnection attempts. - * @returns {{next: Function}} An infinite iterator. - */ -ExponentialReconnectionPolicy.prototype.newSchedule = function* () { - let index = this.startWithNoDelay ? -1 : 0; - - while (true) { - let delay = 0; - - if (index >= 64) { - delay = this.maxDelay; - } else if (index !== -1) { - delay = Math.min(Math.pow(2, index) * this.baseDelay, this.maxDelay); - } - - index++; - - yield this._addJitter(delay); - } -}; - -/** - * Adds a random portion of +-15% to the delay provided. - * Initially, its adds a random value of 15% to avoid reconnection before reaching the base delay. - * When the schedule reaches max delay, only subtracts a random portion of 15%. - */ -ExponentialReconnectionPolicy.prototype._addJitter = function (value) { - if (value === 0) { - // Instant reconnection without jitter - return value; - } - - // Use the formula: 85% + rnd() * 30% to calculate the percentage of the original delay - let minPercentage = 0.85; - let range = 0.30; - - if (!this.startWithNoDelay && value === this.baseDelay) { - // Between 100% to 115% of the original value - minPercentage = 1; - range = 0.15; - } else if (value === this.maxDelay) { - // Between 85% to 100% of the original value - range = 0.15; - } - - return Math.floor(value * (Math.random() * range + minPercentage)); -}; - -/** - * Gets an associative array containing the policy options. - */ -ExponentialReconnectionPolicy.prototype.getOptions = function () { - return new Map([ - ['baseDelay', this.baseDelay ], - ['maxDelay', this.maxDelay ], - ['startWithNoDelay', this.startWithNoDelay ] - ]); -}; - -exports.ReconnectionPolicy = ReconnectionPolicy; -exports.ConstantReconnectionPolicy = ConstantReconnectionPolicy; -exports.ExponentialReconnectionPolicy = ExponentialReconnectionPolicy; \ No newline at end of file diff --git a/lib/policies/reconnection.ts b/lib/policies/reconnection.ts new file mode 100644 index 000000000..b676b02c1 --- /dev/null +++ b/lib/policies/reconnection.ts @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +/** @module policies/reconnection */ +/** + * Base class for Reconnection Policies + */ +class ReconnectionPolicy { + constructor() { + } + /** + * A new reconnection schedule. 
+ * @returns {Iterator} An infinite iterator + */ + newSchedule(): Iterator { + throw new Error('You must implement a new schedule for the Reconnection class'); + } + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map(); + } +} + + + +/** + * A reconnection policy that waits a constant time between each reconnection attempt. + */ +class ConstantReconnectionPolicy extends ReconnectionPolicy { + private delay: number; + /** + * A reconnection policy that waits a constant time between each reconnection attempt. + * @param {Number} delay Delay in ms + * @constructor + */ + constructor(delay: number) { + super(); + this.delay = delay; + } + /** + * A new reconnection schedule that returns the same next delay value + * @returns { Iterator} An infinite iterator + */ + newSchedule(): Iterator { + const self = this; + return { + next: function () { + return { value: self.delay, done: false }; + } + }; + } + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map([['delay', this.delay]]); + } +} + +/** + * A reconnection policy that waits exponentially longer between each + * reconnection attempt (but keeps a constant delay once a maximum delay is reached). + *

+ * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations + * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the + * delay to be less than the base delay, or more than the max delay. + *
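[Editor's note] An illustrative look at the schedule this policy produces; the delays are approximate because of the jitter described above, and the base/max values are placeholders.

const reconnection = new ExponentialReconnectionPolicy(1000, 60000, false);
const schedule = reconnection.newSchedule();

// Yields roughly 1000, 2000, 4000, 8000, 16000 ms (each with +/- jitter), capped at 60000 ms.
for (let i = 0; i < 5; i++) {
  console.log(schedule.next().value);
}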

+ */ +class ExponentialReconnectionPolicy extends ReconnectionPolicy { + private baseDelay: number; + private maxDelay: number; + private startWithNoDelay: boolean; + /** + * A reconnection policy that waits exponentially longer between each + * reconnection attempt (but keeps a constant delay once a maximum delay is reached). + *

+ * A random amount of jitter (+/- 15%) will be added to the pure exponential delay value to avoid situations + * where many clients are in the reconnection process at exactly the same time. The jitter will never cause the + * delay to be less than the base delay, or more than the max delay. + *

+ * @param {Number} baseDelay The base delay in milliseconds to use for the schedules created by this policy. + * @param {Number} maxDelay The maximum delay in milliseconds to wait between two reconnection attempt. + * @param {Boolean} [startWithNoDelay] Determines if the first attempt should be zero delay + * @constructor + */ + constructor(baseDelay: number, maxDelay: number, startWithNoDelay?: boolean) { + super(); + this.baseDelay = baseDelay; + this.maxDelay = maxDelay; + this.startWithNoDelay = startWithNoDelay; + } + /** + * A new schedule that uses an exponentially growing delay between reconnection attempts. + * @returns {Iterator} An infinite iterator. + */ + *newSchedule(): Iterator { + let index = this.startWithNoDelay ? -1 : 0; + + while (true) { + let delay = 0; + + if (index >= 64) { + delay = this.maxDelay; + } else if (index !== -1) { + delay = Math.min(Math.pow(2, index) * this.baseDelay, this.maxDelay); + } + + index++; + + yield this._addJitter(delay); + } + } + /** + * Adds a random portion of +-15% to the delay provided. + * Initially, its adds a random value of 15% to avoid reconnection before reaching the base delay. + * When the schedule reaches max delay, only subtracts a random portion of 15%. + */ + private _addJitter(value) { + if (value === 0) { + // Instant reconnection without jitter + return value; + } + + // Use the formula: 85% + rnd() * 30% to calculate the percentage of the original delay + let minPercentage = 0.85; + let range = 0.30; + + if (!this.startWithNoDelay && value === this.baseDelay) { + // Between 100% to 115% of the original value + minPercentage = 1; + range = 0.15; + } else if (value === this.maxDelay) { + // Between 85% to 100% of the original value + range = 0.15; + } + + return Math.floor(value * (Math.random() * range + minPercentage)); + } + /** + * Gets an associative array containing the policy options. + */ + getOptions(): Map { + return new Map([ + ['baseDelay', this.baseDelay], + ['maxDelay', this.maxDelay], + ['startWithNoDelay', this.startWithNoDelay] + ]); + } +} + +export { + ConstantReconnectionPolicy, + ExponentialReconnectionPolicy, ReconnectionPolicy +}; + +export default { + ReconnectionPolicy, + ConstantReconnectionPolicy, + ExponentialReconnectionPolicy +}; \ No newline at end of file diff --git a/lib/policies/retry.js b/lib/policies/retry.js deleted file mode 100644 index 370d6c5d9..000000000 --- a/lib/policies/retry.js +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); - - -/** @module policies/retry */ -/** - * Base and default RetryPolicy. - * Determines what to do when the drivers runs into an specific Cassandra exception - * @constructor - */ -function RetryPolicy() { - -} - -/** - * Determines what to do when the driver gets an UnavailableException response from a Cassandra node. 
- * @param {OperationInfo} info - * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered - * the exception. - * @param {Number} required The number of replicas whose response is required to achieve the - * required [consistency]{@link module:types~consistencies}. - * @param {Number} alive The number of replicas that were known to be alive when the request had been processed - * (since an unavailable exception has been triggered, there will be alive < required) - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.onUnavailable = function (info, consistency, required, alive) { - if (info.nbRetry > 0) { - return this.rethrowResult(); - } - return this.retryResult(undefined, false); -}; - -/** - * Determines what to do when the driver gets a ReadTimeoutException response from a Cassandra node. - * @param {OperationInfo} info - * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered - * the exception. - * @param {Number} received The number of nodes having answered the request. - * @param {Number} blockFor The number of replicas whose response is required to achieve the - * required [consistency]{@link module:types~consistencies}. - * @param {Boolean} isDataPresent When false, it means the replica that was asked for data has not responded. - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.onReadTimeout = function (info, consistency, received, blockFor, isDataPresent) { - if (info.nbRetry > 0) { - return this.rethrowResult(); - } - return ((received >= blockFor && !isDataPresent) ? - this.retryResult() : - this.rethrowResult()); -}; - -/** - * Determines what to do when the driver gets a WriteTimeoutException response from a Cassandra node. - * @param {OperationInfo} info - * @param {Number} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered - * the exception. - * @param {Number} received The number of nodes having acknowledged the request. - * @param {Number} blockFor The number of replicas whose acknowledgement is required to achieve the required - * [consistency]{@link module:types~consistencies}. - * @param {String} writeType A string that describes the type of the write that timed out ("SIMPLE" - * / "BATCH" / "BATCH_LOG" / "UNLOGGED_BATCH" / "COUNTER"). - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.onWriteTimeout = function (info, consistency, received, blockFor, writeType) { - if (info.nbRetry > 0) { - return this.rethrowResult(); - } - // If the batch log write failed, retry the operation as this might just be we were unlucky at picking candidates - return writeType === "BATCH_LOG" ? this.retryResult() : this.rethrowResult(); -}; - -/** - * Defines whether to retry and at which consistency level on an unexpected error. - *

- * This method might be invoked in the following situations: - *

- *   1. On a client timeout, while waiting for the server response
- *      (see [socketOptions.readTimeout]{@link ClientOptions}), being the error an instance of
- *      [OperationTimedOutError]{@link module:errors~OperationTimedOutError}.
- *   2. On a connection error (socket closed, etc.).
- *   3. When the contacted host replies with an error, such as overloaded, isBootstrapping,
- *      serverError, etc. In this case, the error is instance of [ResponseError]{@link module:errors~ResponseError}.

- * Note that when this method is invoked, the driver cannot guarantee that the mutation has been effectively - * applied server-side; a retry should only be attempted if the request is known to be idempotent. - *

- * @param {OperationInfo} info - * @param {Number|undefined} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered - * the exception. - * @param {Error} err The error that caused this request to fail. - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.onRequestError = function (info, consistency, err) { - // The default implementation triggers a retry on the next host in the query plan with the same consistency level, - // regardless of the statement's idempotence, for historical reasons. - return this.retryResult(undefined, false); -}; - -/** - * Returns a {@link DecisionInfo} to retry the request with the given [consistency]{@link module:types~consistencies}. - * @param {Number|undefined} [consistency] When specified, it retries the request with the given consistency. - * @param {Boolean} [useCurrentHost] When specified, determines if the retry should be made using the same coordinator. - * Default: true. - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.retryResult = function (consistency, useCurrentHost) { - return { - decision: RetryPolicy.retryDecision.retry, - consistency: consistency, - useCurrentHost: useCurrentHost !== false - }; -}; - -/** - * Returns a {@link DecisionInfo} to callback in error when a err is obtained for a given request. - * @returns {DecisionInfo} - */ -RetryPolicy.prototype.rethrowResult = function () { - return { decision: RetryPolicy.retryDecision.rethrow }; -}; - -/** - * Determines the retry decision for the retry policies. - * @type {Object} - * @property {Number} rethrow - * @property {Number} retry - * @property {Number} ignore - * @static - */ -RetryPolicy.retryDecision = { - rethrow: 0, - retry: 1, - ignore: 2 -}; - -/** - * Creates a new instance of IdempotenceAwareRetryPolicy. - * @classdesc - * A retry policy that avoids retrying non-idempotent statements. - *

- * In case of write timeouts or unexpected errors, this policy will always return - * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent - * (see [QueryOptions.isIdempotent]{@link QueryOptions}). - *

- * For all other cases, this policy delegates the decision to the child policy. - * @param {RetryPolicy} [childPolicy] The child retry policy to wrap. When not defined, it will use an instance of - * [RetryPolicy]{@link module:policies/retry~RetryPolicy} as child policy. - * @extends module:policies/retry~RetryPolicy - * @constructor - * @deprecated Since version 4.0 non-idempotent operations are never tried for write timeout or request error, use the - * default retry policy instead. - */ -function IdempotenceAwareRetryPolicy(childPolicy) { - this._childPolicy = childPolicy || new RetryPolicy(); -} - -util.inherits(IdempotenceAwareRetryPolicy, RetryPolicy); - -IdempotenceAwareRetryPolicy.prototype.onReadTimeout = function (info, consistency, received, blockFor, isDataPresent) { - return this._childPolicy.onReadTimeout(info, consistency, received, blockFor, isDataPresent); -}; - -/** - * If the query is not idempotent, it returns a rethrow decision. Otherwise, it relies on the child policy to decide. - */ -IdempotenceAwareRetryPolicy.prototype.onRequestError = function (info, consistency, err) { - if (info.executionOptions.isIdempotent()) { - return this._childPolicy.onRequestError(info, consistency, err); - } - return this.rethrowResult(); -}; - -IdempotenceAwareRetryPolicy.prototype.onUnavailable = function (info, consistency, required, alive) { - return this._childPolicy.onUnavailable(info, consistency, required, alive); -}; - -/** - * If the query is not idempotent, it return a rethrow decision. Otherwise, it relies on the child policy to decide. - */ -IdempotenceAwareRetryPolicy.prototype.onWriteTimeout = function (info, consistency, received, blockFor, writeType) { - if (info.executionOptions.isIdempotent()) { - return this._childPolicy.onWriteTimeout(info, consistency, received, blockFor, writeType); - } - return this.rethrowResult(); -}; - -/** - * Creates a new instance of FallthroughRetryPolicy. - * @classdesc - * A retry policy that never retries nor ignores. - *

- * All of the methods of this retry policy unconditionally return - * [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. If this policy is used, retry logic will have to be - * implemented in business code. - *

- * @alias module:policies/retry~FallthroughRetryPolicy - * @extends RetryPolicy - * @constructor - */ -function FallthroughRetryPolicy() { - -} - -util.inherits(FallthroughRetryPolicy, RetryPolicy); - -/** - * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. - */ -FallthroughRetryPolicy.prototype.onReadTimeout = function () { - return this.rethrowResult(); -}; - -/** - * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. - */ -FallthroughRetryPolicy.prototype.onRequestError = function () { - return this.rethrowResult(); -}; - -/** - * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. - */ -FallthroughRetryPolicy.prototype.onUnavailable = function () { - return this.rethrowResult(); -}; - -/** - * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. - */ -FallthroughRetryPolicy.prototype.onWriteTimeout = function () { - return this.rethrowResult(); -}; - -/** - * Decision information - * @typedef {Object} DecisionInfo - * @property {Number} decision The decision as specified in - * [retryDecision]{@link module:policies/retry~RetryPolicy.retryDecision}. - * @property {Number} [consistency] The [consistency level]{@link module:types~consistencies}. - * @property {useCurrentHost} [useCurrentHost] Determines if it should use the same host to retry the request. - *

- * In the case that the current host is not available anymore, it will be retried on the next host even when - * useCurrentHost is set to true. - *

- */ - -/** - * Information of the execution to be used to determine whether the operation should be retried. - * @typedef {Object} OperationInfo - * @property {String} query The query that was executed. - * @param {ExecutionOptions} executionOptions The options related to the execution of the request. - * @property {Number} nbRetry The number of retries already performed for this operation. - */ - -exports.IdempotenceAwareRetryPolicy = IdempotenceAwareRetryPolicy; -exports.FallthroughRetryPolicy = FallthroughRetryPolicy; -exports.RetryPolicy = RetryPolicy; \ No newline at end of file diff --git a/lib/policies/retry.ts b/lib/policies/retry.ts new file mode 100644 index 000000000..dfdd3ee6a --- /dev/null +++ b/lib/policies/retry.ts @@ -0,0 +1,331 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import type { ExecutionOptions } from "../execution-options"; +import type { consistencies } from "../types"; + +/** + * Base and default RetryPolicy. + * Determines what to do when the driver encounters specific Cassandra exceptions. + */ +class RetryPolicy { + + //TODO: alive was in type boolean. But I think it's number + /** + * Determines what to do when the driver gets an UnavailableException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} required The number of replicas whose response is required to achieve the + * required [consistency]{@link module:types~consistencies}. + * @param {Number} alive The number of replicas that were known to be alive when the request had been processed + * (since an unavailable exception has been triggered, there will be alive < required) + * @returns {DecisionInfo} + */ + onUnavailable(info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo { + if (info.nbRetry > 0) { + return this.rethrowResult(); + } + return this.retryResult(undefined, false); + } + + /** + * Determines what to do when the driver gets a ReadTimeoutException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} received The number of nodes having answered the request. + * @param {Number} blockFor The number of replicas whose response is required to achieve the + * required [consistency]{@link module:types~consistencies}. + * @param {Boolean} isDataPresent When false, it means the replica that was asked for data has not responded. 
+ * @returns {DecisionInfo} + */ + onReadTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, isDataPresent: boolean): DecisionInfo { + if (info.nbRetry > 0) { + return this.rethrowResult(); + } + return received >= blockFor && !isDataPresent + ? this.retryResult() + : this.rethrowResult(); + } + + /** + * Determines what to do when the driver gets a WriteTimeoutException response from a Cassandra node. + * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Number} received The number of nodes having acknowledged the request. + * @param {Number} blockFor The number of replicas whose acknowledgement is required to achieve the required + * [consistency]{@link module:types~consistencies}. + * @param {String} writeType A string that describes the type of the write that timed out ("SIMPLE" + * / "BATCH" / "BATCH_LOG" / "UNLOGGED_BATCH" / "COUNTER"). + * @returns {DecisionInfo} + */ + onWriteTimeout(info: OperationInfo, consistency: consistencies, received: number, blockFor: number, writeType: string): DecisionInfo { + if (info.nbRetry > 0) { + return this.rethrowResult(); + } + // If the batch log write failed, retry the operation as this might just be we were unlucky at picking candidates + return writeType === "BATCH_LOG" ? this.retryResult() : this.rethrowResult(); + } + + /** + * Defines whether to retry and at which consistency level on an unexpected error. + *

+ * This method might be invoked in the following situations: + *

+ *
    + *
+ * 1. On a client timeout, while waiting for the server response + * (see [socketOptions.readTimeout]{@link ClientOptions}), being the error an instance of + * [OperationTimedOutError]{@link module:errors~OperationTimedOutError}.
+ * 2. On a connection error (socket closed, etc.).
+ * 3. When the contacted host replies with an error, such as overloaded, isBootstrapping, + * serverError, etc. In this case, the error is instance of [ResponseError]{@link module:errors~ResponseError}. + *
+ *

+ * Note that when this method is invoked, the driver cannot guarantee that the mutation has been effectively + * applied server-side; a retry should only be attempted if the request is known to be idempotent. + *
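For illustration, a minimal sketch of a custom policy that only retries request errors for idempotent statements. It assumes the published cassandra-driver package and its exported policies namespace; the class name and connection settings are hypothetical application code, not part of this change:

```typescript
import { Client, policies } from "cassandra-driver";

// Hypothetical application-side policy: rethrow request errors unless the statement
// was marked idempotent, in which case retry once on the next host in the query plan.
class IdempotentOnlyRetryPolicy extends policies.retry.RetryPolicy {
  onRequestError(info, consistency, err) {
    if (info.nbRetry === 0 && info.executionOptions.isIdempotent()) {
      // undefined keeps the original consistency; false moves to the next coordinator.
      return this.retryResult(undefined, false);
    }
    return this.rethrowResult();
  }
}

// Wiring it into a client instance (contact points are placeholders).
const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "datacenter1",
  policies: { retry: new IdempotentOnlyRetryPolicy() }
});
```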

+ * @param {OperationInfo} info + * @param {consistencies} consistency The [consistency]{@link module:types~consistencies} level of the query that triggered + * the exception. + * @param {Error} err The error that caused this request to fail. + * @returns {DecisionInfo} + */ + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo { + // The default implementation triggers a retry on the next host in the query plan with the same consistency level, + // regardless of the statement's idempotence, for historical reasons. + return this.retryResult(undefined, false); + } + + /** + * Returns a {@link DecisionInfo} to retry the request with the given [consistency]{@link module:types~consistencies}. + * @param {consistencies} [consistency] When specified, it retries the request with the given consistency. + * @param {Boolean} [useCurrentHost] When specified, determines if the retry should be made using the same coordinator. + * Default: true. + * @returns {DecisionInfo} + */ + retryResult(consistency?: consistencies, useCurrentHost: boolean = true): DecisionInfo { + return { + decision: RetryPolicy.retryDecision.retry, + consistency, + useCurrentHost, + }; + } + + /** + * Returns a {@link DecisionInfo} to callback in error when a err is obtained for a given request. + * @returns {DecisionInfo} + */ + rethrowResult(): DecisionInfo { + return { decision: RetryPolicy.retryDecision.rethrow }; + } +} + +//TODO: What is happening to the previous .d.ts? +/** + * namespace RetryDecision { + enum retryDecision { + ignore, + rethrow, + retry + } + } + */ +// This RetryDecision doesn't exist in JavaScript or anywhere else +/** + * Determines the retry decision for the retry policies. + * @type {Object} + * @property {Number} rethrow + * @property {Number} retry + * @property {Number} ignore + * @static + */ +namespace RetryPolicy { + export enum retryDecision { + rethrow = 0, + retry = 1, + ignore = 2 + } +} + +/** + * @classdesc + * A retry policy that avoids retrying non-idempotent statements. + *

+ * In case of write timeouts or unexpected errors, this policy will always return + * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent + * (see [QueryOptions.isIdempotent]{@link QueryOptions}). + *

+ * For all other cases, this policy delegates the decision to the child policy. + * @extends module:policies/retry~RetryPolicy + * @deprecated Since version 4.0 non-idempotent operations are never tried for write timeout or request error, use the + * default retry policy instead. + */ +class IdempotenceAwareRetryPolicy extends RetryPolicy { + private _childPolicy: RetryPolicy; + + /** + * Creates a new instance of IdempotenceAwareRetryPolicy. + * This is a retry policy that avoids retrying non-idempotent statements. + *

+ * In case of write timeouts or unexpected errors, this policy will always return + * [rethrowResult()]{@link module:policies/retry~RetryPolicy#rethrowResult} if the statement is deemed non-idempotent + * (see [QueryOptions.isIdempotent]{@link QueryOptions}). + *

+ * For all other cases, this policy delegates the decision to the child policy. + * @param {RetryPolicy} [childPolicy] The child retry policy to wrap. When not defined, it will use an instance of + * [RetryPolicy]{@link module:policies/retry~RetryPolicy} as child policy. + * @constructor + * @deprecated Since version 4.0 non-idempotent operations are never tried for write timeout or request error, use the + * default retry policy instead. + */ + constructor(childPolicy: RetryPolicy = new RetryPolicy()) { + super(); + this._childPolicy = childPolicy; + } + + onReadTimeout( + info: OperationInfo, + consistency: consistencies, + received: number, + blockFor: number, + isDataPresent: boolean): DecisionInfo { + return this._childPolicy.onReadTimeout(info, consistency, received, blockFor, isDataPresent); + } + + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo{ + if (info.executionOptions.isIdempotent()) { + return this._childPolicy.onRequestError(info, consistency, err); + } + return this.rethrowResult(); + } + + onUnavailable( + info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo{ + return this._childPolicy.onUnavailable(info, consistency, required, alive); + } + + /** + * If the query is not idempotent, it return a rethrow decision. Otherwise, it relies on the child policy to decide. + */ + onWriteTimeout( + info: OperationInfo, + consistency: consistencies, + received: number, + blockFor: number, + writeType: string): DecisionInfo { + if (info.executionOptions.isIdempotent()) { + return this._childPolicy.onWriteTimeout(info, consistency, received, blockFor, writeType); + } + return this.rethrowResult(); + } +} + +/** + * @classdesc + * A retry policy that never retries nor ignores. + *

+ * All of the methods of this retry policy unconditionally return + * [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. If this policy is used, retry logic will have to be + * implemented in business code. + *
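As a usage sketch (application code, not part of this diff; connection settings and the query are placeholders), the policy is passed through the client options and every failure then surfaces to the caller, which decides what to do:

```typescript
import { Client, policies } from "cassandra-driver";

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "datacenter1",
  policies: { retry: new policies.retry.FallthroughRetryPolicy() }
});

async function readUser(id: string) {
  try {
    return await client.execute("SELECT * FROM ks.users WHERE id = ?", [id], { prepare: true });
  } catch (err) {
    // The driver never retried: the application decides here (log, retry once, fail fast, ...).
    console.error("read failed, retrying once at the application level", err);
    return client.execute("SELECT * FROM ks.users WHERE id = ?", [id], { prepare: true });
  }
}
```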

+ * @alias module:policies/retry~FallthroughRetryPolicy + * @extends RetryPolicy + */ +class FallthroughRetryPolicy extends RetryPolicy { + + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onReadTimeout( + info: OperationInfo, + consistency: consistencies, + received: number, + blockFor: number, + isDataPresent: boolean): DecisionInfo { + return this.rethrowResult(); + } + + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onRequestError(info: OperationInfo, consistency: consistencies, err: Error): DecisionInfo{ + return this.rethrowResult(); + } + + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onUnavailable( + info: OperationInfo, consistency: consistencies, required: number, alive: number): DecisionInfo{ + return this.rethrowResult(); + } + /** + * Implementation of RetryPolicy method that returns [rethrow]{@link module:policies/retry~Retry#rethrowResult()}. + */ + onWriteTimeout( + info: OperationInfo, + consistency: consistencies, + received: number, + blockFor: number, + writeType: string): DecisionInfo { + return this.rethrowResult(); + } +} + +/** + * Decision information + * @typedef {Object} DecisionInfo + * @property {Number} decision The decision as specified in + * [retryDecision]{@link module:policies/retry~RetryPolicy.retryDecision}. + * @property {Number} [consistency] The [consistency level]{@link module:types~consistencies}. + * @property {useCurrentHost} [useCurrentHost] Determines if it should use the same host to retry the request. + *

+ * In the case that the current host is not available anymore, it will be retried on the next host even when + * useCurrentHost is set to true. + *
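To make the shape concrete, a hedged sketch of a policy override returning a DecisionInfo literal, equivalent to calling retryResult(consistency, false). It assumes the published driver exports; the class name is hypothetical application code:

```typescript
import { policies } from "cassandra-driver";

class NextHostOnUnavailablePolicy extends policies.retry.RetryPolicy {
  onUnavailable(info, consistency, required, alive) {
    if (info.nbRetry > 0) {
      return this.rethrowResult();
    }
    // Retry once on another coordinator, keeping the same consistency level.
    return {
      decision: policies.retry.RetryPolicy.retryDecision.retry,
      consistency,
      useCurrentHost: false
    };
  }
}
```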

+ */ +type DecisionInfo = { + decision: number; + consistency?: consistencies; + useCurrentHost?: boolean; +} + +/** + * Information of the execution to be used to determine whether the operation should be retried. + * @typedef {Object} OperationInfo + * @property {String} query The query that was executed. + * @param {ExecutionOptions} executionOptions The options related to the execution of the request. + * @property {Number} nbRetry The number of retries already performed for this operation. + */ +type OperationInfo = { + query: string; + executionOptions: ExecutionOptions; + nbRetry: number; +} + +export { + FallthroughRetryPolicy, IdempotenceAwareRetryPolicy, RetryPolicy, + type DecisionInfo +}; + +export default { + IdempotenceAwareRetryPolicy, + FallthroughRetryPolicy, + RetryPolicy +}; \ No newline at end of file diff --git a/lib/policies/speculative-execution.js b/lib/policies/speculative-execution.js deleted file mode 100644 index 7705802cb..000000000 --- a/lib/policies/speculative-execution.js +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const util = require('util'); -const errors = require('../errors'); - -/** @module policies/speculativeExecution */ - -/** - * @classdesc - * The policy that decides if the driver will send speculative queries to the next hosts when the current host takes too - * long to respond. - *

- * Note that only idempotent statements will be speculatively retried.

- * @constructor - * @abstract - */ -function SpeculativeExecutionPolicy() { - -} - -/** - * Initialization method that gets invoked on Client startup. - * @param {Client} client - * @abstract - */ -SpeculativeExecutionPolicy.prototype.init = function (client) { - -}; - -/** - * Gets invoked at client shutdown, giving the opportunity to the implementor to perform cleanup. - * @abstract - */ -SpeculativeExecutionPolicy.prototype.shutdown = function () { - -}; - -/** - * Gets the plan to use for a new query. - * Returns an object with a nextExecution() method, which returns a positive number representing the - * amount of milliseconds to delay the next execution or a non-negative number to avoid further executions. - * @param {String} keyspace The currently logged keyspace. - * @param {String|Array} queryInfo The query, or queries in the case of batches, for which to build a plan. - * @return {{nextExecution: function}} - * @abstract - */ -SpeculativeExecutionPolicy.prototype.newPlan = function (keyspace, queryInfo) { - throw new Error('You must implement newPlan() method in the SpeculativeExecutionPolicy'); -}; - -/** - * Gets an associative array containing the policy options. - */ -SpeculativeExecutionPolicy.prototype.getOptions = function () { - return new Map(); -}; - -/** - * Creates a new instance of NoSpeculativeExecutionPolicy. - * @classdesc - * A {@link SpeculativeExecutionPolicy} that never schedules speculative executions. - * @constructor - * @extends {SpeculativeExecutionPolicy} - */ -function NoSpeculativeExecutionPolicy() { - this._plan = { - nextExecution: function () { - return -1; - } - }; -} - -util.inherits(NoSpeculativeExecutionPolicy, SpeculativeExecutionPolicy); - -NoSpeculativeExecutionPolicy.prototype.newPlan = function () { - return this._plan; -}; - - -/** - * Creates a new instance of ConstantSpeculativeExecutionPolicy. - * @classdesc - * A {@link SpeculativeExecutionPolicy} that schedules a given number of speculative executions, - * separated by a fixed delay. - * @constructor - * @param {Number} delay The delay between each speculative execution. - * @param {Number} maxSpeculativeExecutions The amount of speculative executions that should be scheduled after the - * initial execution. Must be strictly positive. - * @extends {SpeculativeExecutionPolicy} - */ -function ConstantSpeculativeExecutionPolicy(delay, maxSpeculativeExecutions) { - if (!(delay >= 0)) { - throw new errors.ArgumentError('delay must be a positive number or zero'); - } - if (!(maxSpeculativeExecutions > 0)) { - throw new errors.ArgumentError('maxSpeculativeExecutions must be a positive number'); - } - this._delay = delay; - this._maxSpeculativeExecutions = maxSpeculativeExecutions; -} - -util.inherits(ConstantSpeculativeExecutionPolicy, SpeculativeExecutionPolicy); - -ConstantSpeculativeExecutionPolicy.prototype.newPlan = function () { - let executions = 0; - const self = this; - return { - nextExecution: function () { - if (executions++ < self._maxSpeculativeExecutions) { - return self._delay; - } - return -1; - } - }; -}; - -/** - * Gets an associative array containing the policy options. 
- */ -ConstantSpeculativeExecutionPolicy.prototype.getOptions = function () { - return new Map([ - ['delay', this._delay ], - ['maxSpeculativeExecutions', this._maxSpeculativeExecutions ] - ]); -}; - -exports.NoSpeculativeExecutionPolicy = NoSpeculativeExecutionPolicy; -exports.SpeculativeExecutionPolicy = SpeculativeExecutionPolicy; -exports.ConstantSpeculativeExecutionPolicy = ConstantSpeculativeExecutionPolicy; \ No newline at end of file diff --git a/lib/policies/speculative-execution.ts b/lib/policies/speculative-execution.ts new file mode 100644 index 000000000..4b7c12cee --- /dev/null +++ b/lib/policies/speculative-execution.ts @@ -0,0 +1,157 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import type Client from "../client"; +import errors from "../errors"; + + + +/** @module policies/speculativeExecution */ + +/** + * @classdesc + * The policy that decides if the driver will send speculative queries to the next hosts when the current host takes too + * long to respond. + *

+ * Note that only idempotent statements will be speculatively retried.
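A usage sketch of the constant policy together with the idempotence flag it depends on (application code, not part of this diff; contact points and the query are placeholders):

```typescript
import { Client, policies } from "cassandra-driver";

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "datacenter1",
  policies: {
    // Start up to 2 extra executions, each 200 ms after the previous one.
    speculativeExecution: new policies.speculativeExecution.ConstantSpeculativeExecutionPolicy(200, 2)
  }
});

async function readRow(id: string) {
  // Only statements explicitly marked as idempotent are speculatively retried.
  return client.execute("SELECT * FROM ks.users WHERE id = ?", [id], { prepare: true, isIdempotent: true });
}
```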

+ * @abstract + */ +class SpeculativeExecutionPolicy { + constructor() { + } + /** + * Initialization method that gets invoked on Client startup. + * @param {Client} client + * @abstract + */ + init(client: Client) { + } + /** + * Gets invoked at client shutdown, giving the opportunity to the implementor to perform cleanup. + * @abstract + */ + shutdown() { + } + /** + * Gets the plan to use for a new query. + * Returns an object with a nextExecution() method, which returns a positive number representing the + * amount of milliseconds to delay the next execution or a non-negative number to avoid further executions. + * @param {String} keyspace The currently logged keyspace. + * @param {String|Array} queryInfo The query, or queries in the case of batches, for which to build a plan. + * @return {{nextExecution: function}} + * @abstract + */ + newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: () => number; } { + throw new Error('You must implement newPlan() method in the SpeculativeExecutionPolicy'); + } + /** + * Gets an associative array containing the policy options. + */ + getOptions() : Map { + return new Map(); + } +} + + + + + +/** + * Creates a new instance of NoSpeculativeExecutionPolicy. + * @classdesc + * A {@link SpeculativeExecutionPolicy} that never schedules speculative executions. + * @extends {SpeculativeExecutionPolicy} + */ +class NoSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + private _plan: { nextExecution: () => number; }; + /** + * Creates a new instance of NoSpeculativeExecutionPolicy. + */ + constructor() { + super(); + this._plan = { + nextExecution: function () { + return -1; + } + }; + } + newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: () => number; }{ + return this._plan; + } +} + + +/** + * @classdesc + * A {@link SpeculativeExecutionPolicy} that schedules a given number of speculative executions, + * separated by a fixed delay. + * @extends {SpeculativeExecutionPolicy} + */ +class ConstantSpeculativeExecutionPolicy extends SpeculativeExecutionPolicy { + private _delay: number; + private _maxSpeculativeExecutions: number; + /** + * Creates a new instance of ConstantSpeculativeExecutionPolicy. + * @constructor + * @param {Number} delay The delay between each speculative execution. + * @param {Number} maxSpeculativeExecutions The amount of speculative executions that should be scheduled after the + * initial execution. Must be strictly positive. + */ + constructor(delay: number, maxSpeculativeExecutions: number) { + super(); + if (!(delay >= 0)) { + throw new errors.ArgumentError('delay must be a positive number or zero'); + } + if (!(maxSpeculativeExecutions > 0)) { + throw new errors.ArgumentError('maxSpeculativeExecutions must be a positive number'); + } + this._delay = delay; + this._maxSpeculativeExecutions = maxSpeculativeExecutions; + } + + newPlan(keyspace: string, queryInfo: string | Array): { nextExecution: () => number; }{ + let executions = 0; + const self = this; + return { + nextExecution: function () { + if (executions++ < self._maxSpeculativeExecutions) { + return self._delay; + } + return -1; + } + }; + } + /** + * Gets an associative array containing the policy options. 
+ */ + getOptions(): Map { + return new Map([ + ['delay', this._delay], + ['maxSpeculativeExecutions', this._maxSpeculativeExecutions] + ]); + } +} + + +export { + ConstantSpeculativeExecutionPolicy, NoSpeculativeExecutionPolicy, + SpeculativeExecutionPolicy +}; + +export default{ + NoSpeculativeExecutionPolicy, + SpeculativeExecutionPolicy, + ConstantSpeculativeExecutionPolicy +}; \ No newline at end of file diff --git a/lib/policies/timestamp-generation.js b/lib/policies/timestamp-generation.js deleted file mode 100644 index dbae075e5..000000000 --- a/lib/policies/timestamp-generation.js +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const util = require('util'); -const { Long } = require('../types'); -const errors = require('../errors'); - -/** @module policies/timestampGeneration */ - -/** - * Defines the maximum date in milliseconds that can be represented in microseconds using Number ((2 ^ 53) / 1000) - * @const - * @private - */ -const _maxSafeNumberDate = 9007199254740; - -/** - * A long representing the value 1000 - * @const - * @private - */ -const _longOneThousand = Long.fromInt(1000); - -/** - * Creates a new instance of {@link TimestampGenerator}. - * @classdesc - * Generates client-side, microsecond-precision query timestamps. - *

- * Given that Cassandra uses those timestamps to resolve conflicts, implementations should generate - * monotonically increasing timestamps for successive invocations of {@link TimestampGenerator.next()}. - *

- * @constructor - */ -function TimestampGenerator() { - -} - -/** - * Returns the next timestamp. - *

- * Implementors should enforce increasing monotonicity of timestamps, that is, - * a timestamp returned should always be strictly greater that any previously returned - * timestamp. - *

- *

- * Implementors should strive to achieve microsecond precision in the best possible way, - * which is usually largely dependent on the underlying operating system's capabilities. - *

- * @param {Client} client The {@link Client} instance to generate timestamps to. - * @returns {Long|Number|null} the next timestamp (in microseconds). If it's equals to null, it won't be - * sent by the driver, letting the server to generate the timestamp. - * @abstract - */ -TimestampGenerator.prototype.next = function (client) { - throw new Error('next() must be implemented'); -}; - -/** - * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps - * drift in the future. - *

- * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator - * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, - * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. - *

- * @param {Number} [warningThreshold] Determines how far in the future timestamps are allowed to drift before a - * warning is logged, expressed in milliseconds. Default: 1000. - * @param {Number} [minLogInterval] In case of multiple log events, it determines the time separation between log - * events, expressed in milliseconds. Use 0 to disable. Default: 1000. - * @extends {TimestampGenerator} - * @constructor - */ -function MonotonicTimestampGenerator(warningThreshold, minLogInterval) { - if (warningThreshold < 0) { - throw new errors.ArgumentError('warningThreshold can not be lower than 0'); - } - this._warningThreshold = warningThreshold || 1000; - this._minLogInterval = 1000; - if (typeof minLogInterval === 'number') { - // A value under 1 will disable logging - this._minLogInterval = minLogInterval; - } - this._micros = -1; - this._lastDate = 0; - this._lastLogDate = 0; -} - -util.inherits(MonotonicTimestampGenerator, TimestampGenerator); - -/** - * Returns the current time in milliseconds since UNIX epoch - * @returns {Number} - */ -MonotonicTimestampGenerator.prototype.getDate = function () { - return Date.now(); -}; - -MonotonicTimestampGenerator.prototype.next = function (client) { - let date = this.getDate(); - let drifted = 0; - if (date > this._lastDate) { - this._micros = 0; - this._lastDate = date; - return this._generateMicroseconds(); - } - - if (date < this._lastDate) { - drifted = this._lastDate - date; - date = this._lastDate; - } - if (++this._micros === 1000) { - this._micros = 0; - if (date === this._lastDate) { - // Move date 1 millisecond into the future - date++; - drifted++; - } - } - const lastDate = this._lastDate; - this._lastDate = date; - const result = this._generateMicroseconds(); - if (drifted >= this._warningThreshold) { - // Avoid logging an unbounded amount of times within a clock-skew event or during an interval when more than 1 - // query is being issued by microsecond - const currentLogDate = Date.now(); - if (this._minLogInterval > 0 && this._lastLogDate + this._minLogInterval <= currentLogDate){ - const message = util.format( - 'Timestamp generated using current date was %d milliseconds behind the last generated timestamp (which ' + - 'millisecond portion was %d), the returned value (%s) is being artificially incremented to guarantee ' + - 'monotonicity.', - drifted, lastDate, result); - this._lastLogDate = currentLogDate; - client.log('warning', message); - } - } - return result; -}; - -/** - * @private - * @returns {Number|Long} - */ -MonotonicTimestampGenerator.prototype._generateMicroseconds = function () { - if (this._lastDate < _maxSafeNumberDate) { - // We are safe until Jun 06 2255, its faster to perform this operations on Number than on Long - // We hope to have native int64 by then :) - return this._lastDate * 1000 + this._micros; - } - return Long - .fromNumber(this._lastDate) - .multiply(_longOneThousand) - .add(Long.fromInt(this._micros)); -}; - -exports.TimestampGenerator = TimestampGenerator; -exports.MonotonicTimestampGenerator = MonotonicTimestampGenerator; \ No newline at end of file diff --git a/lib/policies/timestamp-generation.ts b/lib/policies/timestamp-generation.ts new file mode 100644 index 000000000..5b93b32fc --- /dev/null +++ b/lib/policies/timestamp-generation.ts @@ -0,0 +1,190 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import util from "util"; +import type Client from "../client"; +import errors from "../errors"; +import { Long } from "../types/index"; + + +/** @module policies/timestampGeneration */ + +/** + * Defines the maximum date in milliseconds that can be represented in microseconds using Number ((2 ^ 53) / 1000) + * @const + * @private + */ +const _maxSafeNumberDate = 9007199254740; + +/** + * A long representing the value 1000 + * @const + * @private + */ +const _longOneThousand = Long.fromInt(1000); + +/** + * Creates a new instance of {@link TimestampGenerator}. + * @classdesc + * Generates client-side, microsecond-precision query timestamps. + *

+ * Given that Cassandra uses those timestamps to resolve conflicts, implementations should generate + * monotonically increasing timestamps for successive invocations of {@link TimestampGenerator.next()}. + *
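For illustration, a minimal generator sketch built on the published driver exports (coarse on purpose: values repeat within a single millisecond, which is exactly what MonotonicTimestampGenerator below avoids). The class name is hypothetical application code:

```typescript
import { policies, types } from "cassandra-driver";

// Hypothetical example: derive microseconds directly from Date.now().
class CoarseTimestampGenerator extends policies.timestampGeneration.TimestampGenerator {
  next() {
    // milliseconds * 1000 = microseconds; not unique within the same millisecond.
    return types.Long.fromNumber(Date.now()).multiply(types.Long.fromInt(1000));
  }
}
```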

+ * @constructor + */ +class TimestampGenerator { + constructor() { + } + /** + * Returns the next timestamp. + *

+ * Implementors should enforce increasing monotonicity of timestamps, that is, + * a timestamp returned should always be strictly greater that any previously returned + * timestamp. + *

+ *

+ * Implementors should strive to achieve microsecond precision in the best possible way, + * which is usually largely dependent on the underlying operating system's capabilities. + *

+ * @param {Client} client The {@link Client} instance to generate timestamps to. + * @returns {Long|Number|null} the next timestamp (in microseconds). If it's equals to null, it won't be + * sent by the driver, letting the server to generate the timestamp. + * @abstract + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + next(client: Client): Long | number | null { + throw new Error('next() must be implemented'); + } +} + + +/** + * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps + * drift in the future. + *

+ * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator + * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, + * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. + *
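A configuration sketch (application code; connection settings are placeholders) showing how the generator is plugged in, so that successive calls within the same millisecond yield lastMillis * 1000, lastMillis * 1000 + 1, and so on:

```typescript
import { Client, policies } from "cassandra-driver";

const client = new Client({
  contactPoints: ["127.0.0.1"],
  localDataCenter: "datacenter1",
  policies: {
    // Warn when timestamps drift more than 1000 ms ahead, logging at most once per second.
    timestampGeneration: new policies.timestampGeneration.MonotonicTimestampGenerator(1000, 1000)
  }
});
```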

+ * @extends {TimestampGenerator} + */ +class MonotonicTimestampGenerator extends TimestampGenerator { + private _warningThreshold: number; + private _minLogInterval: number; + private _micros: number; + private _lastDate: number; + private _lastLogDate: number; + /** + * A timestamp generator that guarantees monotonically increasing timestamps and logs warnings when timestamps + * drift in the future. + *

+ * {@link Date} has millisecond precision and client timestamps require microsecond precision. This generator + * keeps track of the last generated timestamp, and if the current time is within the same millisecond as the last, + * it fills the microsecond portion of the new timestamp with the value of an incrementing counter. + *

+ * @param {Number} [warningThreshold] Determines how far in the future timestamps are allowed to drift before a + * warning is logged, expressed in milliseconds. Default: 1000. + * @param {Number} [minLogInterval] In case of multiple log events, it determines the time separation between log + * events, expressed in milliseconds. Use 0 to disable. Default: 1000. + * @constructor + */ + constructor(warningThreshold?: number, minLogInterval?: number) { + super(); + if (warningThreshold < 0) { + throw new errors.ArgumentError('warningThreshold can not be lower than 0'); + } + this._warningThreshold = warningThreshold || 1000; + this._minLogInterval = 1000; + if (typeof minLogInterval === 'number') { + // A value under 1 will disable logging + this._minLogInterval = minLogInterval; + } + this._micros = -1; + this._lastDate = 0; + this._lastLogDate = 0; + } + /** + * Returns the current time in milliseconds since UNIX epoch + * @returns {Number} + */ + getDate(): number { + return Date.now(); + } + next(client: Client): Long | number | null{ + let date = this.getDate(); + let drifted = 0; + if (date > this._lastDate) { + this._micros = 0; + this._lastDate = date; + return this._generateMicroseconds(); + } + + if (date < this._lastDate) { + drifted = this._lastDate - date; + date = this._lastDate; + } + if (++this._micros === 1000) { + this._micros = 0; + if (date === this._lastDate) { + // Move date 1 millisecond into the future + date++; + drifted++; + } + } + const lastDate = this._lastDate; + this._lastDate = date; + const result = this._generateMicroseconds(); + if (drifted >= this._warningThreshold) { + // Avoid logging an unbounded amount of times within a clock-skew event or during an interval when more than 1 + // query is being issued by microsecond + const currentLogDate = Date.now(); + if (this._minLogInterval > 0 && this._lastLogDate + this._minLogInterval <= currentLogDate) { + const message = util.format( + 'Timestamp generated using current date was %d milliseconds behind the last generated timestamp (which ' + + 'millisecond portion was %d), the returned value (%s) is being artificially incremented to guarantee ' + + 'monotonicity.', + drifted, lastDate, result); + this._lastLogDate = currentLogDate; + client.log('warning', message); + } + } + return result; + } + /** + * @private + * @returns {Number|Long} + */ + private _generateMicroseconds(): number | Long { + if (this._lastDate < _maxSafeNumberDate) { + // We are safe until Jun 06 2255, its faster to perform this operations on Number than on Long + // We hope to have native int64 by then :) + return this._lastDate * 1000 + this._micros; + } + return Long + .fromNumber(this._lastDate) + .multiply(_longOneThousand) + .add(Long.fromInt(this._micros)); + } +} + +export { + MonotonicTimestampGenerator, TimestampGenerator +}; + +export default{ + TimestampGenerator, + MonotonicTimestampGenerator +}; \ No newline at end of file diff --git a/lib/prepare-handler.js b/lib/prepare-handler.ts similarity index 81% rename from lib/prepare-handler.js rename to lib/prepare-handler.ts index 0edaa60de..33e481c61 100644 --- a/lib/prepare-handler.js +++ b/lib/prepare-handler.ts @@ -13,28 +13,39 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import type Client from "./client"; +import Connection from "./connection"; +import errors from "./errors"; +import { type ProfileManager } from "./execution-profile"; +import type { Host } from "./host"; +import { type PreparedQueryInfo } from "./metadata"; +import { LoadBalancingPolicy } from "./policies/load-balancing"; +import promiseUtils from "./promise-utils"; +import types from "./types/index"; +import utils from "./utils"; + -const errors = require('./errors'); -const utils = require('./utils'); -const types = require('./types'); -const promiseUtils = require('./promise-utils'); /** * Encapsulates the logic for dealing with the different prepare request and response flows, including failover when * trying to prepare a query. + * @ignore @internal */ class PrepareHandler { + private _client: Client; + private _loadBalancing: LoadBalancingPolicy; + logEmitter: any; + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; /** * Creates a new instance of PrepareHandler * @param {Client} client * @param {LoadBalancingPolicy} loadBalancing */ - constructor(client, loadBalancing) { + constructor(client: Client, loadBalancing: LoadBalancingPolicy) { this._client = client; this._loadBalancing = loadBalancing; this.logEmitter = client.options.logEmitter; - this.log = utils.log; + this.log = utils.log.bind(this); } /** @@ -47,7 +58,7 @@ class PrepareHandler { * @returns {Promise<{queryId, meta}>} * @static */ - static async getPrepared(client, loadBalancing, query, keyspace) { + static async getPrepared(client: Client, loadBalancing: LoadBalancingPolicy, query: string, keyspace: string): Promise { const info = client.metadata.getPreparedInfo(keyspace, query); if (info.queryId) { return info; @@ -69,7 +80,7 @@ class PrepareHandler { * @param {String} keyspace * @static */ - static async getPreparedMultiple(client, loadBalancing, queries, keyspace) { + static async getPreparedMultiple(client: Client, loadBalancing: LoadBalancingPolicy, queries: Array, keyspace: string) { const result = []; for (const item of queries) { @@ -98,7 +109,7 @@ class PrepareHandler { * @param {String} keyspace * @returns {Promise<{queryId, meta}>} */ - async _prepare(info, query, keyspace) { + async _prepare(info: PreparedQueryInfo, query: string, keyspace: string): Promise { info.preparing = true; let iterator; @@ -123,7 +134,7 @@ class PrepareHandler { * @returns {Promise<{queryId, meta}>} * @private */ - async _prepareWithQueryPlan(info, iterator, query, keyspace) { + async _prepareWithQueryPlan(info: PreparedQueryInfo, iterator: Iterator, query: string, keyspace: string): Promise { const triedHosts = {}; while (true) { @@ -168,7 +179,7 @@ class PrepareHandler { * @param {Object} [triedHosts] * @return {Host|null} */ - static getNextHost(iterator, profileManager, triedHosts) { + static getNextHost(iterator: Iterator, profileManager: ProfileManager, triedHosts?: object): Host | null { let host; // Get a host that is UP in a sync loop while (true) { @@ -203,7 +214,7 @@ class PrepareHandler { * @param {Host} host * @param {Array} allPrepared */ - static async prepareAllQueries(host, allPrepared) { + static async prepareAllQueries(host: Host, allPrepared: Array) { const anyKeyspaceQueries = []; const queriesByKeyspace = new Map(); @@ -238,7 +249,7 @@ class PrepareHandler { * @returns {Promise} * @private */ - static async _borrowAndPrepare(host, keyspace, queries) { + static async _borrowAndPrepare(host: Host, keyspace: string, queries: Array): Promise { if (queries.length === 
0) { return; } @@ -261,7 +272,7 @@ class PrepareHandler { * @throws {Error} For socket errors. * @private */ - static async _borrowWithKeyspace(host, keyspace) { + static async _borrowWithKeyspace(host: Host, keyspace: string): Promise { const connection = host.borrowConnection(); if (keyspace && connection.keyspace !== keyspace) { @@ -278,7 +289,7 @@ class PrepareHandler { * @param {String} keyspace * @private */ - _prepareOnAllHosts(iterator, query, keyspace) { + _prepareOnAllHosts(iterator: Iterator, query: string, keyspace: string) { const queries = [ query ]; let h; const hosts = []; @@ -294,4 +305,4 @@ class PrepareHandler { } } -module.exports = PrepareHandler; \ No newline at end of file +export default PrepareHandler; \ No newline at end of file diff --git a/lib/promise-utils.js b/lib/promise-utils.ts similarity index 81% rename from lib/promise-utils.js rename to lib/promise-utils.ts index 1fc568029..abe0e8681 100644 --- a/lib/promise-utils.js +++ b/lib/promise-utils.ts @@ -14,14 +14,19 @@ * limitations under the License. */ -'use strict'; +import type { EventEmitter } from "stream"; +import type { ExecutionOptions } from "./execution-options"; +import type { Host } from "./host"; +import type { LoadBalancingPolicy } from "./policies/load-balancing"; + + /** * Creates a non-clearable timer that resolves the promise once elapses. * @param {number} ms * @returns {Promise} */ -function delay(ms) { +function delay(ms: number): Promise { return new Promise(r => setTimeout(r, ms || 0)); } @@ -31,7 +36,7 @@ function delay(ms) { * @param {string} eventName * @returns {Promise} */ -function fromEvent(emitter, eventName) { +function fromEvent(emitter: EventEmitter, eventName: string): Promise { return new Promise((resolve, reject) => emitter.once(eventName, (err, result) => { if (err) { @@ -47,7 +52,7 @@ function fromEvent(emitter, eventName) { * @param {Function} fn * @returns {Promise} */ -function fromCallback(fn) { +function fromCallback(fn: Function): Promise { return new Promise((resolve, reject) => fn((err, result) => { if (err) { @@ -64,7 +69,7 @@ function fromCallback(fn) { * @param {Function} reject * @returns {Function} */ -function getCallback(resolve, reject) { +function getCallback(resolve: Function, reject: Function): Function { return function (err, result) { if (err) { reject(err); @@ -88,7 +93,7 @@ async function invokeSequentially(info, length, fn) { * @param {ExecutionOptions|null} executionOptions The information related to the execution of the request. 
* @returns {Promise} */ -function newQueryPlan(lbp, keyspace, executionOptions) { +function newQueryPlan(lbp: LoadBalancingPolicy, keyspace: string, executionOptions: ExecutionOptions | null): Promise> { return new Promise((resolve, reject) => { lbp.newQueryPlan(keyspace, executionOptions, (err, iterator) => { if (err) { @@ -108,7 +113,7 @@ function newQueryPlan(lbp, keyspace, executionOptions) { * @param {Function?} callback * @returns {Promise|undefined} */ -function optionalCallback(promise, callback) { +function optionalCallback(promise: Promise, callback: Function | null): Promise | undefined { if (!callback) { return promise; } @@ -123,7 +128,7 @@ function optionalCallback(promise, callback) { * @param {Function} fn * @returns {Promise} */ -function times(count, limit, fn) { +function times(count: number, limit: number, fn: Function): Promise { if (limit > count) { limit = count; } @@ -146,7 +151,7 @@ function times(count, limit, fn) { * @param {Promise} promise * @returns {undefined} */ -function toBackground(promise) { +function toBackground(promise: Promise): undefined { promise.catch(() => {}); } @@ -156,7 +161,7 @@ function toBackground(promise) { * @param {Function?} callback * @returns {undefined} */ -function toCallback(promise, callback) { +function toCallback(promise: Promise, callback: Function | null): undefined { promise .then( result => process.nextTick(() => callback(null, result)), @@ -164,7 +169,7 @@ function toCallback(promise, callback) { err => process.nextTick(() => callback(err))); } -module.exports = { +export default { delay, fromCallback, fromEvent, diff --git a/lib/readers.js b/lib/readers.ts similarity index 89% rename from lib/readers.js rename to lib/readers.ts index 57f0e0f73..e62f5cfa3 100644 --- a/lib/readers.js +++ b/lib/readers.ts @@ -13,13 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - -'use strict'; - -const util = require('util'); -const utils = require('./utils'); -const types = require('./types'); -const errors = require('./errors'); +import util from "util"; +import errors, { ResponseError } from "./errors"; +import types, { FrameHeader, InetAddress, Uuid } from "./types/index"; +import utils from "./utils"; /** * Information on the formatting of the returned rows @@ -41,6 +38,26 @@ const _unavailableMessage = 'Not enough replicas available for query at consiste const _readTimeoutMessage = 'Server timeout during read query at consistency %s (%s)'; const _readFailureMessage = 'Server failure during read query at consistency %s (%d responses were required but only %d replicas responded, %d failed)'; +type Meta = { + resultId?: Buffer; + flags?: number; + partitionKeys?: number[]; + pageState?: Buffer; + newResultId?: Buffer; + continuousPageIndex?: number; + lastContinuousPage?: boolean; + global_tables_spec?: boolean; + keyspace?: string; + table?: string; + columns?: Array<{ + ksname?: string; + tablename?: string; + name?: string; + type?: { code: number; info: any | null }; + }>; + columnsByName?: { [key: string]: number }; +}; + /** * Buffer forward reader of CQL binary frames * @param {FrameHeader} header @@ -48,6 +65,10 @@ const _readFailureMessage = 'Server failure during read query at consistency %s * @param {Number} [offset] */ class FrameReader { + header: FrameHeader; + opcode: any; + offset: number; + buf: Buffer; /** * Creates a new instance of the reader @@ -55,7 +76,7 @@ class FrameReader { * @param {Buffer} body * @param {Number} [offset] */ - constructor(header, body, offset) { + constructor(header: FrameHeader, body: Buffer, offset: number) { this.header = header; this.opcode = header.opcode; this.offset = offset || 0; @@ -76,7 +97,7 @@ class FrameReader { * @param {Number} [end] * @returns {Buffer} */ - slice(begin, end) { + slice(begin: number, end?: number): Buffer { if (typeof end === 'undefined') { end = this.buf.length; } @@ -99,7 +120,7 @@ class FrameReader { * @param length * @returns {Buffer} */ - read(length) { + read(length): Buffer { let end = this.buf.length; if (typeof length !== 'undefined' && this.offset + length < this.buf.length) { end = this.offset + length; @@ -120,14 +141,14 @@ class FrameReader { * Reads a BE Int and moves the offset * @returns {Number} */ - readInt() { + readInt(): number { const result = this.buf.readInt32BE(this.offset); this.offset += 4; return result; } /** @returns {Number} */ - readShort() { + readShort(): number { const result = this.buf.readUInt16BE(this.offset); this.offset += 2; return result; @@ -151,9 +172,10 @@ class FrameReader { * Checks that the new length to read is within the range of the buffer length. Throws a RangeError if not. * @param {Number} newLength */ - checkOffset(newLength) { + checkOffset(newLength: number) { if (this.offset + newLength > this.buf.length) { const err = new RangeError('Trying to access beyond buffer length'); + // @ts-ignore err.expectedLength = newLength; throw err; } @@ -163,7 +185,7 @@ class FrameReader { * Reads a protocol string list * @returns {Array} */ - readStringList() { + readStringList(): Array { const length = this.readShort(); const list = new Array(length); for (let i = 0; i < length; i++) { @@ -176,7 +198,7 @@ class FrameReader { * Reads the amount of bytes that the field has and returns them (slicing them). 
* @returns {Buffer} */ - readBytes() { + readBytes(): Buffer { const length = this.readInt(); if (length < 0) { return null; @@ -201,7 +223,7 @@ class FrameReader { * @param {Function} valueFn * @returns {Object} */ - readMap(length, keyFn, valueFn) { + readMap(length: number, keyFn: Function, valueFn: Function): object { if (length < 0) { return null; } @@ -216,7 +238,7 @@ class FrameReader { * Reads an associative array of strings as keys and string lists as values * @returns {Object} */ - readStringMultiMap() { + readStringMultiMap(): object { //A [short] n, followed by n pair where is a //[string] and is a [string[]]. const length = this.readShort(); @@ -234,10 +256,11 @@ class FrameReader { * Reads a data type definition * @returns {{code: Number, info: Object|null}} An array of 2 elements */ - readType() { + readType(): {code: number, info: any|null} { let i; - const type = { + const type : {code: number, info: any|null} = { code: this.readShort(), + // @ts-ignore type: null }; switch (type.code) { @@ -278,7 +301,7 @@ class FrameReader { * Reads an Ip address and port * @returns {{address: exports.InetAddress, port: Number}} */ - readInet() { + readInet(): { address: InetAddress; port: number; } { const length = this.readByte(); const address = this.read(length); return { address: new types.InetAddress(address), port: this.readInt() }; @@ -288,7 +311,7 @@ class FrameReader { * Reads an Ip address * @returns {InetAddress} */ - readInetAddress() { + readInetAddress(): InetAddress { const length = this.readByte(); return new types.InetAddress(this.read(length)); } @@ -298,11 +321,11 @@ class FrameReader { * @returns {{traceId: Uuid, warnings: Array, customPayload}} * @throws {RangeError} */ - readFlagsInfo() { + readFlagsInfo(): { traceId?: Uuid; warnings?: Array; customPayload?; } { if (this.header.flags === 0) { return utils.emptyObject; } - const result = {}; + const result : { traceId?: Uuid; warnings?: Array; customPayload?; } = {}; if (this.header.flags & types.frameFlags.tracing) { this.checkOffset(16); result.traceId = new types.Uuid(utils.copyBuffer(this.read(16))); @@ -323,11 +346,11 @@ class FrameReader { * @returns {Object} * @throws {RangeError} */ - readMetadata(kind) { + readMetadata(kind: number): Meta { let i; //Determines if its a prepared metadata const isPrepared = (kind === types.resultKind.prepared); - const meta = {}; + const meta : Meta = {}; if (types.protocolVersion.supportsResultMetadataId(this.header.version) && isPrepared) { meta.resultId = utils.copyBuffer(this.readShortBytes()); } @@ -363,7 +386,9 @@ class FrameReader { meta.columnsByName = {}; } for (i = 0; i < columnLength; i++) { - const col = {}; + const col: { + ksname?: string; tablename?: string; + name?: string; type?: { code: number; info: any | null };} = {}; if (!meta.global_tables_spec) { col.ksname = this.readString(); col.tablename = this.readString(); @@ -383,7 +408,7 @@ class FrameReader { * @throws {RangeError} * @returns {ResponseError} */ - readError() { + readError(): ResponseError { const code = this.readInt(); const message = this.readString(); const err = new errors.ResponseError(code, message); @@ -472,9 +497,9 @@ class FrameReader { /** * Reads an event from Cassandra and returns the detail - * @returns {{eventType: String, inet: {address: Buffer, port: Number}}, *} + * @returns {{eventType: String, inet: {address: InetAddress, port: Number}}, *} */ - readEvent() { + readEvent(): { eventType: string; inet?: { address: InetAddress; port: number; }; added?: boolean; up?: boolean } { 
const eventType = this.readString(); switch (eventType) { case types.protocolEvents.topologyChange: @@ -539,4 +564,5 @@ class FrameReader { } } -module.exports = { FrameReader }; +export { FrameReader }; +export default { FrameReader }; \ No newline at end of file diff --git a/lib/request-execution.js b/lib/request-execution.ts similarity index 92% rename from lib/request-execution.js rename to lib/request-execution.ts index d663fee89..ae72b1c1f 100644 --- a/lib/request-execution.js +++ b/lib/request-execution.ts @@ -13,14 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import Connection from "./connection"; +import errors, { ResponseError } from "./errors"; +import type { Host } from "./host"; +import type { ClientMetrics } from "./metrics"; +import type OperationState from "./operation-state"; +import retry, { type DecisionInfo } from "./policies/retry"; +import promiseUtils from "./promise-utils"; +import type RequestHandler from "./request-handler"; +import requests from "./requests"; +import types, { consistencies } from "./types/index"; +import utils from "./utils"; -const errors = require('./errors'); -const requests = require('./requests'); -const retry = require('./policies/retry'); -const types = require('./types'); -const utils = require('./utils'); -const promiseUtils = require('./promise-utils'); const retryOnCurrentHost = Object.freeze({ decision: retry.RetryPolicy.retryDecision.retry, @@ -74,13 +78,21 @@ const metricsRetryHandlers = new Map([ ]); class RequestExecution { + private _parent: RequestHandler; + private _operation: OperationState; + private _host: Host; + private _connection: Connection; + private _cancelled: boolean; + private _startTime: [number, number]; + private _retryCount: number; + private _request: any; /** * Encapsulates a single flow of execution against a coordinator, handling individual retries and failover. * @param {RequestHandler!} parent * @param {Host!} host * @param {Connection!} connection */ - constructor(parent, host, connection) { + constructor(parent: RequestHandler, host: Host, connection: Connection) { this._parent = parent; /** @type {OperationState} */ this._operation = null; @@ -108,7 +120,7 @@ class RequestExecution { * Borrows the next connection available using the query plan and sends the request. * @returns {Promise} */ - async restart() { + async restart(): Promise { try { const { host, connection } = this._parent.getNextConnection(); @@ -213,7 +225,7 @@ class RequestExecution { } } - _getResultSet(response, agreement) { + _getResultSet(response, agreement?) 
{ const rs = new types.ResultSet(response, this._host.address, this._parent.triedHosts, this._parent.speculativeExecutions, this._request.consistency, agreement === undefined || agreement); @@ -232,7 +244,7 @@ class RequestExecution { * @param {Array} latency * @private */ - static _invokeMetricsHandler(errorCode, metrics, err, latency) { + static _invokeMetricsHandler(errorCode: number, metrics: ClientMetrics, err: Error, latency: Array) { const handler = metricsHandlers.get(errorCode); if (handler !== undefined) { handler(metrics, err, latency); @@ -250,7 +262,7 @@ class RequestExecution { * @param {Error} err * @private */ - static _invokeMetricsHandlerForRetry(errorCode, metrics, err) { + static _invokeMetricsHandlerForRetry(errorCode: number, metrics: ClientMetrics, err: Error) { const handler = metricsRetryHandlers.get(errorCode); if (handler !== undefined) { @@ -320,10 +332,10 @@ class RequestExecution { /** * Gets a decision whether or not to retry based on the error information. * @param {Number} errorCode - * @param {Error} err + * @param {ResponseError} err * @returns {{decision, useCurrentHost, consistency}} */ - _getDecision(errorCode, err) { + _getDecision(errorCode: number, err: ResponseError): DecisionInfo { const operationInfo = { query: this._request && this._request.query, executionOptions: this._parent.executionOptions, @@ -403,7 +415,7 @@ class RequestExecution { * @param {Object} [meta] * @private */ - _retry(consistency, useCurrentHost, meta) { + _retry(consistency: consistencies, useCurrentHost: boolean, meta?: object) { if (this._cancelled) { // No point in retrying return; @@ -432,7 +444,7 @@ class RequestExecution { // When there was a socket error, the connection provided was already removed from the pool earlier. try { this._connection = this._host.borrowConnection(this._connection); - } catch (err) { + } catch (_err) { // All connections are busy (`BusyConnectionError`) or there isn't a ready connection in the pool (`Error`) // The retry policy declared the intention to retry on the current host but its not available anymore. // Use the next host @@ -452,7 +464,7 @@ class RequestExecution { * @param {Buffer} queryId * @private */ - _prepareAndRetry(queryId) { + _prepareAndRetry(queryId: Buffer) { const connection = this._connection; this._parent.log('info', @@ -494,4 +506,4 @@ class RequestExecution { } } -module.exports = RequestExecution; +export default RequestExecution; \ No newline at end of file diff --git a/lib/request-handler.js b/lib/request-handler.ts similarity index 82% rename from lib/request-handler.js rename to lib/request-handler.ts index c1c7b6daf..1732c03be 100644 --- a/lib/request-handler.js +++ b/lib/request-handler.ts @@ -13,31 +13,50 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const util = require('util'); +import util from "util"; +import type Client from "./client"; +import errors, { NoHostAvailableError } from "./errors"; +import type { ExecutionOptions } from "./execution-options"; +import { Host } from "./host"; +import promiseUtils from "./promise-utils"; +import RequestExecution from "./request-execution"; +import { Request } from "./requests"; +import types, { ResultSet } from "./types/index"; +import utils from "./utils"; + -const errors = require('./errors'); -const types = require('./types'); -const utils = require('./utils'); -const RequestExecution = require('./request-execution'); -const promiseUtils = require('./promise-utils'); /** * Handles a BATCH, QUERY and EXECUTE request to the server, dealing with host fail-over and retries on error */ class RequestHandler { + client: Client; + _speculativeExecutionPlan: { nextExecution: () => number; }; + logEmitter: any; + log: (type: string, info: string, furtherInfo?: any, options?: any) => void; + request: Request; + executionOptions: ExecutionOptions; + stackContainer: object; + triedHosts: Record; + speculativeExecutions: number; + _hostIterator: Iterator; + _resolveCallback: Function; + _rejectCallback: Function; + _newExecutionTimeout: NodeJS.Timeout; + _executions: any[]; + /** * Creates a new instance of RequestHandler. * @param {Request} request * @param {ExecutionOptions} execOptions * @param {Client} client Client instance used to retrieve and set the keyspace. */ - constructor(request, execOptions, client) { + constructor(request: Request, execOptions: ExecutionOptions, client: Client) { this.client = client; this._speculativeExecutionPlan = client.options.policies.speculativeExecution.newPlan( - client.keyspace, request.query || request.queries); + client.keyspace, request["query"] || request["queries"]); this.logEmitter = client.options.logEmitter; - this.log = utils.log; + this.log = utils.log.bind(this); this.request = request; this.executionOptions = execOptions; this.stackContainer = null; @@ -59,7 +78,7 @@ class RequestHandler { * @param {Client} client Client instance used to retrieve and set the keyspace. 
* @returns {Promise} */ - static send(request, execOptions, client) { + static send(request: Request, execOptions: ExecutionOptions, client: Client): Promise { const instance = new RequestHandler(request, execOptions, client); return instance.send(); } @@ -69,7 +88,7 @@ class RequestHandler { * @returns {{host, connection}} * @throws {NoHostAvailableError} */ - getNextConnection() { + getNextConnection(): { host; connection; } { let host; let connection; const iterator = this._hostIterator; @@ -111,7 +130,7 @@ class RequestHandler { * Gets an available connection and sends the request * @returns {Promise} */ - send() { + send(): Promise { if (this.executionOptions.getCaptureStackTrace()) { Error.captureStackTrace(this.stackContainer = {}); } @@ -147,7 +166,7 @@ class RequestHandler { * @returns {Promise} * @private */ - async _startNewExecution(isSpecExec) { + async _startNewExecution(isSpecExec?: boolean): Promise { if (isSpecExec) { this.client.metrics.onSpeculativeExecution(); } @@ -192,7 +211,9 @@ class RequestHandler { * @param {Host!} host * @private */ - _scheduleSpeculativeExecution(host) { + _scheduleSpeculativeExecution(host: Host) { + // @ts-ignore + //TODO: none of built-in policies or the interface use the host argument const delay = this._speculativeExecutionPlan.nextExecution(host); if (typeof delay !== 'number' || delay < 0) { return; @@ -215,7 +236,7 @@ class RequestHandler { * @param {Client} client * @returns {Promise} */ - static setKeyspace(client) { + static setKeyspace(client: Client): Promise { let connection; for (const host of client.hosts.values()) { @@ -236,7 +257,7 @@ class RequestHandler { * @param {Error} err * @param {ResultSet} [result] */ - setCompleted(err, result) { + setCompleted(err: Error, result?: ResultSet) { if (this._newExecutionTimeout !== null) { clearTimeout(this._newExecutionTimeout); } @@ -248,7 +269,7 @@ class RequestHandler { if (err) { if (this.executionOptions.getCaptureStackTrace()) { - utils.fixStack(this.stackContainer.stack, err); + utils.fixStack(this.stackContainer["stack"], err); } // Reject the promise @@ -278,7 +299,7 @@ class RequestHandler { * @param {NoHostAvailableError} err * @param {RequestExecution|null} execution */ - handleNoHostAvailable(err, execution) { + handleNoHostAvailable(err: NoHostAvailableError, execution: RequestExecution | null) { if (execution !== null) { // Remove the execution const index = this._executions.indexOf(execution); @@ -296,7 +317,7 @@ class RequestHandler { * Gets a long lived closure that can fetch the next page. * @returns {Function} */ - getNextPageHandler() { + getNextPageHandler(): Function { const request = this.request; const execOptions = this.executionOptions; const client = this.client; @@ -308,4 +329,4 @@ class RequestHandler { } } -module.exports = RequestHandler; +export default RequestHandler; diff --git a/lib/requests.js b/lib/requests.ts similarity index 87% rename from lib/requests.js rename to lib/requests.ts index 443347ee3..a4680d780 100644 --- a/lib/requests.js +++ b/lib/requests.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const util = require('util'); +import util from "util"; +import packageInfo from "../package.json"; +import Encoder from "./encoder"; +import { ExecutionOptions } from "./execution-options"; +import types from "./types/index"; +import utils from "./utils"; +import { FrameWriter } from "./writers"; -const { FrameWriter } = require('./writers'); -const types = require('./types'); -const utils = require('./utils'); -const { ExecutionOptions } = require('./execution-options'); -const packageInfo = require('../package.json'); /** * Options for the execution of the query / prepared statement @@ -68,6 +68,7 @@ const batchType = { * Abstract class Request */ class Request { + length: number; constructor() { this.length = 0; } @@ -79,7 +80,8 @@ class Request { * @throws {TypeError} * @returns {Buffer} */ - write(encoder, streamId) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + write(encoder: Encoder, streamId: number): Buffer { throw new Error('Method must be implemented'); } @@ -87,8 +89,8 @@ class Request { * Creates a new instance using the same constructor as the current instance, copying the properties. * @return {Request} */ - clone() { - const newRequest = new (this.constructor)(); + clone(): Request { + const newRequest = new (this.constructor as {new() : Request})(); const keysArray = Object.keys(this); for (let i = 0; i < keysArray.length; i++) { const key = keysArray[i]; @@ -106,6 +108,13 @@ class Request { * @param options */ class ExecuteRequest extends Request { + query: string; + queryId: any; + params: any; + meta: any; + options: ExecutionOptions; + consistency: any; + namedParameters: boolean; /** * @param {String} query * @param queryId @@ -113,7 +122,7 @@ class ExecuteRequest extends Request { * @param {ExecutionOptions} execOptions * @param meta */ - constructor(query, queryId, params, execOptions, meta) { + constructor(query: string, queryId, params, execOptions: ExecutionOptions, meta) { super(); this.query = query; @@ -164,7 +173,7 @@ class ExecuteRequest extends Request { * @param {Encoder} encoder * @param {Boolean} [isQuery] True if query, otherwise assumed to be execute request. */ - writeQueryParameters(frameWriter, encoder, isQuery) { + writeQueryParameters(frameWriter: FrameWriter, encoder: Encoder, isQuery?: boolean) { //v1: .... //v2: [...][][][] //v3: [[name_1]...[name_n]][][][][] @@ -237,13 +246,14 @@ class ExecuteRequest extends Request { } class QueryRequest extends ExecuteRequest { + hints: any; /** * @param {String} query * @param params * @param {ExecutionOptions} [execOptions] * @param {Boolean} [namedParameters] */ - constructor(query, params, execOptions, namedParameters) { + constructor(query: string, params?, execOptions?: ExecutionOptions, namedParameters?: boolean) { super(query, null, params, execOptions, null); this.hints = this.options.getHints() || utils.emptyArray; this.namedParameters = namedParameters; @@ -284,6 +294,8 @@ class QueryRequest extends ExecuteRequest { } class PrepareRequest extends Request { + query: any; + keyspace: any; constructor(query, keyspace) { super(); this.query = query; @@ -305,6 +317,7 @@ class PrepareRequest extends Request { } class StartupRequest extends Request { + options: { cqlVersion?: any; noCompact?: any; clientId?: any; applicationName?: any; applicationVersion?: any; }; /** * Creates a new instance of {@link StartupRequest}. 
@@ -315,7 +328,7 @@ class StartupRequest extends Request { * @param [options.applicationName] * @param [options.applicationVersion] */ - constructor(options) { + constructor(options: { cqlVersion?: any; noCompact?: any; clientId?: any; applicationName?: any; applicationVersion?: any; }) { super(); this.options = options || {}; } @@ -351,6 +364,7 @@ class StartupRequest extends Request { } class RegisterRequest extends Request { + events: any; constructor(events) { super(); this.events = events; @@ -368,6 +382,7 @@ class RegisterRequest extends Request { * @param {Buffer} token */ class AuthResponseRequest extends Request { + token: any; constructor(token) { super(); this.token = token; @@ -384,6 +399,8 @@ class AuthResponseRequest extends Request { * Represents a protocol v1 CREDENTIALS request message */ class CredentialsRequest extends Request { + username: any; + password: any; constructor(username, password) { super(); this.username = username; @@ -398,12 +415,16 @@ class CredentialsRequest extends Request { } class BatchRequest extends Request { + queries: Array<{query, params, info?, queryId?, meta?}>; + options: ExecutionOptions; + hints: readonly any[]; + type: number; /** * Creates a new instance of BatchRequest. * @param {Array.<{query, params, [info]}>} queries Array of objects with the properties query and params * @param {ExecutionOptions} execOptions */ - constructor(queries, execOptions) { + constructor(queries: Array<{query, params, info?}>, execOptions: ExecutionOptions) { super(); this.queries = queries; this.options = execOptions; @@ -420,7 +441,7 @@ class BatchRequest extends Request { /** * Writes a batch request */ - write(encoder, streamId) { + write(encoder: Encoder, streamId) { //v2: ... //v3: ...[][] //dseV1+: similar to v3/v4, flags is an int instead of a byte @@ -500,20 +521,21 @@ class BatchRequest extends Request { } } -function CancelRequest(operationId) { - this.streamId = null; - this.operationId = operationId; +class CancelRequest extends Request { + streamId: number; + operationId: any; + constructor(operationId) { + super(); + this.streamId = null; + this.operationId = operationId; + } + write(encoder, streamId) { + const frameWriter = new FrameWriter(types.opcodes.cancel); + frameWriter.writeInt(1); + frameWriter.writeInt(this.operationId); + return frameWriter.write(encoder.protocolVersion, streamId); + } } -util.inherits(CancelRequest, Request); - -CancelRequest.prototype.write = function (encoder, streamId) { - const frameWriter = new FrameWriter(types.opcodes.cancel); - frameWriter.writeInt(1); - frameWriter.writeInt(this.operationId); - return frameWriter.write(encoder.protocolVersion, streamId); -}; - class OptionsRequest extends Request { write(encoder, streamId) { @@ -529,14 +551,25 @@ class OptionsRequest extends Request { const options = new OptionsRequest(); -exports.AuthResponseRequest = AuthResponseRequest; -exports.BatchRequest = BatchRequest; -exports.CancelRequest = CancelRequest; -exports.CredentialsRequest = CredentialsRequest; -exports.ExecuteRequest = ExecuteRequest; -exports.PrepareRequest = PrepareRequest; -exports.QueryRequest = QueryRequest; -exports.Request = Request; -exports.RegisterRequest = RegisterRequest; -exports.StartupRequest = StartupRequest; -exports.options = options; +export { + AuthResponseRequest, + BatchRequest, + CancelRequest, + CredentialsRequest, + ExecuteRequest, options, PrepareRequest, + QueryRequest, RegisterRequest, Request, StartupRequest +}; + +export default { + AuthResponseRequest, + BatchRequest, + CancelRequest, +
CredentialsRequest, + ExecuteRequest, + PrepareRequest, + QueryRequest, + Request, + RegisterRequest, + StartupRequest, + options +}; diff --git a/lib/stream-id-stack.js b/lib/stream-id-stack.ts similarity index 90% rename from lib/stream-id-stack.js rename to lib/stream-id-stack.ts index 7947af79d..367deb4f5 100644 --- a/lib/stream-id-stack.js +++ b/lib/stream-id-stack.ts @@ -13,22 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import types from "./types/index"; -const types = require('./types'); /** * Group size * @type {number} */ -const groupSize = 128; +const groupSize: number = 128; /** * Number used to right shift ids to allocate them into groups * @const * @type {number} */ -const shiftToGroup = 7; +const shiftToGroup: number = 7; /** * Amount of groups that can be released per time @@ -36,21 +35,21 @@ const shiftToGroup = 7; * @const * @type {number} */ -const releasableSize = 4; +const releasableSize: number = 4; /** * 32K possible stream ids depending for protocol v3 and above * @const * @type {number} */ -const maxGroupsFor2Bytes = 256; +const maxGroupsFor2Bytes: number = 256; /** * Delay used to check if groups can be released * @const * @type {number} */ -const defaultReleaseDelay = 5000; +const defaultReleaseDelay: number = 5000; /** * Represents a queue of ids from 0 to maximum stream id supported by the protocol version. @@ -58,12 +57,19 @@ const defaultReleaseDelay = 5000; * {@link StreamIdStack#push()} */ class StreamIdStack { + currentGroup: any[]; + groupIndex: number; + groups: any[]; + releaseTimeout: NodeJS.Timeout; + inUse: number; + releaseDelay: number; + maxGroups: number; /** * Creates a new instance of StreamIdStack. * @param {number} version Protocol version * @constructor */ - constructor(version) { + constructor(version: number) { //Ecmascript Number is 64-bit double, it can be optimized by the engine into a 32-bit int, but nothing below that. //We try to allocate as few as possible in arrays of 128 this.currentGroup = generateGroup(0); @@ -83,7 +89,7 @@ class StreamIdStack { * Sets the protocol version * @param {Number} version */ - setVersion(version) { + setVersion(version: number) { //128 or 32K stream ids depending on the protocol version this.maxGroups = types.protocolVersion.uses2BytesStreamIds(version) ? maxGroupsFor2Bytes : 1; } @@ -93,7 +99,7 @@ class StreamIdStack { * Similar to {@link Array#pop()}. * @returns {Number} Returns an id or null */ - pop() { + pop(): number { let id = this.currentGroup.pop(); if (typeof id !== 'undefined') { this.inUse++; @@ -118,7 +124,7 @@ class StreamIdStack { * Similar to {@link Array#push()}. * @param {Number} id */ - push(id) { + push(id: number) { this.inUse--; const groupIndex = id >> shiftToGroup; const group = this.groups[groupIndex]; @@ -146,7 +152,7 @@ class StreamIdStack { * @returns {Number} Returns a new id or null if it's not possible to create a new group * @private */ - _tryCreateGroup() { + _tryCreateGroup(): number { if (this.groups.length === this.maxGroups) { //we can have an additional group return null; @@ -197,4 +203,4 @@ function generateGroup(initialValue) { return arr; } -module.exports = StreamIdStack; \ No newline at end of file +export default StreamIdStack; \ No newline at end of file diff --git a/lib/streams.js b/lib/streams.js deleted file mode 100644 index 158cc8b72..000000000 --- a/lib/streams.js +++ /dev/null @@ -1,582 +0,0 @@ -/* - * Copyright DataStax, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -const util = require('util'); -const { Transform, Writable } = require('stream'); - -const types = require('./types'); -const utils = require('./utils'); -const errors = require('./errors'); -const { FrameHeader } = types; -const { FrameReader } = require('./readers'); - -/** - * Transforms chunks, emits data objects {header, chunk} - * @param options Stream options - * @extends Transform - */ -function Protocol (options) { - Transform.call(this, options); - this.header = null; - this.bodyLength = 0; - this.clearHeaderChunks(); - this.version = 0; - this.headerSize = 0; -} - -util.inherits(Protocol, Transform); - -Protocol.prototype._transform = function (chunk, encoding, callback) { - let error = null; - try { - this.readItems(chunk); - } - catch (err) { - error = err; - } - callback(error); -}; - -/** - * Parses the chunk into frames (header and body). - * Emits (push) complete frames or frames with incomplete bodies. Following chunks containing the rest of the body will - * be emitted using the same frame. - * It buffers incomplete headers. - * @param {Buffer} chunk - */ -Protocol.prototype.readItems = function (chunk) { - if (!chunk || chunk.length === 0) { - return; - } - if (this.version === 0) { - //The server replies the first message with the max protocol version supported - this.version = FrameHeader.getProtocolVersion(chunk); - this.headerSize = FrameHeader.size(this.version); - } - let offset = 0; - let currentHeader = this.header; - this.header = null; - if (this.headerChunks.byteLength !== 0) { - //incomplete header was buffered try to read the header from the buffered chunks - this.headerChunks.parts.push(chunk); - if (this.headerChunks.byteLength + chunk.length < this.headerSize) { - this.headerChunks.byteLength += chunk.length; - return; - } - currentHeader = FrameHeader.fromBuffer(Buffer.concat(this.headerChunks.parts, this.headerSize)); - offset = this.headerSize - this.headerChunks.byteLength; - this.clearHeaderChunks(); - } - const items = []; - while (true) { - if (!currentHeader) { - if (this.headerSize > chunk.length - offset) { - if (chunk.length - offset <= 0) { - break; - } - //the header is incomplete, buffer it until the next chunk - const headerPart = chunk.slice(offset, chunk.length); - this.headerChunks.parts.push(headerPart); - this.headerChunks.byteLength = headerPart.length; - break; - } - //read header - currentHeader = FrameHeader.fromBuffer(chunk, offset); - offset += this.headerSize; - } - //parse body - const remaining = chunk.length - offset; - if (currentHeader.bodyLength <= remaining + this.bodyLength) { - items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: true }); - offset += currentHeader.bodyLength - this.bodyLength; - //reset the body length - this.bodyLength = 0; - } - else if (remaining >= 0) { - //the body is not fully contained in this chunk - //will continue later - this.header = currentHeader; - this.bodyLength += remaining; - if 
(remaining > 0) { - //emit if there is at least a byte to emit - items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: false }); - } - break; - } - currentHeader = null; - } - for (let i = 0; i < items.length; i++) { - this.push(items[i]); - } -}; - -Protocol.prototype.clearHeaderChunks = function () { - this.headerChunks = { byteLength: 0, parts: [] }; -}; - -/** - * A stream that gets reads header + body chunks and transforms them into header + (row | error) - * @param {Object} streamOptions Node.js Stream options - * @param {Encoder} encoder Encoder instance for the parser to use - * @extends Transform - */ -function Parser (streamOptions, encoder) { - Transform.call(this, streamOptions); - //frames that are streaming, indexed by id - this.frames = {}; - this.encoder = encoder; -} - -util.inherits(Parser, Transform); - -Parser.prototype._transform = function (item, encoding, callback) { - const frameInfo = this.frameState(item); - - let error = null; - try { - this.parseBody(frameInfo, item); - } - catch (err) { - error = err; - } - callback(error); - - if (item.frameEnded) { - if (frameInfo.cellBuffer) { - //Frame was being streamed but an error force it to buffer the result - this.push({ - header: frameInfo.header, - error: new errors.DriverInternalError('There was an problem while parsing streaming frame, opcode ' + - frameInfo.header.opcode) - }); - } - //all the parsing finished and it was streamed down - //emit an item that signals it - this.push({ header: frameInfo.header, frameEnded: true}); - } -}; - -/** - * @param frameInfo - * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item - */ -Parser.prototype.parseBody = function (frameInfo, item) { - frameInfo.isStreaming = frameInfo.byRow && item.header.opcode === types.opcodes.result; - if (!this.handleFrameBuffers(frameInfo, item)) { - // Frame isn't complete and we are not streaming the frame - return; - } - const reader = new FrameReader(item.header, item.chunk, item.offset); - // Check that flags have not been parsed yet for this frame - if (frameInfo.flagsInfo === undefined) { - const originalOffset = reader.offset; - try { - frameInfo.flagsInfo = reader.readFlagsInfo(); - } - catch (e) { - return this.handleParsingError(e, frameInfo, reader, originalOffset); - } - } - - //All the body for most operations is already buffered at this stage - //Except for RESULT - switch (item.header.opcode) { - case types.opcodes.result: - return this.parseResult(frameInfo, reader); - case types.opcodes.ready: - case types.opcodes.authSuccess: - return this.push({ header: frameInfo.header, ready: true }); - case types.opcodes.authChallenge: - return this.push({ header: frameInfo.header, authChallenge: true, token: reader.readBytes()}); - case types.opcodes.authenticate: - return this.push({ header: frameInfo.header, mustAuthenticate: true, authenticatorName: reader.readString()}); - case types.opcodes.error: - return this.push({ header: frameInfo.header, error: reader.readError()}); - case types.opcodes.supported: - return this.push({ header: frameInfo.header, supported: reader.readStringMultiMap()}); - case types.opcodes.event: - return this.push({ header: frameInfo.header, event: reader.readEvent()}); - default: - return this.push({ header: frameInfo.header, error: new Error('Received invalid opcode: ' + item.header.opcode) }); - } -}; - -/** - * Buffers if needed and returns true if it has all the necessary data to continue parsing the frame. 
- * @param frameInfo - * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item - * @returns {Boolean} - */ -Parser.prototype.handleFrameBuffers = function (frameInfo, item) { - if (!frameInfo.isStreaming) { - // Handle buffering for complete frame bodies - const currentLength = (frameInfo.bufferLength || 0) + item.chunk.length - item.offset; - if (currentLength < item.header.bodyLength) { - //buffer until the frame is completed - this.addFrameBuffer(frameInfo, item); - return false; - } - //We have received the full frame body - if (frameInfo.buffers) { - item.chunk = this.getFrameBuffer(frameInfo, item); - item.offset = 0; - } - return true; - } - if (frameInfo.cellBuffer) { - // Handle buffering for frame cells (row cells or metadata cells) - if (item.offset !== 0) { - throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero'); - } - frameInfo.cellBuffer.parts.push(item.chunk); - if (!frameInfo.cellBuffer.expectedLength) { - //Its a buffer outside a row cell (metadata or other) - if (frameInfo.cellBuffer.parts.length !== 2) { - throw new errors.DriverInternalError('Buffer for streaming frame can not contain more than 1 item'); - } - item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length); - frameInfo.cellBuffer = null; - return true; - } - if (frameInfo.cellBuffer.expectedLength > frameInfo.cellBuffer.byteLength + item.chunk.length) { - //We still haven't got the cell data - frameInfo.cellBuffer.byteLength += item.chunk.length; - return false; - } - item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length); - frameInfo.cellBuffer = null; - } - return true; -}; - -/** - * Adds this chunk to the frame buffers. - * @param frameInfo - * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item - */ -Parser.prototype.addFrameBuffer = function (frameInfo, item) { - if (!frameInfo.buffers) { - frameInfo.buffers = [ item.chunk.slice(item.offset) ]; - frameInfo.bufferLength = item.chunk.length - item.offset; - return; - } - if (item.offset > 0) { - throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero'); - } - frameInfo.buffers.push(item.chunk); - frameInfo.bufferLength += item.chunk.length; -}; - -/** - * Adds the last chunk and concatenates the frame buffers - * @param frameInfo - * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item - */ -Parser.prototype.getFrameBuffer = function (frameInfo, item) { - frameInfo.buffers.push(item.chunk); - const result = Buffer.concat(frameInfo.buffers, frameInfo.bodyLength); - frameInfo.buffers = null; - return result; -}; - -/** - * Tries to read the result in the body of a message - * @param frameInfo Frame information, header / metadata - * @param {FrameReader} reader - */ -Parser.prototype.parseResult = function (frameInfo, reader) { - let result; - // As we might be streaming and the frame buffer might not be complete, - // read the metadata and different types of result values in a try-catch. 
- // Store the reader position - const originalOffset = reader.offset; - try { - if (!frameInfo.meta) { - frameInfo.kind = reader.readInt(); - // Spec 4.2.5 - switch (frameInfo.kind) { - case types.resultKind.voidResult: - result = { header: frameInfo.header, flags: frameInfo.flagsInfo }; - break; - case types.resultKind.rows: - // Parse the rows metadata, the rest of the response is going to be parsed afterwards - frameInfo.meta = reader.readMetadata(frameInfo.kind); - break; - case types.resultKind.setKeyspace: - result = { header: frameInfo.header, keyspaceSet: reader.readString(), flags: frameInfo.flagsInfo }; - break; - case types.resultKind.prepared: - { - const preparedId = utils.copyBuffer(reader.readShortBytes()); - frameInfo.meta = reader.readMetadata(frameInfo.kind); - result = { header: frameInfo.header, id: preparedId, meta: frameInfo.meta, flags: frameInfo.flagsInfo }; - break; - } - case types.resultKind.schemaChange: - result = { header: frameInfo.header, schemaChange: reader.parseSchemaChange(), flags: frameInfo.flagsInfo }; - break; - default: - throw errors.DriverInternalError('Unexpected result kind: ' + frameInfo.kind); - } - } - } - catch (e) { - return this.handleParsingError(e, frameInfo, reader, originalOffset); - } - if (result) { - if (frameInfo.emitted) { - // It may contain additional metadata and info that it's not being parsed - return; - } - frameInfo.emitted = true; - return this.push(result); - } - if (reader.remainingLength() > 0) { - this.parseRows(frameInfo, reader); - } -}; - -/** - * @param frameInfo - * @param {FrameReader} reader - */ -Parser.prototype.parseRows = function (frameInfo, reader) { - if (frameInfo.parsingError) { - //No more processing on this frame - return; - } - if (frameInfo.rowLength === undefined) { - try { - frameInfo.rowLength = reader.readInt(); - } - catch (e) { - return this.handleParsingError(e, frameInfo, reader); - } - } - if (frameInfo.rowLength === 0) { - return this.push({ - header: frameInfo.header, - result: { rows: utils.emptyArray, meta: frameInfo.meta, flags: frameInfo.flagsInfo } - }); - } - const meta = frameInfo.meta; - frameInfo.rowIndex = frameInfo.rowIndex || 0; - for (let i = frameInfo.rowIndex; i < frameInfo.rowLength; i++) { - const rowOffset = reader.offset; - const row = new types.Row(meta.columns); - let cellBuffer; - for (let j = 0; j < meta.columns.length; j++ ) { - const c = meta.columns[j]; - try { - cellBuffer = reader.readBytes(); - } - catch (e) { - return this.handleParsingError(e, frameInfo, reader, rowOffset, i); - } - try { - row[c.name] = this.encoder.decode(cellBuffer, c.type); - } - catch (e) { - //Something went wrong while decoding, we are not going to be able to recover - return this.handleParsingError(e, frameInfo, null); - } - } - this.push({ - header: frameInfo.header, - row: row, - meta: frameInfo.meta, - byRow: frameInfo.byRow, - length: frameInfo.rowLength, - flags: frameInfo.flagsInfo - }); - } - if (frameInfo.byRow) { - // Use an event item to identify that all the streaming rows have finished processing - this.push({ - header: frameInfo.header, - byRowCompleted: true, - meta: frameInfo.meta, - length: frameInfo.rowLength, - flags: frameInfo.flagsInfo - }); - } -}; - -/** - * Sets parser options (ie: how to yield the results as they are parsed) - * @param {Number} id Id of the stream - * @param options - */ -Parser.prototype.setOptions = function (id, options) { - if (this.frames[id.toString()]) { - throw new types.DriverError('There was already state for this frame'); - } - 
this.frames[id.toString()] = options; -}; - -/** - * Manually clears the frame options. - * This class already clears the provided options when the frame ends, so it's usually not required to invoke this - * method. - * When manually setting the options for continuous paging, it's possible that the frame options are set while - * it's being cancelled. - * @param {Number} id The streamId - */ -Parser.prototype.clearOptions = function (id) { - delete this.frames[id.toString()]; -}; - -/** - * Gets the frame info from the internal state. - * In case it is not there, it creates it. - * In case the frame ended - */ -Parser.prototype.frameState = function (item) { - let frameInfo = this.frames[item.header.streamId]; - if (!frameInfo) { - frameInfo = {}; - if (!item.frameEnded) { - //store it in the frames - this.frames[item.header.streamId] = frameInfo; - } - } - else if (item.frameEnded) { - //if it was already stored, remove it - delete this.frames[item.header.streamId]; - } - frameInfo.header = item.header; - return frameInfo; -}; - -/** - * Handles parsing error: pushing an error if its unexpected or buffer the cell if its streaming - * @param {Error} e - * @param frameInfo - * @param {FrameReader} reader - * @param {Number} [originalOffset] - * @param {Number} [rowIndex] - */ -Parser.prototype.handleParsingError = function (e, frameInfo, reader, originalOffset, rowIndex) { - if (reader && frameInfo.isStreaming && (e instanceof RangeError)) { - //A controlled error, buffer from offset and move on - return this.bufferResultCell(frameInfo, reader, originalOffset, rowIndex, e.expectedLength); - } - frameInfo.parsingError = true; - frameInfo.cellBuffer = null; - this.push({ header: frameInfo.header, error: e }); -}; - -/** - * When streaming, it buffers data since originalOffset. 
- * @param frameInfo - * @param {FrameReader} reader - * @param {Number} [originalOffset] - * @param {Number} [rowIndex] - * @param {Number} [expectedLength] - */ -Parser.prototype.bufferResultCell = function (frameInfo, reader, originalOffset, rowIndex, expectedLength) { - if (!originalOffset && originalOffset !== 0) { - originalOffset = reader.offset; - } - frameInfo.rowIndex = rowIndex; - const buffer = reader.slice(originalOffset); - frameInfo.cellBuffer = { - parts: [ buffer ], - byteLength: buffer.length, - expectedLength: expectedLength - }; -}; - -/** - * Represents a writable streams that emits results - */ -function ResultEmitter(options) { - Writable.call(this, options); - /** - * Stores the rows for frames that needs to be yielded as one result with many rows - */ - this.rowBuffer = {}; -} - -util.inherits(ResultEmitter, Writable); - -ResultEmitter.prototype._write = function (item, encoding, callback) { - let error = null; - try { - this.each(item); - } - catch (err) { - error = err; - } - callback(error); -}; - - -/** - * Analyzes the item and emit the corresponding event - */ -ResultEmitter.prototype.each = function (item) { - if (item.error || item.result) { - //Its either an error or an empty array rows - //no transformation needs to be made - return this.emit('result', item.header, item.error, item.result); - } - if (item.frameEnded) { - return this.emit('frameEnded', item.header); - } - if (item.lastContinuousPage) { - return this.emit('lastContinuousPage', item.header); - } - if (item.byRowCompleted) { - return this.emit('byRowCompleted', item.header, item.row, item.meta, item.flags); - } - if (item.byRow) { - //it should be yielded by row - return this.emit('row', item.header, item.row, item.meta, item.length, item.flags); - } - if (item.row) { - //it should be yielded as a result - //it needs to be buffered to an array of rows - return this.bufferAndEmit(item); - } - if (item.event) { - //its an event from Cassandra - return this.emit('nodeEvent', item.header, item.event); - } - //its a raw response (object with flags) - return this.emit('result', item.header, null, item); -}; - -/** - * Buffers the rows until the result set is completed and emits the result event. - */ -ResultEmitter.prototype.bufferAndEmit = function (item) { - let rows = this.rowBuffer[item.header.streamId]; - if (!rows) { - rows = this.rowBuffer[item.header.streamId] = []; - } - rows.push(item.row); - if (rows.length === item.length) { - this.emit('result', item.header, null, { rows: rows, meta: item.meta, flags: item.flags}); - delete this.rowBuffer[item.header.streamId]; - } -}; - -exports.Protocol = Protocol; -exports.Parser = Parser; -exports.ResultEmitter = ResultEmitter; diff --git a/lib/streams.ts b/lib/streams.ts new file mode 100644 index 000000000..f4c959fbd --- /dev/null +++ b/lib/streams.ts @@ -0,0 +1,574 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { Transform, Writable } from "stream"; +import errors from "./errors"; +import { FrameReader } from "./readers"; +import types, { FrameHeader } from "./types/index"; +import utils from "./utils"; + +/** + * Transforms chunks, emits data objects {header, chunk} + * @extends Transform + */ +class Protocol extends Transform{ + header: FrameHeader; + bodyLength: number; + version: number; + headerSize: number; + headerChunks: any; + /** + * Transforms chunks, emits data objects {header, chunk} + * @param options Stream options + */ + constructor(options) { + super(options); + this.header = null; + this.bodyLength = 0; + this.clearHeaderChunks(); + this.version = 0; + this.headerSize = 0; + } + _transform(chunk, encoding, callback) { + let error = null; + try { + this.readItems(chunk); + } + catch (err) { + error = err; + } + callback(error); + } + /** + * Parses the chunk into frames (header and body). + * Emits (push) complete frames or frames with incomplete bodies. Following chunks containing the rest of the body will + * be emitted using the same frame. + * It buffers incomplete headers. + * @param {Buffer} chunk + */ + readItems(chunk: Buffer) { + if (!chunk || chunk.length === 0) { + return; + } + if (this.version === 0) { + //The server replies the first message with the max protocol version supported + this.version = FrameHeader.getProtocolVersion(chunk); + this.headerSize = FrameHeader.size(this.version); + } + let offset = 0; + let currentHeader = this.header; + this.header = null; + if (this.headerChunks.byteLength !== 0) { + //incomplete header was buffered try to read the header from the buffered chunks + this.headerChunks.parts.push(chunk); + if (this.headerChunks.byteLength + chunk.length < this.headerSize) { + this.headerChunks.byteLength += chunk.length; + return; + } + currentHeader = FrameHeader.fromBuffer(Buffer.concat(this.headerChunks.parts, this.headerSize)); + offset = this.headerSize - this.headerChunks.byteLength; + this.clearHeaderChunks(); + } + const items = []; + while (true) { + if (!currentHeader) { + if (this.headerSize > chunk.length - offset) { + if (chunk.length - offset <= 0) { + break; + } + //the header is incomplete, buffer it until the next chunk + const headerPart = chunk.slice(offset, chunk.length); + this.headerChunks.parts.push(headerPart); + this.headerChunks.byteLength = headerPart.length; + break; + } + //read header + currentHeader = FrameHeader.fromBuffer(chunk, offset); + offset += this.headerSize; + } + //parse body + const remaining = chunk.length - offset; + if (currentHeader.bodyLength <= remaining + this.bodyLength) { + items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: true }); + offset += currentHeader.bodyLength - this.bodyLength; + //reset the body length + this.bodyLength = 0; + } + else if (remaining >= 0) { + //the body is not fully contained in this chunk + //will continue later + this.header = currentHeader; + this.bodyLength += remaining; + if (remaining > 0) { + //emit if there is at least a byte to emit + items.push({ header: currentHeader, chunk: chunk, offset: offset, frameEnded: false }); + } + break; + } + currentHeader = null; + } + for (let i = 0; i < items.length; i++) { + this.push(items[i]); + } + } + clearHeaderChunks() { + this.headerChunks = { byteLength: 0, parts: [] }; + } +} + +/** + * A stream that gets reads header + body chunks and transforms them into header + (row | error) + * @param {Object} streamOptions Node.js Stream options + * @param {Encoder} encoder Encoder 
instance for the parser to use + * @extends Transform + */ +class Parser extends Transform{ + frames: object; + encoder: any; + constructor(streamOptions, encoder) { + super(streamOptions); + //frames that are streaming, indexed by id + this.frames = {}; + this.encoder = encoder; + } + _transform(item, encoding, callback) { + const frameInfo = this.frameState(item); + + let error = null; + try { + this.parseBody(frameInfo, item); + } + catch (err) { + error = err; + } + callback(error); + + if (item.frameEnded) { + if (frameInfo.cellBuffer) { + //Frame was being streamed but an error force it to buffer the result + this.push({ + header: frameInfo.header, + error: new errors.DriverInternalError('There was an problem while parsing streaming frame, opcode ' + + frameInfo.header.opcode) + }); + } + //all the parsing finished and it was streamed down + //emit an item that signals it + this.push({ header: frameInfo.header, frameEnded: true }); + } + } + /** + * @param frameInfo + * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item + */ + parseBody(frameInfo, item: { header: FrameHeader; chunk: Buffer; offset: number; }) { + frameInfo.isStreaming = frameInfo.byRow && item.header.opcode === types.opcodes.result; + if (!this.handleFrameBuffers(frameInfo, item)) { + // Frame isn't complete and we are not streaming the frame + return; + } + const reader = new FrameReader(item.header, item.chunk, item.offset); + // Check that flags have not been parsed yet for this frame + if (frameInfo.flagsInfo === undefined) { + const originalOffset = reader.offset; + try { + frameInfo.flagsInfo = reader.readFlagsInfo(); + } + catch (e) { + return this.handleParsingError(e, frameInfo, reader, originalOffset); + } + } + + //All the body for most operations is already buffered at this stage + //Except for RESULT + switch (item.header.opcode) { + case types.opcodes.result: + return this.parseResult(frameInfo, reader); + case types.opcodes.ready: + case types.opcodes.authSuccess: + return this.push({ header: frameInfo.header, ready: true }); + case types.opcodes.authChallenge: + return this.push({ header: frameInfo.header, authChallenge: true, token: reader.readBytes() }); + case types.opcodes.authenticate: + return this.push({ header: frameInfo.header, mustAuthenticate: true, authenticatorName: reader.readString() }); + case types.opcodes.error: + return this.push({ header: frameInfo.header, error: reader.readError() }); + case types.opcodes.supported: + return this.push({ header: frameInfo.header, supported: reader.readStringMultiMap() }); + case types.opcodes.event: + return this.push({ header: frameInfo.header, event: reader.readEvent() }); + default: + return this.push({ header: frameInfo.header, error: new Error('Received invalid opcode: ' + item.header.opcode) }); + } + } + /** + * Buffers if needed and returns true if it has all the necessary data to continue parsing the frame. 
+ * @param frameInfo + * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item + * @returns {Boolean} + */ + handleFrameBuffers(frameInfo, item: { header: FrameHeader; chunk: Buffer; offset: number; }): boolean { + if (!frameInfo.isStreaming) { + // Handle buffering for complete frame bodies + const currentLength = (frameInfo.bufferLength || 0) + item.chunk.length - item.offset; + if (currentLength < item.header.bodyLength) { + //buffer until the frame is completed + this.addFrameBuffer(frameInfo, item); + return false; + } + //We have received the full frame body + if (frameInfo.buffers) { + item.chunk = this.getFrameBuffer(frameInfo, item); + item.offset = 0; + } + return true; + } + if (frameInfo.cellBuffer) { + // Handle buffering for frame cells (row cells or metadata cells) + if (item.offset !== 0) { + throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero'); + } + frameInfo.cellBuffer.parts.push(item.chunk); + if (!frameInfo.cellBuffer.expectedLength) { + //Its a buffer outside a row cell (metadata or other) + if (frameInfo.cellBuffer.parts.length !== 2) { + throw new errors.DriverInternalError('Buffer for streaming frame can not contain more than 1 item'); + } + item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length); + frameInfo.cellBuffer = null; + return true; + } + if (frameInfo.cellBuffer.expectedLength > frameInfo.cellBuffer.byteLength + item.chunk.length) { + //We still haven't got the cell data + frameInfo.cellBuffer.byteLength += item.chunk.length; + return false; + } + item.chunk = Buffer.concat(frameInfo.cellBuffer.parts, frameInfo.cellBuffer.byteLength + item.chunk.length); + frameInfo.cellBuffer = null; + } + return true; + } + /** + * Adds this chunk to the frame buffers. + * @param frameInfo + * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item + */ + addFrameBuffer(frameInfo, item: { header: FrameHeader; chunk: Buffer; offset: number; }) { + if (!frameInfo.buffers) { + frameInfo.buffers = [item.chunk.slice(item.offset)]; + frameInfo.bufferLength = item.chunk.length - item.offset; + return; + } + if (item.offset > 0) { + throw new errors.DriverInternalError('Following chunks can not have an offset greater than zero'); + } + frameInfo.buffers.push(item.chunk); + frameInfo.bufferLength += item.chunk.length; + } + /** + * Adds the last chunk and concatenates the frame buffers + * @param frameInfo + * @param {{header: FrameHeader, chunk: Buffer, offset: Number}} item + */ + getFrameBuffer(frameInfo, item: { header: FrameHeader; chunk: Buffer; offset: number; }) { + frameInfo.buffers.push(item.chunk); + const result = Buffer.concat(frameInfo.buffers, frameInfo.bodyLength); + frameInfo.buffers = null; + return result; + } + /** + * Tries to read the result in the body of a message + * @param frameInfo Frame information, header / metadata + * @param {FrameReader} reader + */ + parseResult(frameInfo, reader: FrameReader) { + let result; + // As we might be streaming and the frame buffer might not be complete, + // read the metadata and different types of result values in a try-catch. 
+ // Store the reader position + const originalOffset = reader.offset; + try { + if (!frameInfo.meta) { + frameInfo.kind = reader.readInt(); + // Spec 4.2.5 + switch (frameInfo.kind) { + case types.resultKind.voidResult: + result = { header: frameInfo.header, flags: frameInfo.flagsInfo }; + break; + case types.resultKind.rows: + // Parse the rows metadata, the rest of the response is going to be parsed afterwards + frameInfo.meta = reader.readMetadata(frameInfo.kind); + break; + case types.resultKind.setKeyspace: + result = { header: frameInfo.header, keyspaceSet: reader.readString(), flags: frameInfo.flagsInfo }; + break; + case types.resultKind.prepared: + { + const preparedId = utils.copyBuffer(reader.readShortBytes()); + frameInfo.meta = reader.readMetadata(frameInfo.kind); + result = { header: frameInfo.header, id: preparedId, meta: frameInfo.meta, flags: frameInfo.flagsInfo }; + break; + } + case types.resultKind.schemaChange: + result = { header: frameInfo.header, schemaChange: reader.parseSchemaChange(), flags: frameInfo.flagsInfo }; + break; + default: + throw new errors.DriverInternalError('Unexpected result kind: ' + frameInfo.kind); + } + } + } + catch (e) { + return this.handleParsingError(e, frameInfo, reader, originalOffset); + } + if (result) { + if (frameInfo.emitted) { + // It may contain additional metadata and info that it's not being parsed + return; + } + frameInfo.emitted = true; + return this.push(result); + } + if (reader.remainingLength() > 0) { + this.parseRows(frameInfo, reader); + } + } + /** + * @param frameInfo + * @param {FrameReader} reader + */ + parseRows(frameInfo, reader: FrameReader) { + if (frameInfo.parsingError) { + //No more processing on this frame + return; + } + if (frameInfo.rowLength === undefined) { + try { + frameInfo.rowLength = reader.readInt(); + } + catch (e) { + return this.handleParsingError(e, frameInfo, reader); + } + } + if (frameInfo.rowLength === 0) { + return this.push({ + header: frameInfo.header, + result: { rows: utils.emptyArray, meta: frameInfo.meta, flags: frameInfo.flagsInfo } + }); + } + const meta = frameInfo.meta; + frameInfo.rowIndex = frameInfo.rowIndex || 0; + for (let i = frameInfo.rowIndex; i < frameInfo.rowLength; i++) { + const rowOffset = reader.offset; + const row = new types.Row(meta.columns); + let cellBuffer; + for (let j = 0; j < meta.columns.length; j++) { + const c = meta.columns[j]; + try { + cellBuffer = reader.readBytes(); + } + catch (e) { + return this.handleParsingError(e, frameInfo, reader, rowOffset, i); + } + try { + row[c.name] = this.encoder.decode(cellBuffer, c.type); + } + catch (e) { + //Something went wrong while decoding, we are not going to be able to recover + return this.handleParsingError(e, frameInfo, null); + } + } + this.push({ + header: frameInfo.header, + row: row, + meta: frameInfo.meta, + byRow: frameInfo.byRow, + length: frameInfo.rowLength, + flags: frameInfo.flagsInfo + }); + } + if (frameInfo.byRow) { + // Use an event item to identify that all the streaming rows have finished processing + this.push({ + header: frameInfo.header, + byRowCompleted: true, + meta: frameInfo.meta, + length: frameInfo.rowLength, + flags: frameInfo.flagsInfo + }); + } + } + /** + * Sets parser options (ie: how to yield the results as they are parsed) + * @param {Number} id Id of the stream + * @param options + */ + setOptions(id: number, options) { + if (this.frames[id.toString()]) { + throw new types.DriverError('There was already state for this frame'); + } + this.frames[id.toString()] = options; 
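A rough sketch of how calling code might drive these per-stream options (the parser instance and the stream id below are illustrative, not part of this diff; only setOptions, clearOptions and the byRow flag come from the code above):

// Ask the parser to yield the rows of the response on stream 42 one by one
parser.setOptions(42, { byRow: true });
// ...write the request frame; 'row' items are then pushed individually,
// followed by a byRowCompleted item once the frame ends.
// If the operation is cancelled early, the per-stream state can be dropped manually:
parser.clearOptions(42);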
+ } + /** + * Manually clears the frame options. + * This class already clears the provided options when the frame ends, so it's usually not required to invoke this + * method. + * When manually setting the options for continuous paging, it's possible that the frame options are set while + * it's being cancelled. + * @param {Number} id The streamId + */ + clearOptions(id: number) { + delete this.frames[id.toString()]; + } + /** + * Gets the frame info from the internal state. + * In case it is not there, it creates it. + * In case the frame ended + */ + frameState(item) { + let frameInfo = this.frames[item.header.streamId]; + if (!frameInfo) { + frameInfo = {}; + if (!item.frameEnded) { + //store it in the frames + this.frames[item.header.streamId] = frameInfo; + } + } + else if (item.frameEnded) { + //if it was already stored, remove it + delete this.frames[item.header.streamId]; + } + frameInfo.header = item.header; + return frameInfo; + } + /** + * Handles parsing error: pushing an error if its unexpected or buffer the cell if its streaming + * @param {Error} e + * @param frameInfo + * @param {FrameReader} reader + * @param {Number} [originalOffset] + * @param {Number} [rowIndex] + */ + handleParsingError(e: Error, frameInfo, reader: FrameReader, originalOffset?: number, rowIndex?: number) { + if (reader && frameInfo.isStreaming && (e instanceof RangeError)) { + //A controlled error, buffer from offset and move on + return this.bufferResultCell(frameInfo, reader, originalOffset, rowIndex, e["expectedLength"]); + } + frameInfo.parsingError = true; + frameInfo.cellBuffer = null; + this.push({ header: frameInfo.header, error: e }); + } + /** + * When streaming, it buffers data since originalOffset. + * @param frameInfo + * @param {FrameReader} reader + * @param {Number} [originalOffset] + * @param {Number} [rowIndex] + * @param {Number} [expectedLength] + */ + bufferResultCell(frameInfo, reader: FrameReader, originalOffset: number, rowIndex: number, expectedLength: number) { + if (!originalOffset && originalOffset !== 0) { + originalOffset = reader.offset; + } + frameInfo.rowIndex = rowIndex; + const buffer = reader.slice(originalOffset); + frameInfo.cellBuffer = { + parts: [buffer], + byteLength: buffer.length, + expectedLength: expectedLength + }; + } +} + +/** + * Represents a writable streams that emits results + */ +class ResultEmitter extends Writable { + rowBuffer: object; + constructor(options) { + super(options); + /** + * Stores the rows for frames that needs to be yielded as one result with many rows + */ + this.rowBuffer = {}; + } + _write(item, encoding, callback) { + let error = null; + try { + this.each(item); + } + catch (err) { + error = err; + } + callback(error); + } + /** + * Analyzes the item and emit the corresponding event + */ + each(item) { + if (item.error || item.result) { + //Its either an error or an empty array rows + //no transformation needs to be made + return this.emit('result', item.header, item.error, item.result); + } + if (item.frameEnded) { + return this.emit('frameEnded', item.header); + } + if (item.lastContinuousPage) { + return this.emit('lastContinuousPage', item.header); + } + if (item.byRowCompleted) { + return this.emit('byRowCompleted', item.header, item.row, item.meta, item.flags); + } + if (item.byRow) { + //it should be yielded by row + return this.emit('row', item.header, item.row, item.meta, item.length, item.flags); + } + if (item.row) { + //it should be yielded as a result + //it needs to be buffered to an array of rows + return 
this.bufferAndEmit(item); + } + if (item.event) { + //its an event from Cassandra + return this.emit('nodeEvent', item.header, item.event); + } + //its a raw response (object with flags) + return this.emit('result', item.header, null, item); + } + /** + * Buffers the rows until the result set is completed and emits the result event. + */ + bufferAndEmit(item) { + let rows = this.rowBuffer[item.header.streamId]; + if (!rows) { + rows = this.rowBuffer[item.header.streamId] = []; + } + rows.push(item.row); + if (rows.length === item.length) { + this.emit('result', item.header, null, { rows: rows, meta: item.meta, flags: item.flags }); + delete this.rowBuffer[item.header.streamId]; + } + } +} + +export { + Parser, Protocol, ResultEmitter +}; +export default { + Protocol, + Parser, + ResultEmitter +}; \ No newline at end of file diff --git a/lib/token.js b/lib/token.ts similarity index 88% rename from lib/token.js rename to lib/token.ts index dabed71f1..ea0aff69b 100644 --- a/lib/token.js +++ b/lib/token.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import util from "util"; +import type { DataTypeInfo } from "./encoder"; +import types from "./types/index"; -const types = require('./types'); -const util = require('util'); const _Murmur3TokenType = types.dataTypes.getByName('bigint'); const _RandomTokenType = types.dataTypes.getByName('varint'); @@ -26,25 +26,28 @@ const _OrderedTokenType = types.dataTypes.getByName('blob'); * Represents a token on the Cassandra ring. */ class Token { + protected _value: any; + /** @internal */ constructor(value) { this._value = value; } /** - * @returns {{code: number, info: *|Object}} The type info for the + * @returns {DataTypeInfo} The type info for the * type of the value of the token. */ - getType() { + getType(): DataTypeInfo { throw new Error('You must implement a getType function for this Token instance'); } /** * @returns {*} The raw value of the token. */ - getValue() { + getValue(): any { return this._value; } + /** @internal */ toString() { return this._value.toString(); } @@ -56,14 +59,15 @@ class Token { * @param {Token} other * @returns {Number} */ - compare(other) { + compare(other: Token): number { return this._value.compare(other._value); } - equals(other) { + equals(other: Token): boolean { return this.compare(other) === 0; } + /** @internal */ inspect() { return this.constructor.name + ' { ' + this.toString() + ' }'; } @@ -74,6 +78,7 @@ class Token { * is Murmur3Partitioner. * * The raw token type is a varint (represented by MutableLong). + * @internal @ignore */ class Murmur3Token extends Token { constructor(value) { @@ -90,6 +95,7 @@ class Murmur3Token extends Token { * is RandomPartitioner. * * The raw token type is a bigint (represented by Number). + * @internal @ignore */ class RandomToken extends Token { constructor(value) { @@ -106,6 +112,7 @@ class RandomToken extends Token { * is ByteOrderedPartitioner. * * The raw token type is a blob (represented by Buffer or Array). + * @internal @ignore */ class ByteOrderedToken extends Token { constructor(value) { @@ -133,6 +140,11 @@ class ByteOrderedToken extends Token { * in a range, see {@link unwrap}. */ class TokenRange { + end: Token; + start: Token; + private _tokenizer: any; + + /** @internal */ constructor(start, end, tokenizer) { this.start = start; this.end = end; @@ -150,7 +162,7 @@ class TokenRange { * @returns {TokenRange[]} Split ranges. 
* @throws {Error} If splitting an empty range. */ - splitEvenly(numberOfSplits) { + splitEvenly(numberOfSplits: number): TokenRange[] { if (numberOfSplits < 1) { throw new Error(util.format("numberOfSplits (%d) must be greater than 0.", numberOfSplits)); } @@ -179,7 +191,7 @@ class TokenRange { * * @returns {boolean} Whether this range is empty. */ - isEmpty() { + isEmpty(): boolean { return this.start.equals(this.end) && !this.start.equals(this._tokenizer.minToken()); } @@ -190,7 +202,7 @@ class TokenRange { * * @returns {boolean} Whether this range wraps around. */ - isWrappedAround() { + isWrappedAround(): boolean { return this.start.compare(this.end) > 0 && !this.end.equals(this._tokenizer.minToken()); } @@ -205,7 +217,7 @@ class TokenRange { * * @returns {TokenRange[]} The list of non-wrapping ranges. */ - unwrap() { + unwrap(): TokenRange[] { if (this.isWrappedAround()) { return [ new TokenRange(this.start, this._tokenizer.minToken(), this._tokenizer), @@ -218,10 +230,10 @@ class TokenRange { /** * Whether this range contains a given Token. * - * @param {*} token Token to check for. + * @param {Token} token Token to check for. * @returns {boolean} Whether or not the Token is in this range. */ - contains(token) { + contains(token: Token): boolean { if (this.isEmpty()) { return false; } @@ -250,7 +262,7 @@ class TokenRange { * @param {TokenRange} other Range to compare with. * @returns {boolean} Whether or not the ranges are equal. */ - equals(other) { + equals(other: TokenRange): boolean { if (other === this) { return true; } else if (other instanceof TokenRange) { @@ -266,11 +278,12 @@ class TokenRange { * @param {TokenRange} other Range to compare with. * @returns {Number} */ - compare(other) { + compare(other: TokenRange): number { const compareStart = this.start.compare(other.start); return compareStart !== 0 ? compareStart : this.end.compare(other.end); } + /** @internal */ toString() { return util.format(']%s, %s]', this.start.toString(), @@ -279,8 +292,17 @@ class TokenRange { } } -exports.Token = Token; -exports.TokenRange = TokenRange; -exports.ByteOrderedToken = ByteOrderedToken; -exports.Murmur3Token = Murmur3Token; -exports.RandomToken = RandomToken; \ No newline at end of file +export { + ByteOrderedToken, + Murmur3Token, + RandomToken, Token, + TokenRange +}; + +export default { + Token, + TokenRange, + ByteOrderedToken, + Murmur3Token, + RandomToken +}; \ No newline at end of file diff --git a/lib/tokenizer.js b/lib/tokenizer.ts similarity index 91% rename from lib/tokenizer.js rename to lib/tokenizer.ts index 57340e651..796e8d5ea 100644 --- a/lib/tokenizer.js +++ b/lib/tokenizer.ts @@ -13,13 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +/* eslint-disable @typescript-eslint/no-unused-vars */ +import token, { ByteOrderedToken, Murmur3Token, RandomToken, Token } from "./token"; +import { Integer } from "./types/index"; +import MutableLong from "./types/mutable-long"; +import utils from "./utils"; -const types = require('./types'); -const token = require('./token'); -const utils = require('./utils'); -const MutableLong = require('./types/mutable-long'); -const { Integer } = types; // Murmur3 constants //-0x783C846EEEBDAC2B @@ -37,6 +36,7 @@ const mconst6 = MutableLong.fromNumber(0x38495ab5); /** * Represents a set of methods that are able to generate and parse tokens for the C* partitioner. 
* @abstract + * @internal @ignore */ class Tokenizer { constructor() { @@ -49,7 +49,7 @@ class Tokenizer { * @param {Buffer|Array} value * @returns {Token} Computed token */ - hash(value) { + hash(value: Buffer | Array): Token { throw new Error('You must implement a hash function for the tokenizer'); } @@ -58,7 +58,7 @@ class Tokenizer { * @abstract * @param {String} value */ - parse(value) { + parse(value: string): Token { throw new Error('You must implement a parse function for the tokenizer'); } @@ -72,7 +72,7 @@ class Tokenizer { * @param {Token} end End token * @param {Number} numberOfSplits Number of splits to make. */ - split(start, end, numberOfSplits) { + split(start: Token, end: Token, numberOfSplits: number) { throw new Error('You must implement a split function for the tokenizer'); } @@ -87,7 +87,7 @@ class Tokenizer { * @param {Number} numberOfSplits The number of splits to make * @returns {Array} The evenly-split points on the range */ - splitBase(start, range, ringEnd, ringLength, numberOfSplits) { + splitBase(start: Integer, range: Integer, ringEnd: Integer, ringLength: Integer, numberOfSplits: number): Array { const numberOfSplitsInt = Integer.fromInt(numberOfSplits); const divider = range.divide(numberOfSplitsInt); let remainder = range.modulo(numberOfSplitsInt); @@ -115,15 +115,21 @@ class Tokenizer { * Return internal string based representation of a Token. * @param {Token} token */ - stringify(token) { + stringify(token: Token) { return token.getValue().toString(); } } /** * Uniformly distributes data across the cluster based on Cassandra flavored Murmur3 hashed values. + * @ignore @internal */ class Murmur3Tokenizer extends Tokenizer { + _minToken: Murmur3Token; + _maxToken: Murmur3Token; + _maxValue: Integer; + _minValue: Integer; + _ringLength: Integer; constructor() { super(); @@ -133,7 +139,7 @@ class Murmur3Tokenizer extends Tokenizer { * @param {Buffer} value * @return {Murmur3Token} */ - hash(value) { + hash(value: Buffer): Murmur3Token { // This is an adapted version of the MurmurHash.hash3_x64_128 from Cassandra used // for M3P. Compared to that methods, there's a few inlining of arguments and we // only return the first 64-bits of the result since that's all M3 partitioner uses. @@ -239,12 +245,12 @@ class Murmur3Tokenizer extends Tokenizer { /** * - * @param {Array} key + * @param {Buffer} key * @param {Number} offset * @param {Number} index * @return {MutableLong} */ - getBlock(key, offset, index) { + getBlock(key: Buffer, offset: number, index: number): MutableLong { const i8 = index << 3; const blockOffset = offset + i8; return new MutableLong( @@ -259,13 +265,13 @@ class Murmur3Tokenizer extends Tokenizer { * @param {MutableLong} v * @param {Number} n */ - rotl64(v, n) { + rotl64(v: MutableLong, n: number) { const left = v.clone().shiftLeft(n); v.shiftRightUnsigned(64 - n).or(left); } /** @param {MutableLong} k */ - fmix(k) { + fmix(k: MutableLong) { k.xor(new MutableLong(k.getUint16(2) >>> 1 | ((k.getUint16(3) << 15) & 0xffff), k.getUint16(3) >>> 1, 0, 0)); k.multiply(mconst3); const other = new MutableLong( @@ -284,7 +290,7 @@ class Murmur3Tokenizer extends Tokenizer { * @param {String} value * @returns {Murmur3Token} */ - parse(value) { + parse(value: string): Murmur3Token { return new token.Murmur3Token(MutableLong.fromString(value)); } @@ -353,8 +359,14 @@ class Murmur3Tokenizer extends Tokenizer { /** * Uniformly distributes data across the cluster based on MD5 hash values. 
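For orientation, a minimal sketch of how a tokenizer maps a serialized partition key onto the ring (the key buffer is a made-up example; in the driver it is built from the routing key):

import { Murmur3Tokenizer } from "./tokenizer";

const tokenizer = new Murmur3Tokenizer();
// Hash the serialized partition key into a Murmur3Token on the ring
const t = tokenizer.hash(Buffer.from("user:42"));
// Tokens from the same partitioner are comparable and have a string form
console.log(t.compare(tokenizer.parse("0")), tokenizer.stringify(t));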
+ * @ignore @internal */ class RandomTokenizer extends Tokenizer { + _crypto: any; + _minToken: any; + _maxValue: any; + _maxToken: any; + _ringLength: any; constructor() { super(); // eslint-disable-next-line @@ -365,7 +377,7 @@ class RandomTokenizer extends Tokenizer { * @param {Buffer|Array} value * @returns {RandomToken} */ - hash(value) { + hash(value: Buffer | Array): RandomToken { if (Array.isArray(value)) { value = utils.allocBufferFromArray(value); } @@ -376,7 +388,7 @@ class RandomTokenizer extends Tokenizer { /** * @returns {Token} */ - parse(value) { + parse(value): Token { return new token.RandomToken(Integer.fromString(value)); } @@ -427,16 +439,20 @@ class RandomTokenizer extends Tokenizer { } } +/** + * @ignore @internal + */ class ByteOrderedTokenizer extends Tokenizer { + _minToken: any; constructor() { super(); } /** - * @param {Buffer} value + * @param {Buffer | Array} value * @returns {ByteOrderedToken} */ - hash(value) { + hash(value: Buffer | Array): ByteOrderedToken { // strip any trailing zeros as tokens with trailing zeros are equivalent // to those who don't have them. if (Array.isArray(value)) { @@ -572,13 +588,21 @@ class ByteOrderedTokenizer extends Tokenizer { * @param {Number} value * @return {MutableLong} */ -function fromSignedByte(value) { +function fromSignedByte(value: number): MutableLong { if (value < 128) { return new MutableLong(value, 0, 0, 0); } return new MutableLong((value - 256) & 0xffff, 0xffff, 0xffff, 0xffff); } -exports.Murmur3Tokenizer = Murmur3Tokenizer; -exports.RandomTokenizer = RandomTokenizer; -exports.ByteOrderedTokenizer = ByteOrderedTokenizer; +export { + ByteOrderedTokenizer, Murmur3Tokenizer, + RandomTokenizer, Tokenizer +}; + +export default { + Murmur3Tokenizer, + RandomTokenizer, + ByteOrderedTokenizer, + Tokenizer +}; \ No newline at end of file diff --git a/lib/tracker/index.d.ts b/lib/tracker/index.d.ts deleted file mode 100644 index e39e8bae3..000000000 --- a/lib/tracker/index.d.ts +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { ExecutionOptions, Host } from '../../'; - -export namespace tracker { - interface RequestTracker { - onError( - host: Host, - query: string | Array<{ query: string, params?: any }>, - parameters: any[] | { [key: string]: any } | null, - executionOptions: ExecutionOptions, - requestLength: number, - err: Error, - latency: number[]): void; - - onSuccess( - host: Host, - query: string | Array<{ query: string, params?: any }>, - parameters: any[] | { [key: string]: any } | null, - executionOptions: ExecutionOptions, - requestLength: number, - responseLength: number, - latency: number[]): void; - - shutdown(): void; - } - - class RequestLogger implements RequestTracker { - constructor(options: { - slowThreshold?: number; - logNormalRequests?: boolean; - logErroredRequests?: boolean; - messageMaxQueryLength?: number; - messageMaxParameterValueLength?: number; - messageMaxErrorStackTraceLength?: number; - }); - - onError(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]): void; - - onSuccess(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]): void; - - shutdown(): void; - } -} \ No newline at end of file diff --git a/lib/tracker/index.js b/lib/tracker/index.ts similarity index 76% rename from lib/tracker/index.js rename to lib/tracker/index.ts index 848d5bb3b..e378c57b2 100644 --- a/lib/tracker/index.js +++ b/lib/tracker/index.ts @@ -14,12 +14,19 @@ * limitations under the License. */ -'use strict'; + /** * Tracker module. * @module tracker */ -exports.RequestLogger = require('./request-logger'); -exports.RequestTracker = require('./request-tracker'); \ No newline at end of file +import RequestLogger from "./request-logger"; +import RequestTracker from "./request-tracker"; +export { + RequestLogger, RequestTracker +}; +export default { + RequestTracker, + RequestLogger +}; \ No newline at end of file diff --git a/lib/tracker/request-logger.js b/lib/tracker/request-logger.ts similarity index 76% rename from lib/tracker/request-logger.js rename to lib/tracker/request-logger.ts index da3d2085a..5f40732da 100644 --- a/lib/tracker/request-logger.js +++ b/lib/tracker/request-logger.ts @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import events from "events"; +import { format } from "util"; +import errors from "../errors"; +import type { ExecutionOptions } from "../execution-options"; +import type { Host } from "../host"; +import RequestTracker from "./request-tracker"; -'use strict'; - -const events = require('events'); -const RequestTracker = require('./request-tracker'); -const errors = require('../errors'); -const { format } = require('util'); const nanosToMillis = 1000000; const defaultMessageMaxQueryLength = 500; @@ -38,6 +38,25 @@ const defaultMaxErrorStackTraceLength = 200; * const client = new Client({ contactPoints, requestTracker: requestLogger }); */ class RequestLogger extends RequestTracker { + private _options: { slowThreshold?: number; requestSizeThreshold?: number; logNormalRequests?: boolean; logErroredRequests?: boolean; messageMaxQueryLength?: number; messageMaxParameterValueLength?: number; messageMaxErrorStackTraceLength?: number; }; + /** + * Determines whether it should emit 'normal' events for every EXECUTE, QUERY and BATCH request executed + * successfully, useful only for debugging + * @type {Boolean} + */ + private logNormalRequests: boolean; + /** + * Determines whether it should emit 'failure' events for every EXECUTE, QUERY and BATCH request execution that + * resulted in an error + * @type {Boolean} + */ + private logErroredRequests: boolean; + /** + * The object instance that emits 'slow', 'large', 'normal' and + * 'failure' events. + * @type {EventEmitter} + */ + private emitter: events; /** * Creates a new instance of {@link RequestLogger}. @@ -59,7 +78,7 @@ class RequestLogger extends RequestTracker { * @param {Number} [options.messageMaxErrorStackTraceLength] The maximum amount of characters of the stack trace * that will be included in the message. Defaults to 200. */ - constructor(options) { + constructor(options: { slowThreshold?: number; requestSizeThreshold?: number; logNormalRequests?: boolean; logErroredRequests?: boolean; messageMaxQueryLength?: number; messageMaxParameterValueLength?: number; messageMaxErrorStackTraceLength?: number; }) { super(); if (!options) { throw new errors.ArgumentError('RequestLogger options parameter is required'); @@ -67,25 +86,10 @@ class RequestLogger extends RequestTracker { this._options = options; - /** - * Determines whether it should emit 'normal' events for every EXECUTE, QUERY and BATCH request executed - * successfully, useful only for debugging - * @type {Boolean} - */ this.logNormalRequests = this._options.logNormalRequests; - /** - * Determines whether it should emit 'failure' events for every EXECUTE, QUERY and BATCH request execution that - * resulted in an error - * @type {Boolean} - */ this.logErroredRequests = this._options.logErroredRequests; - /** - * The object instance that emits 'slow', 'large', 'normal' and - * 'failure' events. - * @type {EventEmitter} - */ this.emitter = new events.EventEmitter(); } @@ -93,15 +97,15 @@ class RequestLogger extends RequestTracker { * Logs message if request execution was deemed too slow, large or if normal requests are logged. 
* @override */ - onSuccess(host, query, parameters, execOptions, requestLength, responseLength, latency) { + onSuccess(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, responseLength: number, latency: number[]) { if (this._options.slowThreshold > 0 && toMillis(latency) > this._options.slowThreshold) { - this._logSlow(host, query, parameters, execOptions, requestLength, responseLength, latency); + this._logSlow(host, query, parameters, executionOptions, requestLength, responseLength, latency); } else if (this._options.requestSizeThreshold > 0 && requestLength > this._options.requestSizeThreshold) { - this._logLargeRequest(host, query, parameters, execOptions, requestLength, responseLength, latency); + this._logLargeRequest(host, query, parameters, executionOptions, requestLength, responseLength, latency); } else if (this.logNormalRequests) { - this._logNormalRequest(host, query, parameters, execOptions, requestLength, responseLength, latency); + this._logNormalRequest(host, query, parameters, executionOptions, requestLength, responseLength, latency); } } @@ -109,35 +113,35 @@ class RequestLogger extends RequestTracker { * Logs message if request execution was too large and/or encountered an error. * @override */ - onError(host, query, parameters, execOptions, requestLength, err, latency) { + onError(host: Host, query: string | Array<{ query: string; params?: any }>, parameters: any[] | { [p: string]: any } | null, executionOptions: ExecutionOptions, requestLength: number, err: Error, latency: number[]) { if (this._options.requestSizeThreshold > 0 && requestLength > this._options.requestSizeThreshold) { - this._logLargeErrorRequest(host, query, parameters, execOptions, requestLength, err, latency); + this._logLargeErrorRequest(host, query, parameters, executionOptions, requestLength, err, latency); } else if (this.logErroredRequests) { - this._logErrorRequest(host, query, parameters, execOptions, requestLength, err, latency); + this._logErrorRequest(host, query, parameters, executionOptions, requestLength, err, latency); } } - _logSlow(host, query, parameters, execOptions, requestLength, responseLength, latency) { + private _logSlow(host, query, parameters, execOptions, requestLength, responseLength, latency) { const message = format('[%s] Slow request, took %d ms (%s): %s', host.address, Math.floor(toMillis(latency)), getPayloadSizes(requestLength, responseLength), getStatementInfo(query, parameters, execOptions, this._options)); this.emitter.emit('slow', message); } - _logLargeRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { + private _logLargeRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { const message = format('[%s] Request exceeded length, %s (took %d ms): %s', host.address, getPayloadSizes(requestLength, responseLength), ~~toMillis(latency), getStatementInfo(query, parameters, execOptions, this._options)); this.emitter.emit('large', message); } - _logNormalRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { + private _logNormalRequest(host, query, parameters, execOptions, requestLength, responseLength, latency) { const message = format('[%s] Request completed normally, took %d ms (%s): %s', host.address, ~~toMillis(latency), getPayloadSizes(requestLength, responseLength), getStatementInfo(query, parameters, execOptions, this._options)); 
this.emitter.emit('normal', message); } - _logLargeErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { + private _logLargeErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { const maxStackTraceLength = this._options.messageMaxErrorStackTraceLength || defaultMaxErrorStackTraceLength; const message = format('[%s] Request exceeded length and execution failed, %s (took %d ms): %s; error: %s', host.address, getPayloadSizes(requestLength), ~~toMillis(latency), @@ -147,7 +151,7 @@ class RequestLogger extends RequestTracker { this.emitter.emit('large', message); } - _logErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { + private _logErrorRequest(host, query, parameters, execOptions, requestLength, err, latency) { const maxStackTraceLength = this._options.messageMaxErrorStackTraceLength || defaultMaxErrorStackTraceLength; const message = format('[%s] Request execution failed, took %d ms (%s): %s; error: %s', host.address, ~~toMillis(latency), getPayloadSizes(requestLength), @@ -279,7 +283,7 @@ function formatParam(value, maxLength) { return value.toString().substr(0, maxLength); } -function getPayloadSizes(requestLength, responseLength) { +function getPayloadSizes(requestLength, responseLength?) { let message = 'request size ' + formatSize(requestLength); if (responseLength !== undefined) { message += ' / response size ' + formatSize(responseLength); @@ -291,4 +295,4 @@ function formatSize(length) { return length > 1000 ? Math.round(length / 1024) + ' KB' : length + ' bytes'; } -module.exports = RequestLogger; \ No newline at end of file +export default RequestLogger; \ No newline at end of file diff --git a/lib/tracker/request-tracker.js b/lib/tracker/request-tracker.ts similarity index 80% rename from lib/tracker/request-tracker.js rename to lib/tracker/request-tracker.ts index 3a00732e1..4ca0a3149 100644 --- a/lib/tracker/request-tracker.js +++ b/lib/tracker/request-tracker.ts @@ -13,8 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import type { ExecutionOptions } from "../execution-options"; +import type { Host } from "../host"; + -'use strict'; /** * Tracks request execution for a {@link Client}. @@ -41,9 +44,14 @@ class RequestTracker { * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the * remaining part of the real time that can't be represented in second precision (see process.hrtime()). */ - onSuccess(host, query, parameters, executionOptions, requestLength, responseLength, latency) { - - } + onSuccess?( + host: Host, + query: string | Array<{ query: string, params?: any }>, + parameters: any[] | { [key: string]: any } | null, + executionOptions: ExecutionOptions, + requestLength: number, + responseLength: number, + latency: number[]): void {} /** * Invoked each time a query or batch request fails. @@ -59,16 +67,19 @@ class RequestTracker { * @param {Array} latency An array containing [seconds, nanoseconds] tuple, where nanoseconds is the * remaining part of the real time that can't be represented in second precision (see process.hrtime()). 
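+ * @example
+ * // Minimal sketch of overriding onError in a custom tracker (CountingTracker is a hypothetical name,
+ * // not part of the driver); the signature follows the declaration below.
+ * class CountingTracker extends RequestTracker {
+ *   onError(host, query, parameters, executionOptions, requestLength, err, latency) {
+ *     console.error(`Request to ${host.address} failed: ${err.message}`);
+ *   }
+ * }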
*/ - onError(host, query, parameters, executionOptions, requestLength, err, latency) { - - } + onError?( + host: Host, + query: string | Array<{ query: string, params?: any }>, + parameters: any[] | { [key: string]: any } | null, + executionOptions: ExecutionOptions, + requestLength: number, + err: Error, + latency: number[]): void {} /** * Invoked when the Client is being shutdown. */ - shutdown() { - - } + shutdown?(): void {} } -module.exports = RequestTracker; \ No newline at end of file +export default RequestTracker; \ No newline at end of file diff --git a/lib/types/big-decimal.js b/lib/types/big-decimal.js deleted file mode 100644 index 39ec9f7bc..000000000 --- a/lib/types/big-decimal.js +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const Integer = require('./integer'); -const utils = require('../utils'); - -/** @module types */ -/** - * Constructs an immutable arbitrary-precision signed decimal number. - * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} - * unscaled value and a 32-bit integer scale. If zero - * or positive, the scale is the number of digits to the right of the - * decimal point. If negative, the unscaled value of the number is - * multiplied by ten to the power of the negation of the scale. The - * value of the number represented by the BigDecimal is - * therefore (unscaledValue × 10-scale). - * @class - * @classdesc The BigDecimal class provides operations for - * arithmetic, scale manipulation, rounding, comparison and - * format conversion. The {@link #toString} method provides a - * canonical representation of a BigDecimal. - * @param {Integer|Number} unscaledValue The integer part of the decimal. - * @param {Number} scale The scale of the decimal. 
- * @constructor - */ -function BigDecimal(unscaledValue, scale) { - if (typeof unscaledValue === 'number') { - unscaledValue = Integer.fromNumber(unscaledValue); - } - /** - * @type {Integer} - * @private - */ - this._intVal = unscaledValue; - /** - * @type {Number} - * @private - */ - this._scale = scale; -} - -/** - * Returns the BigDecimal representation of a buffer composed of the scale (int32BE) and the unsigned value (varint BE) - * @param {Buffer} buf - * @returns {BigDecimal} - */ -BigDecimal.fromBuffer = function (buf) { - const scale = buf.readInt32BE(0); - const unscaledValue = Integer.fromBuffer(buf.slice(4)); - return new BigDecimal(unscaledValue, scale); -}; - -/** - * Returns a buffer representation composed of the scale as a BE int 32 and the unsigned value as a BE varint - * @param {BigDecimal} value - * @returns {Buffer} - */ -BigDecimal.toBuffer = function (value) { - const unscaledValueBuffer = Integer.toBuffer(value._intVal); - const scaleBuffer = utils.allocBufferUnsafe(4); - scaleBuffer.writeInt32BE(value._scale, 0); - return Buffer.concat([scaleBuffer, unscaledValueBuffer], scaleBuffer.length + unscaledValueBuffer.length); -}; - -/** - * Returns a BigDecimal representation of the string - * @param {String} value - * @returns {BigDecimal} - */ -BigDecimal.fromString = function (value) { - if (!value) { - throw new TypeError('Invalid null or undefined value'); - } - value = value.trim(); - const scaleIndex = value.indexOf('.'); - let scale = 0; - if (scaleIndex >= 0) { - scale = value.length - 1 - scaleIndex; - value = value.substr(0, scaleIndex) + value.substr(scaleIndex + 1); - } - return new BigDecimal(Integer.fromString(value), scale); -}; - -/** - * Returns a BigDecimal representation of the Number - * @param {Number} value - * @returns {BigDecimal} - */ -BigDecimal.fromNumber = function (value) { - if (isNaN(value)) { - return new BigDecimal(Integer.ZERO, 0); - } - let textValue = value.toString(); - if (textValue.indexOf('e') >= 0) { - //get until scale 20 - textValue = value.toFixed(20); - } - return BigDecimal.fromString(textValue); -}; - -/** - * Returns true if the value of the BigDecimal instance and other are the same - * @param {BigDecimal} other - * @returns {Boolean} - */ -BigDecimal.prototype.equals = function (other) { - return ((other instanceof BigDecimal) && this.compare(other) === 0); -}; - -BigDecimal.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * @param {BigDecimal} other - * @returns {boolean} - */ -BigDecimal.prototype.notEquals = function (other) { - return !this.equals(other); -}; - -/** - * Compares this BigDecimal with the given one. - * @param {BigDecimal} other Integer to compare against. - * @return {number} 0 if they are the same, 1 if the this is greater, and -1 - * if the given one is greater. - */ -BigDecimal.prototype.compare = function (other) { - const diff = this.subtract(other); - if (diff.isNegative()) { - return -1; - } - if (diff.isZero()) { - return 0; - } - return +1; -}; - -/** - * Returns the difference of this and the given BigDecimal. - * @param {BigDecimal} other The BigDecimal to subtract from this. - * @return {!BigDecimal} The BigDecimal result. 
- */ -BigDecimal.prototype.subtract = function (other) { - const first = this; - if (first._scale === other._scale) { - return new BigDecimal(first._intVal.subtract(other._intVal), first._scale); - } - let diffScale; - let unscaledValue; - if (first._scale < other._scale) { - //The scale of this is lower - diffScale = other._scale - first._scale; - //multiple this unScaledValue to compare in the same scale - unscaledValue = first._intVal - .multiply(Integer.fromNumber(Math.pow(10, diffScale))) - .subtract(other._intVal); - return new BigDecimal(unscaledValue, other._scale); - } - //The scale of this is higher - diffScale = first._scale - other._scale; - //multiple this unScaledValue to compare in the same scale - unscaledValue = first._intVal - .subtract( - other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); - return new BigDecimal(unscaledValue, first._scale); -}; - -/** - * Returns the sum of this and the given BigDecimal. - * @param {BigDecimal} other The BigDecimal to sum to this. - * @return {!BigDecimal} The BigDecimal result. - */ -BigDecimal.prototype.add = function (other) { - const first = this; - if (first._scale === other._scale) { - return new BigDecimal(first._intVal.add(other._intVal), first._scale); - } - let diffScale; - let unscaledValue; - if (first._scale < other._scale) { - //The scale of this is lower - diffScale = other._scale - first._scale; - //multiple this unScaledValue to compare in the same scale - unscaledValue = first._intVal - .multiply(Integer.fromNumber(Math.pow(10, diffScale))) - .add(other._intVal); - return new BigDecimal(unscaledValue, other._scale); - } - //The scale of this is higher - diffScale = first._scale - other._scale; - //multiple this unScaledValue to compare in the same scale - unscaledValue = first._intVal - .add( - other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); - return new BigDecimal(unscaledValue, first._scale); -}; - -/** - * Returns true if the current instance is greater than the other - * @param {BigDecimal} other - * @returns {boolean} - */ -BigDecimal.prototype.greaterThan = function (other) { - return this.compare(other) === 1; -}; - -/** @return {boolean} Whether this value is negative. */ -BigDecimal.prototype.isNegative = function () { - return this._intVal.isNegative(); -}; - -/** @return {boolean} Whether this value is zero. */ -BigDecimal.prototype.isZero = function () { - return this._intVal.isZero(); -}; - -/** - * Returns the string representation of this BigDecimal - * @returns {string} - */ -BigDecimal.prototype.toString = function () { - let intString = this._intVal.toString(); - if (this._scale === 0) { - return intString; - } - let signSymbol = ''; - if (intString.charAt(0) === '-') { - signSymbol = '-'; - intString = intString.substr(1); - } - let separatorIndex = intString.length - this._scale; - if (separatorIndex <= 0) { - //add zeros at the beginning, plus an additional zero - intString = utils.stringRepeat('0', (-separatorIndex) + 1) + intString; - separatorIndex = intString.length - this._scale; - } - return signSymbol + intString.substr(0, separatorIndex) + '.' + intString.substr(separatorIndex); -}; - -/** - * Returns a Number representation of this BigDecimal. - * @returns {Number} - */ -BigDecimal.prototype.toNumber = function () { - return parseFloat(this.toString()); -}; - -/** - * Returns the string representation. - * Method used by the native JSON.stringify() to serialize this instance. 
- */ -BigDecimal.prototype.toJSON = function () { - return this.toString(); -}; - - -module.exports = BigDecimal; \ No newline at end of file diff --git a/lib/types/big-decimal.ts b/lib/types/big-decimal.ts new file mode 100644 index 000000000..1ded9b89f --- /dev/null +++ b/lib/types/big-decimal.ts @@ -0,0 +1,277 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import utils from "../utils"; +import Integer from "./integer"; + +/** @module types */ +/** + * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} + * unscaled value and a 32-bit integer scale. If zero + * or positive, the scale is the number of digits to the right of the + * decimal point. If negative, the unscaled value of the number is + * multiplied by ten to the power of the negation of the scale. The + * value of the number represented by the BigDecimal is + * therefore (unscaledValue × 10-scale). + * @class + * @classdesc The BigDecimal class provides operations for + * arithmetic, scale manipulation, rounding, comparison and + * format conversion. The {@link #toString} method provides a + * canonical representation of a BigDecimal. + */ +class BigDecimal { + private _intVal: Integer; + private _scale: number; + + /** + * Constructs an immutable arbitrary-precision signed decimal number. + * A BigDecimal consists of an [arbitrary precision integer]{@link module:types~Integer} + * unscaled value and a 32-bit integer scale. If zero + * or positive, the scale is the number of digits to the right of the + * decimal point. If negative, the unscaled value of the number is + * multiplied by ten to the power of the negation of the scale. The + * value of the number represented by the BigDecimal is + * therefore (unscaledValue × 10-scale). + * @param {Integer|Number} unscaledValue The integer part of the decimal. + * @param {Number} scale The scale of the decimal. 
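+ * @example
+ * // A minimal sketch of the scale semantics described above:
+ * const price = new BigDecimal(1234, 2);        // unscaled value 1234, scale 2
+ * price.toString();                             // '12.34'
+ * BigDecimal.fromString('12.34').equals(price); // true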
+ * @constructor + */ + constructor(unscaledValue: Integer | number, scale: number) { + if (typeof unscaledValue === 'number') { + unscaledValue = Integer.fromNumber(unscaledValue); + } + this._intVal = unscaledValue; + this._scale = scale; + } + + /** + * Returns the BigDecimal representation of a buffer composed of the scale (int32BE) and the unsigned value (varint BE) + * @param {Buffer} buf + * @returns {BigDecimal} + */ + static fromBuffer(buf: Buffer): BigDecimal { + const scale = buf.readInt32BE(0); + const unscaledValue = Integer.fromBuffer(buf.slice(4)); + return new BigDecimal(unscaledValue, scale); + } + + /** + * Returns a buffer representation composed of the scale as a BE int 32 and the unsigned value as a BE varint + * @param {BigDecimal} value + * @returns {Buffer} + */ + static toBuffer(value: BigDecimal): Buffer { + const unscaledValueBuffer = Integer.toBuffer(value._intVal); + const scaleBuffer = utils.allocBufferUnsafe(4); + scaleBuffer.writeInt32BE(value._scale, 0); + return Buffer.concat([scaleBuffer, unscaledValueBuffer], scaleBuffer.length + unscaledValueBuffer.length); + } + + /** + * Returns a BigDecimal representation of the string + * @param {String} value + * @returns {BigDecimal} + */ + static fromString(value: string): BigDecimal { + if (!value) { + throw new TypeError('Invalid null or undefined value'); + } + value = value.trim(); + const scaleIndex = value.indexOf('.'); + let scale = 0; + if (scaleIndex >= 0) { + scale = value.length - 1 - scaleIndex; + value = value.substr(0, scaleIndex) + value.substr(scaleIndex + 1); + } + return new BigDecimal(Integer.fromString(value), scale); + } + + /** + * Returns a BigDecimal representation of the Number + * @param {Number} value + * @returns {BigDecimal} + */ + static fromNumber(value: number): BigDecimal { + if (isNaN(value)) { + return new BigDecimal(Integer.ZERO, 0); + } + let textValue = value.toString(); + if (textValue.indexOf('e') >= 0) { + //get until scale 20 + textValue = value.toFixed(20); + } + return BigDecimal.fromString(textValue); + } + + /** + * Returns true if the value of the BigDecimal instance and other are the same + * @param {BigDecimal} other + * @returns {Boolean} + */ + equals(other: BigDecimal): boolean { + return ((other instanceof BigDecimal) && this.compare(other) === 0); + } + + /** @internal */ + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * @param {BigDecimal} other + * @returns {boolean} + */ + notEquals(other: BigDecimal): boolean { + return !this.equals(other); + } + + /** + * Compares this BigDecimal with the given one. + * @param {BigDecimal} other Integer to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: BigDecimal): number { + const diff = this.subtract(other); + if (diff.isNegative()) { + return -1; + } + if (diff.isZero()) { + return 0; + } + return +1; + } + + /** + * Returns the difference of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to subtract from this. + * @return {!BigDecimal} The BigDecimal result. 
+ */ + subtract(other: BigDecimal): BigDecimal { + const first = this; + if (first._scale === other._scale) { + return new BigDecimal(first._intVal.subtract(other._intVal), first._scale); + } + let diffScale; + let unscaledValue; + if (first._scale < other._scale) { + //The scale of this is lower + diffScale = other._scale - first._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .multiply(Integer.fromNumber(Math.pow(10, diffScale))) + .subtract(other._intVal); + return new BigDecimal(unscaledValue, other._scale); + } + //The scale of this is higher + diffScale = first._scale - other._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .subtract( + other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); + return new BigDecimal(unscaledValue, first._scale); + } + + /** + * Returns the sum of this and the given BigDecimal. + * @param {BigDecimal} other The BigDecimal to sum to this. + * @return {!BigDecimal} The BigDecimal result. + */ + add(other: BigDecimal): BigDecimal { + const first = this; + if (first._scale === other._scale) { + return new BigDecimal(first._intVal.add(other._intVal), first._scale); + } + let diffScale; + let unscaledValue; + if (first._scale < other._scale) { + //The scale of this is lower + diffScale = other._scale - first._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .multiply(Integer.fromNumber(Math.pow(10, diffScale))) + .add(other._intVal); + return new BigDecimal(unscaledValue, other._scale); + } + //The scale of this is higher + diffScale = first._scale - other._scale; + //multiple this unScaledValue to compare in the same scale + unscaledValue = first._intVal + .add( + other._intVal.multiply(Integer.fromNumber(Math.pow(10, diffScale)))); + return new BigDecimal(unscaledValue, first._scale); + } + + /** + * Returns true if the current instance is greater than the other + * @param {BigDecimal} other + * @returns {boolean} + */ + greaterThan(other: BigDecimal): boolean { + return this.compare(other) === 1; + } + + /** @return {boolean} Whether this value is negative. */ + isNegative(): boolean { + return this._intVal.isNegative(); + } + + /** @return {boolean} Whether this value is zero. */ + isZero(): boolean { + return this._intVal.isZero(); + } + + /** + * Returns the string representation of this BigDecimal + * @returns {string} + */ + toString(): string { + let intString = this._intVal.toString(); + if (this._scale === 0) { + return intString; + } + let signSymbol = ''; + if (intString.charAt(0) === '-') { + signSymbol = '-'; + intString = intString.substr(1); + } + let separatorIndex = intString.length - this._scale; + if (separatorIndex <= 0) { + //add zeros at the beginning, plus an additional zero + intString = utils.stringRepeat('0', (-separatorIndex) + 1) + intString; + separatorIndex = intString.length - this._scale; + } + return signSymbol + intString.substr(0, separatorIndex) + '.' + intString.substr(separatorIndex); + } + + /** + * Returns a Number representation of this BigDecimal. + * @returns {Number} + */ + toNumber(): number { + return parseFloat(this.toString()); + } + + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. 
+ */ + toJSON(): string { + return this.toString(); + } +} + +export default BigDecimal; \ No newline at end of file diff --git a/lib/types/duration.js b/lib/types/duration.js deleted file mode 100644 index 4dc084f04..000000000 --- a/lib/types/duration.js +++ /dev/null @@ -1,518 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const Long = require('long'); -const util = require('util'); -const utils = require('../utils'); -const VIntCoding = utils.VIntCoding; - -/** @module types */ - -// Reuse the same buffers that should perform slightly better than built-in buffer pool -const reusableBuffers = { - months: utils.allocBuffer(9), - days: utils.allocBuffer(9), - nanoseconds: utils.allocBuffer(9) -}; - -const maxInt32 = 0x7FFFFFFF; -const longOneThousand = Long.fromInt(1000); -const nanosPerMicro = longOneThousand; -const nanosPerMilli = longOneThousand.multiply(nanosPerMicro); -const nanosPerSecond = longOneThousand.multiply(nanosPerMilli); -const nanosPerMinute = Long.fromInt(60).multiply(nanosPerSecond); -const nanosPerHour = Long.fromInt(60).multiply(nanosPerMinute); -const daysPerWeek = 7; -const monthsPerYear = 12; -const standardRegex = /(\d+)(y|mo|w|d|h|s|ms|us|µs|ns|m)/gi; -const iso8601Regex = /P((\d+)Y)?((\d+)M)?((\d+)D)?(T((\d+)H)?((\d+)M)?((\d+)S)?)?/; -const iso8601WeekRegex = /P(\d+)W/; -const iso8601AlternateRegex = /P(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})/; - -/** - * Creates a new instance of {@link Duration}. - * @classdesc - * Represents a duration. A duration stores separately months, days, and seconds due to the fact that the number of - * days in a month varies, and a day can have 23 or 25 hours if a daylight saving is involved. - * @param {Number} months The number of months. - * @param {Number} days The number of days. - * @param {Number|Long} nanoseconds The number of nanoseconds. - * @constructor - */ -function Duration(months, days, nanoseconds) { - /** - * Gets the number of months. - * @type {Number} - */ - this.months = months; - /** - * Gets the number of days. - * @type {Number} - */ - this.days = days; - /** - * Gets the number of nanoseconds represented as a int64. - * @type {Long} - */ - this.nanoseconds = typeof nanoseconds === 'number' ? Long.fromNumber(nanoseconds) : nanoseconds; -} - -Duration.prototype.equals = function (other) { - if (!(other instanceof Duration)) { - return false; - } - return this.months === other.months && - this.days === other.days && - this.nanoseconds.equals(other.nanoseconds); -}; - -/** - * Serializes the duration and returns the representation of the value in bytes. 
- * @returns {Buffer} - */ -Duration.prototype.toBuffer = function () { - const lengthMonths = VIntCoding.writeVInt(Long.fromNumber(this.months), reusableBuffers.months); - const lengthDays = VIntCoding.writeVInt(Long.fromNumber(this.days), reusableBuffers.days); - const lengthNanoseconds = VIntCoding.writeVInt(this.nanoseconds, reusableBuffers.nanoseconds); - const buffer = utils.allocBufferUnsafe(lengthMonths + lengthDays + lengthNanoseconds); - reusableBuffers.months.copy(buffer, 0, 0, lengthMonths); - let offset = lengthMonths; - reusableBuffers.days.copy(buffer, offset, 0, lengthDays); - offset += lengthDays; - reusableBuffers.nanoseconds.copy(buffer, offset, 0, lengthNanoseconds); - return buffer; -}; - -/** - * Returns the string representation of the value. - * @return {string} - */ -Duration.prototype.toString = function () { - let value = ''; - function append(dividend, divisor, unit) { - if (dividend === 0 || dividend < divisor) { - return dividend; - } - // string concatenation is supposed to be fasted than join() - value += (dividend / divisor).toFixed(0) + unit; - return dividend % divisor; - } - function append64(dividend, divisor, unit) { - if (dividend.equals(Long.ZERO) || dividend.lessThan(divisor)) { - return dividend; - } - // string concatenation is supposed to be fasted than join() - value += dividend.divide(divisor).toString() + unit; - return dividend.modulo(divisor); - } - if (this.months < 0 || this.days < 0 || this.nanoseconds.isNegative()) { - value = '-'; - } - let remainder = append(Math.abs(this.months), monthsPerYear, "y"); - append(remainder, 1, "mo"); - append(Math.abs(this.days), 1, "d"); - - if (!this.nanoseconds.equals(Long.ZERO)) { - const nanos = this.nanoseconds.isNegative() ? this.nanoseconds.negate() : this.nanoseconds; - remainder = append64(nanos, nanosPerHour, "h"); - remainder = append64(remainder, nanosPerMinute, "m"); - remainder = append64(remainder, nanosPerSecond, "s"); - remainder = append64(remainder, nanosPerMilli, "ms"); - remainder = append64(remainder, nanosPerMicro, "us"); - append64(remainder, Long.ONE, "ns"); - } - return value; -}; - -/** - * Creates a new {@link Duration} instance from the binary representation of the value. - * @param {Buffer} buffer - * @returns {Duration} - */ -Duration.fromBuffer = function (buffer) { - const offset = { value: 0 }; - const months = VIntCoding.readVInt(buffer, offset).toNumber(); - const days = VIntCoding.readVInt(buffer, offset).toNumber(); - const nanoseconds = VIntCoding.readVInt(buffer, offset); - return new Duration(months, days, nanoseconds); -}; - -/** - * Creates a new {@link Duration} instance from the string representation of the value. - *

- * <p>Accepted formats:</p>
- * <ul>
- *   <li>multiple digits followed by a time unit like: 12h30m where the time unit can be:
- *     <ul>
- *       <li>{@code y}: years</li>
- *       <li>{@code m}: months</li>
- *       <li>{@code w}: weeks</li>
- *       <li>{@code d}: days</li>
- *       <li>{@code h}: hours</li>
- *       <li>{@code m}: minutes</li>
- *       <li>{@code s}: seconds</li>
- *       <li>{@code ms}: milliseconds</li>
- *       <li>{@code us} or {@code µs}: microseconds</li>
- *       <li>{@code ns}: nanoseconds</li>
- *     </ul>
- *   </li>
- *   <li>ISO 8601 format: P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W</li>
- *   <li>ISO 8601 alternative format: P[YYYY]-[MM]-[DD]T[hh]:[mm]:[ss]</li>
- * </ul>
- * @param {String} input - * @returns {Duration} - */ -Duration.fromString = function (input) { - const isNegative = input.charAt(0) === '-'; - const source = isNegative ? input.substr(1) : input; - if (source.charAt(0) === 'P') { - if (source.charAt(source.length - 1) === 'W') { - return parseIso8601WeekFormat(isNegative, source); - } - if (source.indexOf('-') > 0) { - return parseIso8601AlternativeFormat(isNegative, source); - } - return parseIso8601Format(isNegative, source); - } - return parseStandardFormat(isNegative, source); -}; - -/** - * @param {Boolean} isNegative - * @param {String} source - * @returns {Duration} - * @private - */ -function parseStandardFormat(isNegative, source) { - const builder = new Builder(isNegative); - standardRegex.lastIndex = 0; - let matches; - while ((matches = standardRegex.exec(source)) && matches.length <= 3) { - builder.add(matches[1], matches[2]); - } - return builder.build(); -} - -/** - * @param {Boolean} isNegative - * @param {String} source - * @returns {Duration} - * @private - */ -function parseIso8601Format(isNegative, source) { - const matches = iso8601Regex.exec(source); - if (!matches || matches[0] !== source) { - throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); - } - const builder = new Builder(isNegative); - if (matches[1]) { - builder.addYears(matches[2]); - } - if (matches[3]) { - builder.addMonths(matches[4]); - } - if (matches[5]) { - builder.addDays(matches[6]); - } - if (matches[7]) { - if (matches[8]) { - builder.addHours(matches[9]); - } - if (matches[10]) { - builder.addMinutes(matches[11]); - } - if (matches[12]) { - builder.addSeconds(matches[13]); - } - } - return builder.build(); -} - -/** - * @param {Boolean} isNegative - * @param {String} source - * @returns {Duration} - * @private - */ -function parseIso8601WeekFormat(isNegative, source) { - const matches = iso8601WeekRegex.exec(source); - if (!matches || matches[0] !== source) { - throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); - } - return new Builder(isNegative) - .addWeeks(matches[1]) - .build(); -} - -/** - * @param {Boolean} isNegative - * @param {String} source - * @returns {Duration} - * @private - */ -function parseIso8601AlternativeFormat(isNegative, source) { - const matches = iso8601AlternateRegex.exec(source); - if (!matches || matches[0] !== source) { - throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); - } - return new Builder(isNegative).addYears(matches[1]) - .addMonths(matches[2]) - .addDays(matches[3]) - .addHours(matches[4]) - .addMinutes(matches[5]) - .addSeconds(matches[6]) - .build(); -} - -/** - * @param {Boolean} isNegative - * @private - * @constructor - */ -function Builder(isNegative) { - this._isNegative = isNegative; - this._unitIndex = 0; - this._months = 0; - this._days = 0; - this._nanoseconds = Long.ZERO; - this._addMethods = { - 'y': this.addYears, - 'mo': this.addMonths, - 'w': this.addWeeks, - 'd': this.addDays, - 'h': this.addHours, - 'm': this.addMinutes, - 's': this.addSeconds, - 'ms': this.addMillis, - // µs - '\u00B5s': this.addMicros, - 'us': this.addMicros, - 'ns': this.addNanos - }; - this._unitByIndex = [ - null, 'years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'milliseconds', 'microseconds', - 'nanoseconds' - ]; -} - -Builder.prototype._validateOrder = function (unitIndex) { - if (unitIndex === this._unitIndex) { - throw new TypeError(util.format("Invalid duration. 
The %s are specified multiple times", this._getUnitName(unitIndex))); - } - - if (unitIndex <= this._unitIndex) { - throw new TypeError(util.format("Invalid duration. The %s should be after %s", - this._getUnitName(this._unitIndex), - this._getUnitName(unitIndex))); - } - this._unitIndex = unitIndex; -}; - -/** - * @param {Number} units - * @param {Number} monthsPerUnit - */ -Builder.prototype._validateMonths = function(units, monthsPerUnit) { - this._validate32(units, (maxInt32 - this._months) / monthsPerUnit, "months"); -}; - -/** - * @param {Number} units - * @param {Number} daysPerUnit - */ -Builder.prototype._validateDays = function(units, daysPerUnit) { - this._validate32(units, (maxInt32 - this._days) / daysPerUnit, "days"); -}; - -/** - * @param {Long} units - * @param {Long} nanosPerUnit - */ -Builder.prototype._validateNanos = function(units, nanosPerUnit) { - this._validate64(units, Long.MAX_VALUE.subtract(this._nanoseconds).divide(nanosPerUnit), "nanoseconds"); -}; - -/** - * @param {Number} units - * @param {Number} limit - * @param {String} unitName - */ -Builder.prototype._validate32 = function(units, limit, unitName) { - if (units > limit) { - throw new TypeError(util.format('Invalid duration. The total number of %s must be less or equal to %s', - unitName, - maxInt32)); - } -}; - -/** - * @param {Long} units - * @param {Long} limit - * @param {String} unitName - */ -Builder.prototype._validate64 = function(units, limit, unitName) { - if (units.greaterThan(limit)) { - throw new TypeError(util.format('Invalid duration. The total number of %s must be less or equal to %s', - unitName, - Long.MAX_VALUE.toString())); - } -}; - -Builder.prototype._getUnitName = function(unitIndex) { - const name = this._unitByIndex[+unitIndex]; - if (!name) { - throw new Error('unknown unit index: ' + unitIndex); - } - return name; -}; - -Builder.prototype.add = function (textValue, symbol) { - const addMethod = this._addMethods[symbol.toLowerCase()]; - if (!addMethod) { - throw new TypeError(util.format("Unknown duration symbol '%s'", symbol)); - } - return addMethod.call(this, textValue); -}; - -/** - * @param {String|Number} years - * @return {Builder} - */ -Builder.prototype.addYears = function (years) { - const value = +years; - this._validateOrder(1); - this._validateMonths(value, monthsPerYear); - this._months += value * monthsPerYear; - return this; -}; - -/** - * @param {String|Number} months - * @return {Builder} - */ -Builder.prototype.addMonths = function(months) { - const value = +months; - this._validateOrder(2); - this._validateMonths(value, 1); - this._months += value; - return this; -}; - -/** - * @param {String|Number} weeks - * @return {Builder} - */ -Builder.prototype.addWeeks = function(weeks) { - const value = +weeks; - this._validateOrder(3); - this._validateDays(value, daysPerWeek); - this._days += value * daysPerWeek; - return this; -}; - -/** - * @param {String|Number} days - * @return {Builder} - */ -Builder.prototype.addDays = function(days) { - const value = +days; - this._validateOrder(4); - this._validateDays(value, 1); - this._days += value; - return this; -}; - -/** - * @param {String|Long} hours - * @return {Builder} - */ -Builder.prototype.addHours = function(hours) { - const value = typeof hours === 'string' ? 
Long.fromString(hours) : hours; - this._validateOrder(5); - this._validateNanos(value, nanosPerHour); - this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerHour)); - return this; -}; - -/** - * @param {String|Long} minutes - * @return {Builder} - */ -Builder.prototype.addMinutes = function(minutes) { - const value = typeof minutes === 'string' ? Long.fromString(minutes) : minutes; - this._validateOrder(6); - this._validateNanos(value, nanosPerMinute); - this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMinute)); - return this; -}; - -/** - * @param {String|Long} seconds - * @return {Builder} - */ -Builder.prototype.addSeconds = function(seconds) { - const value = typeof seconds === 'string' ? Long.fromString(seconds) : seconds; - this._validateOrder(7); - this._validateNanos(value, nanosPerSecond); - this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerSecond)); - return this; -}; - -/** - * @param {String|Long} millis - * @return {Builder} - */ -Builder.prototype.addMillis = function(millis) { - const value = typeof millis === 'string' ? Long.fromString(millis) : millis; - this._validateOrder(8); - this._validateNanos(value, nanosPerMilli); - this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMilli)); - return this; -}; - -/** - * @param {String|Long} micros - * @return {Builder} - */ -Builder.prototype.addMicros = function(micros) { - const value = typeof micros === 'string' ? Long.fromString(micros) : micros; - this._validateOrder(9); - this._validateNanos(value, nanosPerMicro); - this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMicro)); - return this; -}; - -/** - * @param {String|Long} nanos - * @return {Builder} - */ -Builder.prototype.addNanos = function(nanos) { - const value = typeof nanos === 'string' ? Long.fromString(nanos) : nanos; - this._validateOrder(10); - this._validateNanos(value, Long.ONE); - this._nanoseconds = this._nanoseconds.add(value); - return this; -}; - -/** @return {Duration} */ -Builder.prototype.build = function () { - return (this._isNegative ? - new Duration(-this._months, -this._days, this._nanoseconds.negate()) : - new Duration(this._months, this._days, this._nanoseconds)); -}; - -module.exports = Duration; diff --git a/lib/types/duration.ts b/lib/types/duration.ts new file mode 100644 index 000000000..2065887a7 --- /dev/null +++ b/lib/types/duration.ts @@ -0,0 +1,528 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import Long from "long"; +import util from "util"; +import utils from "../utils"; + +const VIntCoding = utils.VIntCoding; + + +/** @module types */ + +// Reuse the same buffers that should perform slightly better than built-in buffer pool +const reusableBuffers = { + months: utils.allocBuffer(9), + days: utils.allocBuffer(9), + nanoseconds: utils.allocBuffer(9) +}; + +const maxInt32 = 0x7FFFFFFF; +const longOneThousand = Long.fromInt(1000); +const nanosPerMicro = longOneThousand; +const nanosPerMilli = longOneThousand.multiply(nanosPerMicro); +const nanosPerSecond = longOneThousand.multiply(nanosPerMilli); +const nanosPerMinute = Long.fromInt(60).multiply(nanosPerSecond); +const nanosPerHour = Long.fromInt(60).multiply(nanosPerMinute); +const daysPerWeek = 7; +const monthsPerYear = 12; +const standardRegex = /(\d+)(y|mo|w|d|h|s|ms|us|µs|ns|m)/gi; +const iso8601Regex = /P((\d+)Y)?((\d+)M)?((\d+)D)?(T((\d+)H)?((\d+)M)?((\d+)S)?)?/; +const iso8601WeekRegex = /P(\d+)W/; +const iso8601AlternateRegex = /P(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})/; + +/** + * Creates a new instance of {@link Duration}. + * @classdesc + * Represents a duration. A duration stores separately months, days, and seconds due to the fact that the number of + * days in a month varies, and a day can have 23 or 25 hours if a daylight saving is involved. + * @param {Number} months The number of months. + * @param {Number} days The number of days. + * @param {Number|Long} nanoseconds The number of nanoseconds. + * @constructor + */ +class Duration { + private months: number; + private days: number; + private nanoseconds: Long; + + constructor(months: number, days: number, nanoseconds: number | Long) { + this.months = months; + this.days = days; + this.nanoseconds = typeof nanoseconds === 'number' ? Long.fromNumber(nanoseconds) : nanoseconds; + } + + /** + * Returns true if the value of the Duration instance and other are the same + * @param {Duration} other + * @returns {Boolean} + */ + equals(other: Duration): boolean { + if (!(other instanceof Duration)) { + return false; + } + return this.months === other.months && + this.days === other.days && + this.nanoseconds.equals(other.nanoseconds); + } + + /** + * Serializes the duration and returns the representation of the value in bytes. + * @returns {Buffer} + */ + toBuffer(): Buffer { + const lengthMonths = VIntCoding.writeVInt(Long.fromNumber(this.months), reusableBuffers.months); + const lengthDays = VIntCoding.writeVInt(Long.fromNumber(this.days), reusableBuffers.days); + const lengthNanoseconds = VIntCoding.writeVInt(this.nanoseconds, reusableBuffers.nanoseconds); + const buffer = utils.allocBufferUnsafe(lengthMonths + lengthDays + lengthNanoseconds); + reusableBuffers.months.copy(buffer, 0, 0, lengthMonths); + let offset = lengthMonths; + reusableBuffers.days.copy(buffer, offset, 0, lengthDays); + offset += lengthDays; + reusableBuffers.nanoseconds.copy(buffer, offset, 0, lengthNanoseconds); + return buffer; + } + + /** + * Returns the string representation of the value. 
+ * @return {string} + */ + toString(): string { + let value = ''; + function append(dividend, divisor, unit) { + if (dividend === 0 || dividend < divisor) { + return dividend; + } + // string concatenation is supposed to be fasted than join() + value += (dividend / divisor).toFixed(0) + unit; + return dividend % divisor; + } + function append64(dividend, divisor, unit) { + if (dividend.equals(Long.ZERO) || dividend.lessThan(divisor)) { + return dividend; + } + // string concatenation is supposed to be fasted than join() + value += dividend.divide(divisor).toString() + unit; + return dividend.modulo(divisor); + } + if (this.months < 0 || this.days < 0 || this.nanoseconds.isNegative()) { + value = '-'; + } + let remainder = append(Math.abs(this.months), monthsPerYear, "y"); + append(remainder, 1, "mo"); + append(Math.abs(this.days), 1, "d"); + + if (!this.nanoseconds.equals(Long.ZERO)) { + const nanos = this.nanoseconds.isNegative() ? this.nanoseconds.negate() : this.nanoseconds; + remainder = append64(nanos, nanosPerHour, "h"); + remainder = append64(remainder, nanosPerMinute, "m"); + remainder = append64(remainder, nanosPerSecond, "s"); + remainder = append64(remainder, nanosPerMilli, "ms"); + remainder = append64(remainder, nanosPerMicro, "us"); + append64(remainder, Long.ONE, "ns"); + } + return value; + } + + /** + * Creates a new {@link Duration} instance from the binary representation of the value. + * @param {Buffer} buffer + * @returns {Duration} + */ + static fromBuffer(buffer: Buffer): Duration { + const offset = { value: 0 }; + const months = VIntCoding.readVInt(buffer, offset).toNumber(); + const days = VIntCoding.readVInt(buffer, offset).toNumber(); + const nanoseconds = VIntCoding.readVInt(buffer, offset); + return new Duration(months, days, nanoseconds); + } + + /** + * Creates a new {@link Duration} instance from the string representation of the value. + *

+ * <p>Accepted formats:</p>
+ * <ul>
+ *   <li>multiple digits followed by a time unit like: 12h30m where the time unit can be:
+ *     <ul>
+ *       <li>{@code y}: years</li>
+ *       <li>{@code mo}: months</li>
+ *       <li>{@code w}: weeks</li>
+ *       <li>{@code d}: days</li>
+ *       <li>{@code h}: hours</li>
+ *       <li>{@code m}: minutes</li>
+ *       <li>{@code s}: seconds</li>
+ *       <li>{@code ms}: milliseconds</li>
+ *       <li>{@code us} or {@code µs}: microseconds</li>
+ *       <li>{@code ns}: nanoseconds</li>
+ *     </ul>
+ *   </li>
+ *   <li>ISO 8601 format: P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W</li>
+ *   <li>ISO 8601 alternative format: P[YYYY]-[MM]-[DD]T[hh]:[mm]:[ss]</li>
+ * </ul>
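+ * @example
+ * // Illustrative sketches of the accepted formats listed above:
+ * Duration.fromString('12h30m');         // standard format
+ * Duration.fromString('P1Y2M3DT4H5M6S'); // ISO 8601 format
+ * Duration.fromString('P2W');            // ISO 8601 week format
+ * Duration.fromString('-2d12h');         // a leading '-' negates the whole duration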
+ * @param {String} input + * @returns {Duration} + */ + static fromString(input: string): Duration { + const isNegative = input.charAt(0) === '-'; + const source = isNegative ? input.substr(1) : input; + if (source.charAt(0) === 'P') { + if (source.charAt(source.length - 1) === 'W') { + return Duration.parseIso8601WeekFormat(isNegative, source); + } + if (source.indexOf('-') > 0) { + return Duration.parseIso8601AlternativeFormat(isNegative, source); + } + return Duration.parseIso8601Format(isNegative, source); + } + return Duration.parseStandardFormat(isNegative, source); + } + + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseStandardFormat(isNegative: boolean, source: string): Duration { + const builder = new Builder(isNegative); + standardRegex.lastIndex = 0; + let matches; + while ((matches = standardRegex.exec(source)) && matches.length <= 3) { + builder.add(matches[1], matches[2]); + } + return builder.build(); + } + + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601Format(isNegative: boolean, source: string): Duration { + const matches = iso8601Regex.exec(source); + if (!matches || matches[0] !== source) { + throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + const builder = new Builder(isNegative); + if (matches[1]) { + builder.addYears(matches[2]); + } + if (matches[3]) { + builder.addMonths(matches[4]); + } + if (matches[5]) { + builder.addDays(matches[6]); + } + if (matches[7]) { + if (matches[8]) { + builder.addHours(matches[9]); + } + if (matches[10]) { + builder.addMinutes(matches[11]); + } + if (matches[12]) { + builder.addSeconds(matches[13]); + } + } + return builder.build(); + } + + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601WeekFormat(isNegative: boolean, source: string): Duration { + const matches = iso8601WeekRegex.exec(source); + if (!matches || matches[0] !== source) { + throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + return new Builder(isNegative) + .addWeeks(matches[1]) + .build(); + } + + /** + * @param {Boolean} isNegative + * @param {String} source + * @returns {Duration} + * @private + */ + private static parseIso8601AlternativeFormat(isNegative: boolean, source: string): Duration { + const matches = iso8601AlternateRegex.exec(source); + if (!matches || matches[0] !== source) { + throw new TypeError(util.format("Unable to convert '%s' to a duration", source)); + } + return new Builder(isNegative).addYears(matches[1]) + .addMonths(matches[2]) + .addDays(matches[3]) + .addHours(matches[4]) + .addMinutes(matches[5]) + .addSeconds(matches[6]) + .build(); + } +} + +/** + * @param {Boolean} isNegative + * @private + * @constructor + */ +class Builder { + private _isNegative: boolean; + private _unitIndex: number; + private _months: number; + private _days: number; + private _nanoseconds: Long; + private _addMethods: { [key: string]: (value: string | number | Long) => Builder }; + private _unitByIndex: (string | null)[]; + + constructor(isNegative: boolean) { + this._isNegative = isNegative; + this._unitIndex = 0; + this._months = 0; + this._days = 0; + this._nanoseconds = Long.ZERO; + this._addMethods = { + 'y': this.addYears, + 'mo': this.addMonths, + 'w': this.addWeeks, + 'd': this.addDays, + 'h': this.addHours, + 'm': this.addMinutes, + 
's': this.addSeconds, + 'ms': this.addMillis, + // µs + '\u00B5s': this.addMicros, + 'us': this.addMicros, + 'ns': this.addNanos + }; + this._unitByIndex = [ + null, 'years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds', 'milliseconds', 'microseconds', + 'nanoseconds' + ]; + } + + private _validateOrder(unitIndex: number) { + if (unitIndex === this._unitIndex) { + throw new TypeError(util.format("Invalid duration. The %s are specified multiple times", this._getUnitName(unitIndex))); + } + + if (unitIndex <= this._unitIndex) { + throw new TypeError(util.format("Invalid duration. The %s should be after %s", + this._getUnitName(this._unitIndex), + this._getUnitName(unitIndex))); + } + this._unitIndex = unitIndex; + } + + /** + * @param {Number} units + * @param {Number} monthsPerUnit + */ + private _validateMonths(units: number, monthsPerUnit: number) { + this._validate32(units, (maxInt32 - this._months) / monthsPerUnit, "months"); + } + + /** + * @param {Number} units + * @param {Number} daysPerUnit + */ + private _validateDays(units: number, daysPerUnit: number) { + this._validate32(units, (maxInt32 - this._days) / daysPerUnit, "days"); + } + + /** + * @param {Long} units + * @param {Long} nanosPerUnit + */ + private _validateNanos(units: Long, nanosPerUnit: Long) { + this._validate64(units, Long.MAX_VALUE.subtract(this._nanoseconds).divide(nanosPerUnit), "nanoseconds"); + } + + /** + * @param {Number} units + * @param {Number} limit + * @param {String} unitName + */ + private _validate32(units: number, limit: number, unitName: string) { + if (units > limit) { + throw new TypeError(util.format('Invalid duration. The total number of %s must be less or equal to %s', + unitName, + maxInt32)); + } + } + + /** + * @param {Long} units + * @param {Long} limit + * @param {String} unitName + */ + private _validate64(units: Long, limit: Long, unitName: string) { + if (units.greaterThan(limit)) { + throw new TypeError(util.format('Invalid duration. 
The total number of %s must be less or equal to %s', + unitName, + Long.MAX_VALUE.toString())); + } + } + + private _getUnitName(unitIndex: number) { + const name = this._unitByIndex[+unitIndex]; + if (!name) { + throw new Error('unknown unit index: ' + unitIndex); + } + return name; + } + + add(textValue: string | number, symbol: string): Builder { + const addMethod = this._addMethods[symbol.toLowerCase()]; + if (!addMethod) { + throw new TypeError(util.format("Unknown duration symbol '%s'", symbol)); + } + return addMethod.call(this, textValue); + } + + /** + * @param {String|Number} years + * @return {Builder} + */ + addYears(years: string | number): Builder { + const value = +years; + this._validateOrder(1); + this._validateMonths(value, monthsPerYear); + this._months += value * monthsPerYear; + return this; + } + + /** + * @param {String|Number} months + * @return {Builder} + */ + addMonths(months: string | number): Builder { + const value = +months; + this._validateOrder(2); + this._validateMonths(value, 1); + this._months += value; + return this; + } + + /** + * @param {String|Number} weeks + * @return {Builder} + */ + addWeeks(weeks: string | number): Builder { + const value = +weeks; + this._validateOrder(3); + this._validateDays(value, daysPerWeek); + this._days += value * daysPerWeek; + return this; + } + + /** + * @param {String|Number} days + * @return {Builder} + */ + addDays(days: string | number): Builder { + const value = +days; + this._validateOrder(4); + this._validateDays(value, 1); + this._days += value; + return this; + } + + /** + * @param {String|Long} hours + * @return {Builder} + */ + addHours(hours: string | Long): Builder { + const value = typeof hours === 'string' ? Long.fromString(hours) : hours; + this._validateOrder(5); + this._validateNanos(value, nanosPerHour); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerHour)); + return this; + } + + /** + * @param {String|Long} minutes + * @return {Builder} + */ + addMinutes(minutes: string | Long): Builder { + const value = typeof minutes === 'string' ? Long.fromString(minutes) : minutes; + this._validateOrder(6); + this._validateNanos(value, nanosPerMinute); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMinute)); + return this; + } + + /** + * @param {String|Long} seconds + * @return {Builder} + */ + addSeconds(seconds: string | Long): Builder { + const value = typeof seconds === 'string' ? Long.fromString(seconds) : seconds; + this._validateOrder(7); + this._validateNanos(value, nanosPerSecond); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerSecond)); + return this; + } + + /** + * @param {String|Long} millis + * @return {Builder} + */ + addMillis(millis: string | Long): Builder { + const value = typeof millis === 'string' ? Long.fromString(millis) : millis; + this._validateOrder(8); + this._validateNanos(value, nanosPerMilli); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMilli)); + return this; + } + + /** + * @param {String|Long} micros + * @return {Builder} + */ + addMicros(micros: string | Long): Builder { + const value = typeof micros === 'string' ? Long.fromString(micros) : micros; + this._validateOrder(9); + this._validateNanos(value, nanosPerMicro); + this._nanoseconds = this._nanoseconds.add(value.multiply(nanosPerMicro)); + return this; + } + + /** + * @param {String|Long} nanos + * @return {Builder} + */ + addNanos(nanos: string | Long): Builder { + const value = typeof nanos === 'string' ? 
Long.fromString(nanos) : nanos; + this._validateOrder(10); + this._validateNanos(value, Long.ONE); + this._nanoseconds = this._nanoseconds.add(value); + return this; + } + + /** @return {Duration} */ + build(): Duration { + return (this._isNegative ? + new Duration(-this._months, -this._days, this._nanoseconds.negate()) : + new Duration(this._months, this._days, this._nanoseconds)); + } +} + +export default Duration; \ No newline at end of file diff --git a/lib/types/index.d.ts b/lib/types/index.d.ts deleted file mode 100644 index 3826ed807..000000000 --- a/lib/types/index.d.ts +++ /dev/null @@ -1,468 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import _Long = require('long'); -import * as stream from 'stream'; -import { ValueCallback } from '../../'; - -export namespace types { - class Long extends _Long { - - } - - enum consistencies { - any = 0x00, - one = 0x01, - two = 0x02, - three = 0x03, - quorum = 0x04, - all = 0x05, - localQuorum = 0x06, - eachQuorum = 0x07, - serial = 0x08, - localSerial = 0x09, - localOne = 0x0a - } - - enum dataTypes { - custom = 0x0000, - ascii = 0x0001, - bigint = 0x0002, - blob = 0x0003, - boolean = 0x0004, - counter = 0x0005, - decimal = 0x0006, - double = 0x0007, - float = 0x0008, - int = 0x0009, - text = 0x000a, - timestamp = 0x000b, - uuid = 0x000c, - varchar = 0x000d, - varint = 0x000e, - timeuuid = 0x000f, - inet = 0x0010, - date = 0x0011, - time = 0x0012, - smallint = 0x0013, - tinyint = 0x0014, - duration = 0x0015, - list = 0x0020, - map = 0x0021, - set = 0x0022, - udt = 0x0030, - tuple = 0x0031, - } - - enum distance { - local = 0, - remote, - ignored - } - - enum responseErrorCodes { - serverError = 0x0000, - protocolError = 0x000A, - badCredentials = 0x0100, - unavailableException = 0x1000, - overloaded = 0x1001, - isBootstrapping = 0x1002, - truncateError = 0x1003, - writeTimeout = 0x1100, - readTimeout = 0x1200, - readFailure = 0x1300, - functionFailure = 0x1400, - writeFailure = 0x1500, - syntaxError = 0x2000, - unauthorized = 0x2100, - invalid = 0x2200, - configError = 0x2300, - alreadyExists = 0x2400, - unprepared = 0x2500, - clientWriteFailure = 0x8000 - } - - enum protocolVersion { - v1 = 0x01, - v2 = 0x02, - v3 = 0x03, - v4 = 0x04, - v5 = 0x05, - v6 = 0x06, - dseV1 = 0x41, - dseV2 = 0x42, - maxSupported = dseV2, - minSupported = v1 - } - - namespace protocolVersion { - function isSupported(version: protocolVersion): boolean; - } - - const unset: object; - - class BigDecimal { - constructor(unscaledValue: number, scale: number); - - static fromBuffer(buf: Buffer): BigDecimal; - - static fromString(value: string): BigDecimal; - - static toBuffer(value: BigDecimal): Buffer; - - static fromNumber(value: number): BigDecimal; - - add(other: BigDecimal): BigDecimal; - - compare(other: BigDecimal): number; - - equals(other: BigDecimal): boolean; - - greaterThan(other: BigDecimal): boolean; - - isNegative(): boolean; - - isZero(): boolean; - - notEquals(other: BigDecimal): boolean; - - 
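For reference, a brief sketch of the input shapes the parsers above accept (illustrative values; Duration is the class defined in this file):

  const standard = Duration.fromString('12h30m');            // standard format, unit symbols: y, mo, w, d, h, m, s, ms, us/µs, ns
  const iso = Duration.fromString('P1Y2M3DT4H5M6S');         // ISO 8601 format
  const weeks = Duration.fromString('P2W');                  // ISO 8601 week format (trailing 'W')
  const alt = Duration.fromString('-P0001-02-03T04:05:06');  // ISO 8601 alternative format; the leading '-' negates the duration
  // Duration.fromString('1m2h') throws a TypeError: the Builder requires units ordered
  // from largest to smallest, and each unit may appear at most once.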
subtract(other: BigDecimal): BigDecimal; - - toNumber(): number; - - toString(): string; - - toJSON(): string; - } - - class Duration { - constructor(month: number, days: number, nanoseconds: number | Long); - - static fromBuffer(buffer: Buffer): Duration; - - static fromString(input: string): Duration; - - equals(other: Duration): boolean; - - toBuffer(): Buffer; - - toString(): string; - } - - class InetAddress { - length: number; - - version: number; - - constructor(buffer: Buffer); - - static fromString(value: string): InetAddress; - - equals(other: InetAddress): boolean; - - getBuffer(): Buffer; - - toString(): string; - - toJSON(): string; - } - - class Integer { - static ONE: Integer; - static ZERO: Integer; - - constructor(bits: Array, sign: number); - - static fromBits(bits: Array): Integer; - - static fromBuffer(bits: Buffer): Integer; - - static fromInt(value: number): Integer; - - static fromNumber(value: number): Integer; - - static fromString(str: string, opt_radix?: number): Integer; - - static toBuffer(value: Integer): Buffer; - - abs(): Integer; - - add(other: Integer): Integer; - - compare(other: Integer): number; - - divide(other: Integer): Integer; - - equals(other: Integer): boolean; - - getBits(index: number): number; - - getBitsUnsigned(index: number): number; - - getSign(): number; - - greaterThan(other: Integer): boolean; - - greaterThanOrEqual(other: Integer): boolean; - - isNegative(): boolean; - - isOdd(): boolean; - - isZero(): boolean; - - lessThan(other: Integer): boolean; - - lessThanOrEqual(other: Integer): boolean; - - modulo(other: Integer): Integer; - - multiply(other: Integer): Integer; - - negate(): Integer; - - not(): Integer; - - notEquals(other: Integer): boolean; - - or(other: Integer): Integer; - - shiftLeft(numBits: number): Integer; - - shiftRight(numBits: number): Integer; - - shorten(numBits: number): Integer; - - subtract(other: Integer): Integer; - - toInt(): number; - - toJSON(): string; - - toNumber(): number; - - toString(opt_radix?: number): string; - - xor(other: Integer): Integer; - } - - class LocalDate { - year: number; - month: number; - day: number; - - constructor(year: number, month: number, day: number); - - static fromDate(date: Date): LocalDate; - - static fromString(value: string): LocalDate; - - static fromBuffer(buffer: Buffer): LocalDate; - - static now(): LocalDate; - - static utcNow(): LocalDate; - - equals(other: LocalDate): boolean; - - inspect(): string; - - toBuffer(): Buffer; - - toJSON(): string; - - toString(): string; - } - - class LocalTime { - hour: number; - minute: number; - nanosecond: number; - second: number; - - constructor(totalNanoseconds: Long); - - static fromBuffer(value: Buffer): LocalTime; - - static fromDate(date: Date, nanoseconds: number): LocalTime; - - static fromMilliseconds(milliseconds: number, nanoseconds?: number): LocalTime; - - static fromString(value: string): LocalTime; - - static now(nanoseconds?: number): LocalTime; - - compare(other: LocalTime): boolean; - - equals(other: LocalTime): boolean; - - getTotalNanoseconds(): Long; - - inspect(): string; - - toBuffer(): Buffer; - - toJSON(): string; - - toString(): string; - } - - interface ResultSet extends Iterable, AsyncIterable { - info: { - queriedHost: string, - triedHosts: { [key: string]: any; }, - speculativeExecutions: number, - achievedConsistency: consistencies, - traceId: Uuid, - warnings: string[], - customPayload: any - }; - - columns: Array<{ name: string, type: { code: dataTypes, info: any } }>; - nextPage: (() => void) | 
null; - pageState: string; - rowLength: number; - rows: Row[]; - - first(): Row; - - wasApplied(): boolean; - } - - interface ResultStream extends stream.Readable { - buffer: Buffer; - paused: boolean; - - add(chunk: Buffer): void; - } - - interface Row { - get(columnName: string | number): any; - - keys(): string[]; - - forEach(callback: (row: Row) => void): void; - - values(): any[]; - - [key: string]: any; - } - - class TimeUuid extends Uuid { - static now(): TimeUuid; - - static now(nodeId: string | Buffer, clockId?: string | Buffer): TimeUuid; - - static now(nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; - - static now(callback: ValueCallback): void; - - static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer): TimeUuid; - - static fromDate( - date: Date, - ticks: number, - nodeId: string | Buffer, - clockId: string | Buffer, - callback: ValueCallback): void; - - static fromString(value: string): TimeUuid; - - static max(date: Date, ticks: number): TimeUuid; - - static min(date: Date, ticks: number): TimeUuid; - - getDatePrecision(): { date: Date, ticks: number }; - - getDate(): Date; - } - - class Tuple { - elements: any[]; - length: number; - - constructor(...args: any[]); - - static fromArray(elements: any[]): Tuple; - - get(index: number): any; - - toString(): string; - - toJSON(): string; - - values(): any[]; - } - - class Uuid { - constructor(buffer: Buffer); - - static fromString(value: string): Uuid; - - static random(callback: ValueCallback): void; - - static random(): Uuid; - - equals(other: Uuid): boolean; - - getBuffer(): Buffer; - - toString(): string; - - toJSON(): string; - } - - class Vector { - static get [Symbol.species](): typeof Vector; - /** - * - * @param {Float32Array | Array} elements - * @param {string?} subtype - */ - constructor(elements: Float32Array | Array, subtype?: string | null); - elements: any[]; - /** - * Returns the number of the elements. - * @type Number - */ - length: number; - subtype: string; - /** - * Returns the string representation of the vector. - * @returns {string} - */ - toString(): string; - /** - * - * @param {number} index - */ - at(index: number): any; - /** - * - * @param {(value: any, index: number, array: any[]) => void} callback - */ - forEach(callback: (value: any, index: number, array: any[]) => void): void; - /** - * @returns {string | null} get the subtype string, e.g., "float", but it's optional so it can return null - */ - getSubtype(): string | null; - /** - * - * @returns {IterableIterator} - */ - [Symbol.iterator](): IterableIterator; - } -} \ No newline at end of file diff --git a/lib/types/index.js b/lib/types/index.ts similarity index 69% rename from lib/types/index.js rename to lib/types/index.ts index f28dbb551..8e70e2772 100644 --- a/lib/types/index.js +++ b/lib/types/index.ts @@ -13,21 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const util = require('util'); -const errors = require('../errors'); -const TimeUuid = require('./time-uuid'); -const Uuid = require('./uuid'); -const protocolVersion = require('./protocol-version'); -const utils = require('../utils'); - -/** @module types */ -/** - * Long constructor, wrapper of the internal library used: {@link https://github.com/dcodeIO/long.js Long.js}. 
- * @constructor - */ -const Long = require('long'); +import Long from "long"; +import util from "util"; +import type { DataTypeInfo } from "../encoder"; +import errors from "../errors"; +import utils from "../utils"; +import BigDecimal from './big-decimal'; +import Duration from './duration'; +import InetAddress from './inet-address'; +import Integer from './integer'; +import LocalDate from './local-date'; +import LocalTime from './local-time'; +import protocolVersion from "./protocol-version"; +import ResultSet from './result-set'; +import ResultStream from './result-stream'; +import Row from './row'; +import TimeUuid from "./time-uuid"; +import Tuple from './tuple'; +import Uuid from "./uuid"; +import Vector from "./vector"; /** * Consistency levels @@ -44,19 +49,19 @@ const Long = require('long'); * @property {Number} localSerial Same as serial but confined to the data center. A write must be written conditionally to the commit log and memtable on a quorum of replica nodes in the same data center. * @property {Number} localOne Similar to One but only within the DC the coordinator is in. */ -const consistencies = { - any: 0x00, - one: 0x01, - two: 0x02, - three: 0x03, - quorum: 0x04, - all: 0x05, - localQuorum: 0x06, - eachQuorum: 0x07, - serial: 0x08, - localSerial: 0x09, - localOne: 0x0a -}; +enum consistencies { + any = 0x00, + one = 0x01, + two = 0x02, + three = 0x03, + quorum = 0x04, + all = 0x05, + localQuorum = 0x06, + eachQuorum = 0x07, + serial = 0x08, + localSerial = 0x09, + localOne = 0x0a +} /** * Mapping of consistency level codes to their string representation. @@ -105,78 +110,83 @@ consistencyToString[consistencies.localOne] = 'LOCAL_ONE'; * @property {Number} udt User-defined type. * @property {Number} tuple A sequence of values. 
*/ -const dataTypes = { - custom: 0x0000, - ascii: 0x0001, - bigint: 0x0002, - blob: 0x0003, - boolean: 0x0004, - counter: 0x0005, - decimal: 0x0006, - double: 0x0007, - float: 0x0008, - int: 0x0009, - text: 0x000a, - timestamp: 0x000b, - uuid: 0x000c, - varchar: 0x000d, - varint: 0x000e, - timeuuid: 0x000f, - inet: 0x0010, - date: 0x0011, - time: 0x0012, - smallint: 0x0013, - tinyint: 0x0014, - duration: 0x0015, - list: 0x0020, - map: 0x0021, - set: 0x0022, - udt: 0x0030, - tuple: 0x0031, +enum dataTypes { + custom = 0x0000, + ascii = 0x0001, + bigint = 0x0002, + blob = 0x0003, + boolean = 0x0004, + counter = 0x0005, + decimal = 0x0006, + double = 0x0007, + float = 0x0008, + int = 0x0009, + text = 0x000a, + timestamp = 0x000b, + uuid = 0x000c, + varchar = 0x000d, + varint = 0x000e, + timeuuid = 0x000f, + inet = 0x0010, + date = 0x0011, + time = 0x0012, + smallint = 0x0013, + tinyint = 0x0014, + duration = 0x0015, + list = 0x0020, + map = 0x0021, + set = 0x0022, + udt = 0x0030, + tuple = 0x0031 +} + +namespace dataTypes { /** * Returns the typeInfo of a given type name * @param {string} name - * @returns {import('../encoder').ColumnInfo} + * @returns {DateTypeInfo} */ - getByName: function(name) { + export function getByName(name: string): DataTypeInfo { name = name.toLowerCase(); if (name.indexOf('<') > 0) { const listMatches = /^(list|set)<(.+)>$/.exec(name); if (listMatches) { - return { code: this[listMatches[1]], info: this.getByName(listMatches[2])}; + return { code: dataTypes[listMatches[1]], info: getByName(listMatches[2]) }; } const mapMatches = /^(map)< *(.+) *, *(.+)>$/.exec(name); if (mapMatches) { - return { code: this[mapMatches[1]], info: [this.getByName(mapMatches[2]), this.getByName(mapMatches[3])]}; + return { code: dataTypes[mapMatches[1]], info: [getByName(mapMatches[2]), getByName(mapMatches[3])] }; } const udtMatches = /^(udt)<(.+)>$/.exec(name); if (udtMatches) { //udt name as raw string - return { code: this[udtMatches[1]], info: udtMatches[2]}; + return { code: dataTypes[udtMatches[1]], info: udtMatches[2] }; } const tupleMatches = /^(tuple)<(.+)>$/.exec(name); if (tupleMatches) { //tuple info as an array of types - return { code: this[tupleMatches[1]], info: tupleMatches[2].split(',').map(function (x) { - return this.getByName(x.trim()); - }, this)}; + return { + code: dataTypes[tupleMatches[1]], info: tupleMatches[2].split(',').map(function (x) { + return getByName(x.trim()); + }, dataTypes) + }; } const vectorMatches = /^vector<\s*(.+)\s*,\s*(\d+)\s*>$/.exec(name); - if(vectorMatches){ + if (vectorMatches) { return { - code: this.custom, + code: dataTypes.custom, customTypeName: 'vector', - info: [this.getByName(vectorMatches[1]), parseInt(vectorMatches[2], 10)] + info: [getByName(vectorMatches[1]), parseInt(vectorMatches[2], 10)] }; } } - const typeInfo = { code: this[name]}; + const typeInfo = { code: dataTypes[name] }; if (typeof typeInfo.code !== 'number') { throw new TypeError('Data type with name ' + name + ' not valid'); } return typeInfo; } -}; +} /** * Map of Data types by code @@ -186,7 +196,7 @@ const dataTypes = { */ const _dataTypesByCode = (function () { /**@type {Record} */ - const result = {}; + const result: Record = {}; for (const key in dataTypes) { if (!dataTypes.hasOwnProperty(key)) { continue; @@ -207,11 +217,11 @@ const _dataTypesByCode = (function () { * @property {Number} remote A remote node. * @property {Number} ignored A node that is meant to be ignored. 
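A rough usage sketch of the converted getByName above, now an exported function inside the dataTypes namespace instead of a method on the object literal (illustrative inputs):

  const mapType = dataTypes.getByName('map<uuid, text>');
  // { code: dataTypes.map, info: [ { code: dataTypes.uuid }, { code: dataTypes.text } ] }
  const vectorType = dataTypes.getByName('vector<float, 3>');
  // { code: dataTypes.custom, customTypeName: 'vector', info: [ { code: dataTypes.float }, 3 ] }
  dataTypes.getByName('banana');
  // throws TypeError: Data type with name banana not valid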
*/ -const distance = { - local: 0, - remote: 1, - ignored: 2 -}; +enum distance { + local= 0, + remote= 1, + ignored= 2 +} /** * An integer byte that distinguish the actual message from and to Cassandra @@ -280,27 +290,27 @@ const protocolEvents = { * @property {Number} alreadyExists The query attempted to create a schema element (i.e. keyspace, table) that already exists. * @property {Number} unprepared Can be thrown while a prepared statement tries to be executed if the provided statement is not known by the coordinator. */ -const responseErrorCodes = { - serverError: 0x0000, - protocolError: 0x000A, - badCredentials: 0x0100, - unavailableException: 0x1000, - overloaded: 0x1001, - isBootstrapping: 0x1002, - truncateError: 0x1003, - writeTimeout: 0x1100, - readTimeout: 0x1200, - readFailure: 0x1300, - functionFailure: 0x1400, - writeFailure: 0x1500, - syntaxError: 0x2000, - unauthorized: 0x2100, - invalid: 0x2200, - configError: 0x2300, - alreadyExists: 0x2400, - unprepared: 0x2500, - clientWriteFailure: 0x8000, -}; +enum responseErrorCodes { + serverError = 0x0000, + protocolError = 0x000A, + badCredentials = 0x0100, + unavailableException = 0x1000, + overloaded = 0x1001, + isBootstrapping = 0x1002, + truncateError = 0x1003, + writeTimeout = 0x1100, + readTimeout = 0x1200, + readFailure = 0x1300, + functionFailure = 0x1400, + writeFailure = 0x1500, + syntaxError = 0x2000, + unauthorized = 0x2100, + invalid = 0x2200, + configError = 0x2300, + alreadyExists = 0x2400, + unprepared = 0x2500, + clientWriteFailure = 0x8000 +} /** * Type of result included in a response @@ -333,7 +343,7 @@ const frameFlags = { * Use this field if you want to set a parameter to unset. Valid for Cassandra 2.2 and above. *

*/ -const unset = Object.freeze({'unset': true}); +const unset = Object.freeze({'unset': true} as const); /** * A long representing the value 1000 @@ -356,7 +366,7 @@ let _timestampTicks = 0; * @param {Number} [offset] * @deprecated Use [TimeUuid]{@link module:types~TimeUuid} instead */ -function timeuuid(options, buffer, offset) { +function timeuuid(options: { msecs; node; clockseq; nsecs; }, buffer: Buffer, offset: number) { let date; let ticks; let nodeId; @@ -451,89 +461,95 @@ function getDataTypeNameByCode(item) { } //classes - -/** - * Represents a frame header that could be used to read from a Buffer or to write to a Buffer - * @ignore - * @param {Number} version Protocol version - * @param {Number} flags - * @param {Number} streamId - * @param {Number} opcode - * @param {Number} bodyLength - * @constructor - */ -function FrameHeader(version, flags, streamId, opcode, bodyLength) { - this.version = version; - this.flags = flags; - this.streamId = streamId; - this.opcode = opcode; - this.bodyLength = bodyLength; -} - -/** - * The length of the header of the frame based on the protocol version - * @returns {Number} - */ -FrameHeader.size = function (version) { - if (protocolVersion.uses2BytesStreamIds(version)) { - return 9; - } - return 8; -}; - -/** - * Gets the protocol version based on the first byte of the header - * @param {Buffer} buffer - * @returns {Number} - */ -FrameHeader.getProtocolVersion = function (buffer) { - return buffer[0] & 0x7F; -}; - -/** - * @param {Buffer} buf - * @param {Number} [offset] - * @returns {FrameHeader} - */ -FrameHeader.fromBuffer = function (buf, offset) { - let streamId = 0; - if (!offset) { - offset = 0; +/** @internal @ignore */ +class FrameHeader { + version: number; + flags: number; + streamId: number; + opcode: number; + bodyLength: number; + /** + * Represents a frame header that could be used to read from a Buffer or to write to a Buffer + * @ignore + * @param {Number} version Protocol version + * @param {Number} flags + * @param {Number} streamId + * @param {Number} opcode + * @param {Number} bodyLength + * @constructor + */ + constructor(version: number, flags: number, streamId: number, opcode: number, bodyLength: number) { + this.version = version; + this.flags = flags; + this.streamId = streamId; + this.opcode = opcode; + this.bodyLength = bodyLength; } - const version = buf[offset++] & 0x7F; - const flags = buf.readUInt8(offset++); - if (!protocolVersion.uses2BytesStreamIds(version)) { - streamId = buf.readInt8(offset++); + /** + * The length of the header of the frame based on the protocol version + * @returns {Number} + */ + static size(version): number { + if (protocolVersion.uses2BytesStreamIds(version)) { + return 9; + } + return 8; } - else { - streamId = buf.readInt16BE(offset); - offset += 2; + /** + * Gets the protocol version based on the first byte of the header + * @param {Buffer} buffer + * @returns {Number} + */ + static getProtocolVersion(buffer: Buffer): number { + return buffer[0] & 0x7F; } - return new FrameHeader(version, flags, streamId, buf.readUInt8(offset++), buf.readUInt32BE(offset)); -}; - -/** @returns {Buffer} */ -FrameHeader.prototype.toBuffer = function () { - const buf = utils.allocBufferUnsafe(FrameHeader.size(this.version)); - buf.writeUInt8(this.version, 0); - buf.writeUInt8(this.flags, 1); - let offset = 3; - if (!protocolVersion.uses2BytesStreamIds(this.version)) { - buf.writeInt8(this.streamId, 2); + /** + * @param {Buffer} buf + * @param {Number} [offset] + * @returns {FrameHeader} + */ + static 
fromBuffer(buf: Buffer, offset?: number): FrameHeader { + let streamId = 0; + if (!offset) { + offset = 0; + } + const version = buf[offset++] & 0x7F; + const flags = buf.readUInt8(offset++); + if (!protocolVersion.uses2BytesStreamIds(version)) { + streamId = buf.readInt8(offset++); + } + else { + streamId = buf.readInt16BE(offset); + offset += 2; + } + return new FrameHeader(version, flags, streamId, buf.readUInt8(offset++), buf.readUInt32BE(offset)); } - else { - buf.writeInt16BE(this.streamId, 2); - offset = 4; + /** @returns {Buffer} */ + toBuffer(): Buffer { + const buf = utils.allocBufferUnsafe(FrameHeader.size(this.version)); + buf.writeUInt8(this.version, 0); + buf.writeUInt8(this.flags, 1); + let offset = 3; + if (!protocolVersion.uses2BytesStreamIds(this.version)) { + buf.writeInt8(this.streamId, 2); + } + else { + buf.writeInt16BE(this.streamId, 2); + offset = 4; + } + buf.writeUInt8(this.opcode, offset++); + buf.writeUInt32BE(this.bodyLength, offset); + return buf; } - buf.writeUInt8(this.opcode, offset++); - buf.writeUInt32BE(this.bodyLength, offset); - return buf; -}; +} + +// These methods are internal to the driver, +// therefore they don't have type supports and TypeScript errors are ignored /** * Returns a long representation. * Used internally for deserialization */ -Long.fromBuffer = function (value) { +Long["fromBuffer"] = function (value) { if (!(value instanceof Buffer)) { throw new TypeError('Expected Buffer, obtained ' + util.inspect(value)); } @@ -544,7 +560,7 @@ Long.fromBuffer = function (value) { * Returns a big-endian buffer representation of the Long instance * @param {Long} value */ -Long.toBuffer = function (value) { +Long["toBuffer"] = function (value) { if (!(value instanceof Long)) { throw new TypeError('Expected Long, obtained ' + util.inspect(value)); } @@ -558,7 +574,7 @@ Long.toBuffer = function (value) { * Provide the name of the constructor and the string representation * @returns {string} */ -Long.prototype.inspect = function () { +Long.prototype["inspect"] = function () { return 'Long: ' + this.toString(); }; @@ -566,7 +582,7 @@ Long.prototype.inspect = function () { * Returns the string representation. * Method used by the native JSON.stringify() to serialize this instance */ -Long.prototype.toJSON = function () { +Long.prototype["toJSON"] = function () { return this.toString(); }; @@ -575,6 +591,7 @@ Long.prototype.toJSON = function () { * @param {Date} [date] The date to generate the value, if not provided it will use the current date. * @param {Number} [microseconds] A number from 0 to 999 used to build the microseconds part of the date. 
* @returns {Long} + * @internal @ignore */ function generateTimestamp(date, microseconds) { if (!date) { @@ -600,49 +617,78 @@ function generateTimestamp(date, microseconds) { //error classes /** @private */ -function QueryParserError(e) { - QueryParserError.super_.call(this, e.message, this.constructor); - this.internalError = e; +class TimeoutError extends errors.DriverError { + /** + * @param {string} message + */ + constructor(message: string) { + super(message); + this.info = 'Represents an error that happens when the maximum amount of time for an operation passed.'; + } } -util.inherits(QueryParserError, errors.DriverError); -/** @private */ -function TimeoutError (message) { - TimeoutError.super_.call(this, message, this.constructor); - this.info = 'Represents an error that happens when the maximum amount of time for an operation passed.'; -} -util.inherits(TimeoutError, errors.DriverError); +const DriverError = errors.DriverError; + +export default { + opcodes, + consistencies, + consistencyToString, + dataTypes, + /** @internal */ + getDataTypeNameByCode, + distance, + frameFlags, + protocolEvents, + protocolVersion, + responseErrorCodes, + resultKind, + timeuuid, + uuid, + BigDecimal, + Duration, + /** @internal */ + FrameHeader, + InetAddress, + Integer, + LocalDate, + LocalTime, + Long, + ResultSet, + ResultStream, + Row, + // have to export DriverError here for backward compatibility + DriverError, + TimeoutError, + TimeUuid, + Tuple, + Uuid, + unset, + /** @internal */ + generateTimestamp, + Vector, +}; -exports.opcodes = opcodes; -exports.consistencies = consistencies; -exports.consistencyToString = consistencyToString; -exports.dataTypes = dataTypes; -exports.getDataTypeNameByCode = getDataTypeNameByCode; -exports.distance = distance; -exports.frameFlags = frameFlags; -exports.protocolEvents = protocolEvents; -exports.protocolVersion = protocolVersion; -exports.responseErrorCodes = responseErrorCodes; -exports.resultKind = resultKind; -exports.timeuuid = timeuuid; -exports.uuid = uuid; -exports.BigDecimal = require('./big-decimal'); -exports.Duration = require('./duration'); -exports.FrameHeader = FrameHeader; -exports.InetAddress = require('./inet-address'); -exports.Integer = require('./integer'); -exports.LocalDate = require('./local-date'); -exports.LocalTime = require('./local-time'); -exports.Long = Long; -exports.ResultSet = require('./result-set'); -exports.ResultStream = require('./result-stream'); -exports.Row = require('./row'); -//export DriverError for backward-compatibility -exports.DriverError = errors.DriverError; -exports.TimeoutError = TimeoutError; -exports.TimeUuid = TimeUuid; -exports.Tuple = require('./tuple'); -exports.Vector = require('./vector'); -exports.Uuid = Uuid; -exports.unset = unset; -exports.generateTimestamp = generateTimestamp; +export { + BigDecimal, consistencies, + consistencyToString, + dataTypes, + distance, Duration, frameFlags, + /** @internal */ + FrameHeader, + /** @internal */ + generateTimestamp, + /** @internal */ + getDataTypeNameByCode, InetAddress, + Integer, + LocalDate, + LocalTime, + Long, opcodes, protocolEvents, + protocolVersion, + responseErrorCodes, + resultKind, ResultSet, + ResultStream, + Row, + TimeoutError, timeuuid, //TODO: why TimeoutError is here instead of lib/errors?? 
+ TimeUuid, + Tuple, unset, uuid, Uuid, Vector +}; diff --git a/lib/types/inet-address.js b/lib/types/inet-address.js deleted file mode 100644 index 1647deaf6..000000000 --- a/lib/types/inet-address.js +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const utils = require('../utils'); - -/** @module types */ -/** - * Creates a new instance of InetAddress - * @class - * @classdesc Represents an v4 or v6 Internet Protocol (IP) address. - * @param {Buffer} buffer - * @constructor - */ -function InetAddress(buffer) { - if (!(buffer instanceof Buffer) || (buffer.length !== 4 && buffer.length !== 16)) { - throw new TypeError('The ip address must contain 4 or 16 bytes'); - } - - /** - * Immutable buffer that represents the IP address - * @type Array - */ - this.buffer = buffer; - - /** - * Returns the length of the underlying buffer - * @type Number - */ - this.length = buffer.length; - - /** - * Returns the Ip version (4 or 6) - * @type Number - */ - this.version = buffer.length === 4 ? 4 : 6; -} - -/** - * Parses the string representation and returns an Ip address - * @param {String} value - */ -InetAddress.fromString = function (value) { - if (!value) { - return new InetAddress(utils.allocBufferFromArray([0, 0, 0, 0])); - } - const ipv4Pattern = /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/; - const ipv6Pattern = /^[\da-f:.]+$/i; - let parts; - if (ipv4Pattern.test(value)) { - parts = value.split('.'); - return new InetAddress(utils.allocBufferFromArray(parts)); - } - if (!ipv6Pattern.test(value)) { - throw new TypeError('Value could not be parsed as InetAddress: ' + value); - } - parts = value.split(':'); - if (parts.length < 3) { - throw new TypeError('Value could not be parsed as InetAddress: ' + value); - } - const buffer = utils.allocBufferUnsafe(16); - let filling = 8 - parts.length + 1; - let applied = false; - let offset = 0; - const embeddedIp4 = ipv4Pattern.test(parts[parts.length - 1]); - if (embeddedIp4) { - // Its IPv6 address with an embedded IPv4 address: - // subtract 1 from the potential empty filling as ip4 contains 4 bytes instead of 2 of a ipv6 section - filling -= 1; - } - function writeItem(uIntValue) { - buffer.writeUInt8(+uIntValue, offset++); - } - for (let i = 0; i < parts.length; i++) { - const item = parts[i]; - if (item) { - if (embeddedIp4 && i === parts.length - 1) { - item.split('.').forEach(writeItem); - break; - } - buffer.writeUInt16BE(parseInt(item, 16), offset); - offset = offset + 2; - continue; - } - //its an empty string - if (applied) { - //there could be 2 occurrences of empty string - filling = 1; - } - applied = true; - for (let j = 0; j < filling; j++) { - buffer[offset++] = 0; - buffer[offset++] = 0; - } - } - if (embeddedIp4 && !isValidIPv4Mapped(buffer)) { - throw new TypeError('Only IPv4-Mapped IPv6 addresses are allowed as IPv6 address with embedded IPv4 address'); - } - return new InetAddress(buffer); -}; - -/** - * Compares 2 addresses and returns true if 
the underlying bytes are the same - * @param {InetAddress} other - * @returns {Boolean} - */ -InetAddress.prototype.equals = function (other) { - if (!(other instanceof InetAddress)) { - return false; - } - return (this.buffer.length === other.buffer.length && - this.buffer.toString('hex') === other.buffer.toString('hex')); -}; - -/** - * Returns the underlying buffer - * @returns {Buffer} - */ -InetAddress.prototype.getBuffer = function () { - return this.buffer; -}; - -/** - * Provide the name of the constructor and the string representation - * @returns {string} - */ -InetAddress.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * Returns the string representation of the IP address. - *

For v4 IP addresses, a string in the form of d.d.d.d is returned.
- *
- * For v6 IP addresses, a string in the form of x:x:x:x:x:x:x:x is returned, where the 'x's are the hexadecimal - * values of the eight 16-bit pieces of the address, according to rfc5952. - * In cases where there is more than one field of only zeros, it can be shortened. For example, 2001:0db8:0:0:0:1:0:1 - * will be expressed as 2001:0db8::1:0:1. - *
- * @param {String} [encoding] - * @returns {String} - */ -InetAddress.prototype.toString = function (encoding) { - if (encoding === 'hex') { - //backward compatibility: behave in the same way as the buffer - return this.buffer.toString('hex'); - } - if (this.buffer.length === 4) { - return ( - this.buffer[0] + '.' + - this.buffer[1] + '.' + - this.buffer[2] + '.' + - this.buffer[3] - ); - } - let start = -1; - const longest = { length: 0, start: -1}; - function checkLongest (i) { - if (start >= 0) { - //close the group - const length = i - start; - if (length > longest.length) { - longest.length = length; - longest.start = start; - start = -1; - } - } - } - //get the longest 16-bit group of zeros - for (let i = 0; i < this.buffer.length; i = i + 2) { - if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) { - //its a group of zeros - if (start < 0) { - start = i; - } - - // at the end of the buffer, make a final call to checkLongest. - if(i === this.buffer.length - 2) { - checkLongest(i+2); - } - continue; - } - //its a group of non-zeros - checkLongest(i); - } - - let address = ''; - for (let h = 0; h < this.buffer.length; h = h + 2) { - if (h === longest.start) { - address += ':'; - continue; - } - if (h < (longest.start + longest.length) && h > longest.start) { - //its a group of zeros - continue; - } - if (address.length > 0) { - address += ':'; - } - address += ((this.buffer[h] << 8) | this.buffer[h+1]).toString(16); - } - if (address.charAt(address.length-1) === ':') { - address += ':'; - } - return address; -}; - -/** - * Returns the string representation. - * Method used by the native JSON.stringify() to serialize this instance. - */ -InetAddress.prototype.toJSON = function () { - return this.toString(); -}; - -/** - * Validates for a IPv4-Mapped IPv6 according to https://tools.ietf.org/html/rfc4291#section-2.5.5 - * @private - * @param {Buffer} buffer - */ -function isValidIPv4Mapped(buffer) { - // check the form - // | 80 bits | 16 | 32 bits - // +----------------+----+------------- - // |0000........0000|FFFF| IPv4 address - - for (let i = 0; i < buffer.length - 6; i++) { - if (buffer[i] !== 0) { - return false; - } - } - return !(buffer[10] !== 255 || buffer[11] !== 255); -} - -module.exports = InetAddress; \ No newline at end of file diff --git a/lib/types/inet-address.ts b/lib/types/inet-address.ts new file mode 100644 index 000000000..ce263e612 --- /dev/null +++ b/lib/types/inet-address.ts @@ -0,0 +1,255 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import utils from "../utils"; + +/** @module types */ +/** + * @class + * @classdesc Represents an v4 or v6 Internet Protocol (IP) address. 
+ */ +class InetAddress { + private buffer: Buffer; + length: number; + version: number; + + /** + * Creates a new instance of InetAddress + * @param {Buffer} buffer + * @constructor + */ + constructor(buffer: Buffer) { + if (!(buffer instanceof Buffer) || (buffer.length !== 4 && buffer.length !== 16)) { + throw new TypeError('The ip address must contain 4 or 16 bytes'); + } + + /** + * Immutable buffer that represents the IP address + * @type Array + */ + this.buffer = buffer; + + /** + * Returns the length of the underlying buffer + * @type Number + */ + this.length = buffer.length; + + /** + * Returns the Ip version (4 or 6) + * @type Number + */ + this.version = buffer.length === 4 ? 4 : 6; + } + + /** + * Parses the string representation and returns an Ip address + * @param {String} value + */ + static fromString(value: string): InetAddress { + if (!value) { + return new InetAddress(utils.allocBufferFromArray([0, 0, 0, 0])); + } + const ipv4Pattern = /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/; + const ipv6Pattern = /^[\da-f:.]+$/i; + let parts; + if (ipv4Pattern.test(value)) { + parts = value.split('.'); + return new InetAddress(utils.allocBufferFromArray(parts)); + } + if (!ipv6Pattern.test(value)) { + throw new TypeError('Value could not be parsed as InetAddress: ' + value); + } + parts = value.split(':'); + if (parts.length < 3) { + throw new TypeError('Value could not be parsed as InetAddress: ' + value); + } + const buffer = utils.allocBufferUnsafe(16); + let filling = 8 - parts.length + 1; + let applied = false; + let offset = 0; + const embeddedIp4 = ipv4Pattern.test(parts[parts.length - 1]); + if (embeddedIp4) { + // Its IPv6 address with an embedded IPv4 address: + // subtract 1 from the potential empty filling as ip4 contains 4 bytes instead of 2 of a ipv6 section + filling -= 1; + } + function writeItem(uIntValue) { + buffer.writeUInt8(+uIntValue, offset++); + } + for (let i = 0; i < parts.length; i++) { + const item = parts[i]; + if (item) { + if (embeddedIp4 && i === parts.length - 1) { + item.split('.').forEach(writeItem); + break; + } + buffer.writeUInt16BE(parseInt(item, 16), offset); + offset = offset + 2; + continue; + } + //its an empty string + if (applied) { + //there could be 2 occurrences of empty string + filling = 1; + } + applied = true; + for (let j = 0; j < filling; j++) { + buffer[offset++] = 0; + buffer[offset++] = 0; + } + } + if (embeddedIp4 && !InetAddress.isValidIPv4Mapped(buffer)) { + throw new TypeError('Only IPv4-Mapped IPv6 addresses are allowed as IPv6 address with embedded IPv4 address'); + } + return new InetAddress(buffer); + } + + /** + * Compares 2 addresses and returns true if the underlying bytes are the same + * @param {InetAddress} other + * @returns {Boolean} + */ + equals(other: InetAddress): boolean { + if (!(other instanceof InetAddress)) { + return false; + } + return (this.buffer.length === other.buffer.length && + this.buffer.toString('hex') === other.buffer.toString('hex')); + } + + /** + * Returns the underlying buffer + * @returns {Buffer} + */ + getBuffer(): Buffer { + return this.buffer; + } + + /** + * Provide the name of the constructor and the string representation + * @returns {string} + * @internal + */ + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * Returns the string representation of the IP address. + *

For v4 IP addresses, a string in the form of d.d.d.d is returned.
+ *
+ * For v6 IP addresses, a string in the form of x:x:x:x:x:x:x:x is returned, where the 'x's are the hexadecimal + * values of the eight 16-bit pieces of the address, according to rfc5952. + * In cases where there is more than one field of only zeros, it can be shortened. For example, 2001:0db8:0:0:0:1:0:1 + * will be expressed as 2001:0db8::1:0:1. + *
+ * @param {String} [encoding] If set to 'hex', the hex representation of the buffer is returned. + * @returns {String} + */ + toString(encoding?: string): string { + if (encoding === 'hex') { + //backward compatibility: behave in the same way as the buffer + return this.buffer.toString('hex'); + } + if (this.buffer.length === 4) { + return ( + this.buffer[0] + '.' + + this.buffer[1] + '.' + + this.buffer[2] + '.' + + this.buffer[3] + ); + } + let start = -1; + const longest = { length: 0, start: -1}; + function checkLongest (i) { + if (start >= 0) { + //close the group + const length = i - start; + if (length > longest.length) { + longest.length = length; + longest.start = start; + start = -1; + } + } + } + //get the longest 16-bit group of zeros + for (let i = 0; i < this.buffer.length; i = i + 2) { + if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) { + //its a group of zeros + if (start < 0) { + start = i; + } + + // at the end of the buffer, make a final call to checkLongest. + if(i === this.buffer.length - 2) { + checkLongest(i+2); + } + continue; + } + //its a group of non-zeros + checkLongest(i); + } + + let address = ''; + for (let h = 0; h < this.buffer.length; h = h + 2) { + if (h === longest.start) { + address += ':'; + continue; + } + if (h < (longest.start + longest.length) && h > longest.start) { + //its a group of zeros + continue; + } + if (address.length > 0) { + address += ':'; + } + address += ((this.buffer[h] << 8) | this.buffer[h+1]).toString(16); + } + if (address.charAt(address.length-1) === ':') { + address += ':'; + } + return address; + } + + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string { + return this.toString(); + } + + /** + * Validates for a IPv4-Mapped IPv6 according to https://tools.ietf.org/html/rfc4291#section-2.5.5 + * @private + * @param {Buffer} buffer + */ + private static isValidIPv4Mapped(buffer: Buffer): boolean { + // check the form + // | 80 bits | 16 | 32 bits + // +----------------+----+------------- + // |0000........0000|FFFF| IPv4 address + + for (let i = 0; i < buffer.length - 6; i++) { + if (buffer[i] !== 0) { + return false; + } + } + return !(buffer[10] !== 255 || buffer[11] !== 255); + } +} + +export default InetAddress; \ No newline at end of file diff --git a/lib/types/integer.js b/lib/types/integer.js deleted file mode 100644 index d70d789a1..000000000 --- a/lib/types/integer.js +++ /dev/null @@ -1,855 +0,0 @@ -// Copyright 2009 The Closure Library Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS-IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/** @module types */ - -var utils = require('../utils'); - -/** - * Constructs a two's-complement integer an array containing bits of the - * integer in 32-bit (signed) pieces, given in little-endian order (i.e., - * lowest-order bits in the first piece), and the sign of -1 or 0. - * - * See the from* functions below for other convenient ways of constructing - * Integers. 
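A short usage sketch for the InetAddress class defined above (illustrative addresses; behaviour follows the fromString and toString implementations shown):

  const v4 = InetAddress.fromString('10.0.0.1');
  v4.version;            // 4
  v4.toString();         // '10.0.0.1'
  v4.toString('hex');    // '0a000001' (hex output kept for backward compatibility with the Buffer behaviour)
  const v6 = InetAddress.fromString('2001:0db8:0:0:0:1:0:1');
  v6.toString();         // '2001:db8::1:0:1' (the longest run of zero groups is collapsed, per rfc5952)
  InetAddress.fromString('::ffff:192.168.0.1');   // accepted: IPv4-mapped IPv6
  InetAddress.fromString('::1:192.168.0.1');      // throws TypeError: only IPv4-mapped IPv6 may embed an IPv4 address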
- * - * The internal representation of an integer is an array of 32-bit signed - * pieces, along with a sign (0 or -1) that indicates the contents of all the - * other 32-bit pieces out to infinity. We use 32-bit pieces because these are - * the size of integers on which Javascript performs bit-operations. For - * operations like addition and multiplication, we split each number into 16-bit - * pieces, which can easily be multiplied within Javascript's floating-point - * representation without overflow or change in sign. - * - * @constructor - * @param {Array.} bits Array containing the bits of the number. - * @param {number} sign The sign of the number: -1 for negative and 0 positive. - * @final - */ -function Integer (bits, sign) { - /** - * @type {!Array.} - * @private - */ - this.bits_ = []; - - /** - * @type {number} - * @private - */ - this.sign_ = sign; - - // Copy the 32-bit signed integer values passed in. We prune out those at the - // top that equal the sign since they are redundant. - var top = true; - for (var i = bits.length - 1; i >= 0; i--) { - var val = bits[i] | 0; - if (!top || val != sign) { - this.bits_[i] = val; - top = false; - } - } -} - - -// NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the -// from* methods on which they depend. - - -/** - * A cache of the Integer representations of small integer values. - * @type {!Object} - * @private - */ -Integer.IntCache_ = {}; - - -/** - * Returns an Integer representing the given (32-bit) integer value. - * @param {number} value A 32-bit integer value. - * @return {!Integer} The corresponding Integer value. - */ -Integer.fromInt = function(value) { - if (-128 <= value && value < 128) { - var cachedObj = Integer.IntCache_[value]; - if (cachedObj) { - return cachedObj; - } - } - - var obj = new Integer([value | 0], value < 0 ? -1 : 0); - if (-128 <= value && value < 128) { - Integer.IntCache_[value] = obj; - } - return obj; -}; - - -/** - * Returns an Integer representing the given value, provided that it is a finite - * number. Otherwise, zero is returned. - * @param {number} value The value in question. - * @return {!Integer} The corresponding Integer value. - */ -Integer.fromNumber = function(value) { - if (isNaN(value) || !isFinite(value)) { - return Integer.ZERO; - } else if (value < 0) { - return Integer.fromNumber(-value).negate(); - } else { - var bits = []; - var pow = 1; - for (var i = 0; value >= pow; i++) { - bits[i] = (value / pow) | 0; - pow *= Integer.TWO_PWR_32_DBL_; - } - return new Integer(bits, 0); - } -}; - - -/** - * Returns a Integer representing the value that comes by concatenating the - * given entries, each is assumed to be 32 signed bits, given in little-endian - * order (lowest order bits in the lowest index), and sign-extending the highest - * order 32-bit value. - * @param {Array.} bits The bits of the number, in 32-bit signed pieces, - * in little-endian order. - * @return {!Integer} The corresponding Integer value. - */ -Integer.fromBits = function(bits) { - var high = bits[bits.length - 1]; - //noinspection JSBitwiseOperatorUsage - return new Integer(bits, high & (1 << 31) ? -1 : 0); -}; - - -/** - * Returns an Integer representation of the given string, written using the - * given radix. - * @param {string} str The textual representation of the Integer. - * @param {number=} opt_radix The radix in which the text is written. - * @return {!Integer} The corresponding Integer value. 
- */ -Integer.fromString = function(str, opt_radix) { - if (str.length == 0) { - throw TypeError('number format error: empty string'); - } - - var radix = opt_radix || 10; - if (radix < 2 || 36 < radix) { - throw Error('radix out of range: ' + radix); - } - - if (str.charAt(0) == '-') { - return Integer.fromString(str.substring(1), radix).negate(); - } else if (str.indexOf('-') >= 0) { - throw TypeError('number format error: interior "-" character'); - } - - // Do several (8) digits each time through the loop, so as to - // minimize the calls to the very expensive emulated div. - var radixToPower = Integer.fromNumber(Math.pow(radix, 8)); - - var result = Integer.ZERO; - for (var i = 0; i < str.length; i += 8) { - var size = Math.min(8, str.length - i); - var value = parseInt(str.substring(i, i + size), radix); - if (size < 8) { - var power = Integer.fromNumber(Math.pow(radix, size)); - result = result.multiply(power).add(Integer.fromNumber(value)); - } else { - result = result.multiply(radixToPower); - result = result.add(Integer.fromNumber(value)); - } - } - return result; -}; - -/** - * Returns an Integer representation of a given big endian Buffer. - * The internal representation of bits contains bytes in groups of 4 - * @param {Buffer} buf - * @returns {Integer} - */ -Integer.fromBuffer = function (buf) { - var bits = new Array(Math.ceil(buf.length / 4)); - //noinspection JSBitwiseOperatorUsage - var sign = buf[0] & (1 << 7) ? -1 : 0; - for (var i = 0; i < bits.length; i++) { - var offset = buf.length - ((i + 1) * 4); - var value; - if (offset < 0) { - //The buffer length is not multiple of 4 - offset = offset + 4; - value = 0; - for (var j = 0; j < offset; j++) { - var byte = buf[j]; - if (sign === -1) { - //invert the bits - byte = ~byte & 0xff; - } - value = value | (byte << (offset - j - 1) * 8); - } - if (sign === -1) { - //invert all the bits - value = ~value; - } - } - else { - value = buf.readInt32BE(offset); - } - bits[i] = value; - } - return new Integer(bits, sign); -}; - -/** - * Returns a big endian buffer representation of an Integer. - * Internally the bits are represented using 4 bytes groups (numbers), - * in the Buffer representation there might be the case where we need less than the 4 bytes. 
- * For example: 0x00000001 -> '01', 0xFFFFFFFF -> 'FF', 0xFFFFFF01 -> 'FF01' - * @param {Integer} value - * @returns {Buffer} -*/ -Integer.toBuffer = function (value) { - var sign = value.sign_; - var bits = value.bits_; - if (bits.length === 0) { - //[0] or [0xffffffff] - return utils.allocBufferFromArray([value.sign_]); - } - //the high bits might need to be represented in less than 4 bytes - var highBits = bits[bits.length-1]; - if (sign === -1) { - highBits = ~highBits; - } - var high = []; - if (highBits >>> 24 > 0) { - high.push((highBits >> 24) & 0xff); - } - if (highBits >>> 16 > 0) { - high.push((highBits >> 16) & 0xff); - } - if (highBits >>> 8 > 0) { - high.push((highBits >> 8) & 0xff); - } - high.push(highBits & 0xff); - if (sign === -1) { - //The byte containing the sign bit got removed - if (high[0] >> 7 !== 0) { - //it is going to be negated - high.unshift(0); - } - } - else if (high[0] >> 7 !== 0) { - //its positive but it lost the byte containing the sign bit - high.unshift(0); - } - var buf = utils.allocBufferUnsafe(high.length + ((bits.length-1) * 4)); - for (var j = 0; j < high.length; j++) { - var b = high[j]; - if (sign === -1) { - buf[j] = ~b; - } - else { - buf[j] = b; - } - } - for (var i = 0; i < bits.length - 1; i++) { - var group = bits[bits.length - 2 - i]; - var offset = high.length + i * 4; - buf.writeInt32BE(group, offset); - } - return buf; -}; - - -/** - * A number used repeatedly in calculations. This must appear before the first - * call to the from* functions below. - * @type {number} - * @private - */ -Integer.TWO_PWR_32_DBL_ = (1 << 16) * (1 << 16); - - -/** @type {!Integer} */ -Integer.ZERO = Integer.fromInt(0); - - -/** @type {!Integer} */ -Integer.ONE = Integer.fromInt(1); - - -/** - * @type {!Integer} - * @private - */ -Integer.TWO_PWR_24_ = Integer.fromInt(1 << 24); - - -/** - * Returns the value, assuming it is a 32-bit integer. - * @return {number} The corresponding int value. - */ -Integer.prototype.toInt = function() { - return this.bits_.length > 0 ? this.bits_[0] : this.sign_; -}; - - -/** @return {number} The closest floating-point representation to this value. */ -Integer.prototype.toNumber = function() { - if (this.isNegative()) { - return -this.negate().toNumber(); - } else { - var val = 0; - var pow = 1; - for (var i = 0; i < this.bits_.length; i++) { - val += this.getBitsUnsigned(i) * pow; - pow *= Integer.TWO_PWR_32_DBL_; - } - return val; - } -}; - - -/** - * @param {number=} opt_radix The radix in which the text should be written. - * @return {string} The textual representation of this value. - * @override - */ -Integer.prototype.toString = function(opt_radix) { - var radix = opt_radix || 10; - if (radix < 2 || 36 < radix) { - throw Error('radix out of range: ' + radix); - } - - if (this.isZero()) { - return '0'; - } else if (this.isNegative()) { - return '-' + this.negate().toString(radix); - } - - // Do several (6) digits each time through the loop, so as to - // minimize the calls to the very expensive emulated div. 
- var radixToPower = Integer.fromNumber(Math.pow(radix, 6)); - - var rem = this; - var result = ''; - while (true) { - var remDiv = rem.divide(radixToPower); - var intval = rem.subtract(remDiv.multiply(radixToPower)).toInt(); - var digits = intval.toString(radix); - - rem = remDiv; - if (rem.isZero()) { - return digits + result; - } else { - while (digits.length < 6) { - digits = '0' + digits; - } - result = '' + digits + result; - } - } -}; - - -/** - * Returns the index-th 32-bit (signed) piece of the Integer according to - * little-endian order (i.e., index 0 contains the smallest bits). - * @param {number} index The index in question. - * @return {number} The requested 32-bits as a signed number. - */ -Integer.prototype.getBits = function(index) { - if (index < 0) { - return 0; // Allowing this simplifies bit shifting operations below... - } else if (index < this.bits_.length) { - return this.bits_[index]; - } else { - return this.sign_; - } -}; - - -/** - * Returns the index-th 32-bit piece as an unsigned number. - * @param {number} index The index in question. - * @return {number} The requested 32-bits as an unsigned number. - */ -Integer.prototype.getBitsUnsigned = function(index) { - var val = this.getBits(index); - return val >= 0 ? val : Integer.TWO_PWR_32_DBL_ + val; -}; - - -/** @return {number} The sign bit of this number, -1 or 0. */ -Integer.prototype.getSign = function() { - return this.sign_; -}; - - -/** @return {boolean} Whether this value is zero. */ -Integer.prototype.isZero = function() { - if (this.sign_ != 0) { - return false; - } - for (var i = 0; i < this.bits_.length; i++) { - if (this.bits_[i] != 0) { - return false; - } - } - return true; -}; - - -/** @return {boolean} Whether this value is negative. */ -Integer.prototype.isNegative = function() { - return this.sign_ == -1; -}; - - -/** @return {boolean} Whether this value is odd. */ -Integer.prototype.isOdd = function() { - return (this.bits_.length == 0) && (this.sign_ == -1) || - (this.bits_.length > 0) && ((this.bits_[0] & 1) != 0); -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer equals the other. - */ -Integer.prototype.equals = function(other) { - if (this.sign_ != other.sign_) { - return false; - } - var len = Math.max(this.bits_.length, other.bits_.length); - for (var i = 0; i < len; i++) { - if (this.getBits(i) != other.getBits(i)) { - return false; - } - } - return true; -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer does not equal the other. - */ -Integer.prototype.notEquals = function(other) { - return !this.equals(other); -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer is greater than the other. - */ -Integer.prototype.greaterThan = function(other) { - return this.compare(other) > 0; -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer is greater than or equal to the other. - */ -Integer.prototype.greaterThanOrEqual = function(other) { - return this.compare(other) >= 0; -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer is less than the other. - */ -Integer.prototype.lessThan = function(other) { - return this.compare(other) < 0; -}; - - -/** - * @param {Integer} other Integer to compare against. - * @return {boolean} Whether this Integer is less than or equal to the other. 
- */ -Integer.prototype.lessThanOrEqual = function(other) { - return this.compare(other) <= 0; -}; - - -/** - * Compares this Integer with the given one. - * @param {Integer} other Integer to compare against. - * @return {number} 0 if they are the same, 1 if the this is greater, and -1 - * if the given one is greater. - */ -Integer.prototype.compare = function(other) { - var diff = this.subtract(other); - if (diff.isNegative()) { - return -1; - } else if (diff.isZero()) { - return 0; - } else { - return +1; - } -}; - - -/** - * Returns an integer with only the first numBits bits of this value, sign - * extended from the final bit. - * @param {number} numBits The number of bits by which to shift. - * @return {!Integer} The shorted integer value. - */ -Integer.prototype.shorten = function(numBits) { - var arr_index = (numBits - 1) >> 5; - var bit_index = (numBits - 1) % 32; - var bits = []; - for (var i = 0; i < arr_index; i++) { - bits[i] = this.getBits(i); - } - var sigBits = bit_index == 31 ? 0xFFFFFFFF : (1 << (bit_index + 1)) - 1; - var val = this.getBits(arr_index) & sigBits; - //noinspection JSBitwiseOperatorUsage - if (val & (1 << bit_index)) { - val |= 0xFFFFFFFF - sigBits; - bits[arr_index] = val; - return new Integer(bits, -1); - } else { - bits[arr_index] = val; - return new Integer(bits, 0); - } -}; - - -/** @return {!Integer} The negation of this value. */ -Integer.prototype.negate = function() { - return this.not().add(Integer.ONE); -}; - - -/** - * Returns the sum of this and the given Integer. - * @param {Integer} other The Integer to add to this. - * @return {!Integer} The Integer result. - */ -Integer.prototype.add = function(other) { - var len = Math.max(this.bits_.length, other.bits_.length); - var arr = []; - var carry = 0; - - for (var i = 0; i <= len; i++) { - var a1 = this.getBits(i) >>> 16; - var a0 = this.getBits(i) & 0xFFFF; - - var b1 = other.getBits(i) >>> 16; - var b0 = other.getBits(i) & 0xFFFF; - - var c0 = carry + a0 + b0; - var c1 = (c0 >>> 16) + a1 + b1; - carry = c1 >>> 16; - c0 &= 0xFFFF; - c1 &= 0xFFFF; - arr[i] = (c1 << 16) | c0; - } - return Integer.fromBits(arr); -}; - - -/** - * Returns the difference of this and the given Integer. - * @param {Integer} other The Integer to subtract from this. - * @return {!Integer} The Integer result. - */ -Integer.prototype.subtract = function(other) { - return this.add(other.negate()); -}; - - -/** - * Returns the product of this and the given Integer. - * @param {Integer} other The Integer to multiply against this. - * @return {!Integer} The product of this and the other. - */ -Integer.prototype.multiply = function(other) { - if (this.isZero()) { - return Integer.ZERO; - } else if (other.isZero()) { - return Integer.ZERO; - } - - if (this.isNegative()) { - if (other.isNegative()) { - return this.negate().multiply(other.negate()); - } else { - return this.negate().multiply(other).negate(); - } - } else if (other.isNegative()) { - return this.multiply(other.negate()).negate(); - } - - // If both numbers are small, use float multiplication - if (this.lessThan(Integer.TWO_PWR_24_) && - other.lessThan(Integer.TWO_PWR_24_)) { - return Integer.fromNumber(this.toNumber() * other.toNumber()); - } - - // Fill in an array of 16-bit products. 
- var len = this.bits_.length + other.bits_.length; - var arr = []; - for (var i = 0; i < 2 * len; i++) { - arr[i] = 0; - } - for (var i = 0; i < this.bits_.length; i++) { - for (var j = 0; j < other.bits_.length; j++) { - var a1 = this.getBits(i) >>> 16; - var a0 = this.getBits(i) & 0xFFFF; - - var b1 = other.getBits(j) >>> 16; - var b0 = other.getBits(j) & 0xFFFF; - - arr[2 * i + 2 * j] += a0 * b0; - Integer.carry16_(arr, 2 * i + 2 * j); - arr[2 * i + 2 * j + 1] += a1 * b0; - Integer.carry16_(arr, 2 * i + 2 * j + 1); - arr[2 * i + 2 * j + 1] += a0 * b1; - Integer.carry16_(arr, 2 * i + 2 * j + 1); - arr[2 * i + 2 * j + 2] += a1 * b1; - Integer.carry16_(arr, 2 * i + 2 * j + 2); - } - } - - // Combine the 16-bit values into 32-bit values. - for (var i = 0; i < len; i++) { - arr[i] = (arr[2 * i + 1] << 16) | arr[2 * i]; - } - for (var i = len; i < 2 * len; i++) { - arr[i] = 0; - } - return new Integer(arr, 0); -}; - - -/** - * Carries any overflow from the given index into later entries. - * @param {Array.} bits Array of 16-bit values in little-endian order. - * @param {number} index The index in question. - * @private - */ -Integer.carry16_ = function(bits, index) { - while ((bits[index] & 0xFFFF) != bits[index]) { - bits[index + 1] += bits[index] >>> 16; - bits[index] &= 0xFFFF; - } -}; - - -/** - * Returns this Integer divided by the given one. - * @param {Integer} other Th Integer to divide this by. - * @return {!Integer} This value divided by the given one. - */ -Integer.prototype.divide = function(other) { - if (other.isZero()) { - throw Error('division by zero'); - } else if (this.isZero()) { - return Integer.ZERO; - } - - if (this.isNegative()) { - if (other.isNegative()) { - return this.negate().divide(other.negate()); - } else { - return this.negate().divide(other).negate(); - } - } else if (other.isNegative()) { - return this.divide(other.negate()).negate(); - } - - // Repeat the following until the remainder is less than other: find a - // floating-point that approximates remainder / other *from below*, add this - // into the result, and subtract it from the remainder. It is critical that - // the approximate value is less than or equal to the real value so that the - // remainder never becomes negative. - var res = Integer.ZERO; - var rem = this; - while (rem.greaterThanOrEqual(other)) { - // Approximate the result of division. This may be a little greater or - // smaller than the actual value. - var approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber())); - - // We will tweak the approximate result by changing it in the 48-th digit or - // the smallest non-fractional digit, whichever is larger. - var log2 = Math.ceil(Math.log(approx) / Math.LN2); - var delta = (log2 <= 48) ? 1 : Math.pow(2, log2 - 48); - - // Decrease the approximation until it is smaller than the remainder. Note - // that if it is too large, the product overflows and is negative. - var approxRes = Integer.fromNumber(approx); - var approxRem = approxRes.multiply(other); - while (approxRem.isNegative() || approxRem.greaterThan(rem)) { - approx -= delta; - approxRes = Integer.fromNumber(approx); - approxRem = approxRes.multiply(other); - } - - // We know the answer can't be zero... and actually, zero would cause - // infinite recursion since we would make no progress. - if (approxRes.isZero()) { - approxRes = Integer.ONE; - } - - res = res.add(approxRes); - rem = rem.subtract(approxRem); - } - return res; -}; - - -/** - * Returns this Integer modulo the given one. 
- * @param {Integer} other The Integer by which to mod. - * @return {!Integer} This value modulo the given one. - */ -Integer.prototype.modulo = function(other) { - return this.subtract(this.divide(other).multiply(other)); -}; - - -/** @return {!Integer} The bitwise-NOT of this value. */ -Integer.prototype.not = function() { - var len = this.bits_.length; - var arr = []; - for (var i = 0; i < len; i++) { - arr[i] = ~this.bits_[i]; - } - return new Integer(arr, ~this.sign_); -}; - - -/** - * Returns the bitwise-AND of this Integer and the given one. - * @param {Integer} other The Integer to AND with this. - * @return {!Integer} The bitwise-AND of this and the other. - */ -Integer.prototype.and = function(other) { - var len = Math.max(this.bits_.length, other.bits_.length); - var arr = []; - for (var i = 0; i < len; i++) { - arr[i] = this.getBits(i) & other.getBits(i); - } - return new Integer(arr, this.sign_ & other.sign_); -}; - - -/** - * Returns the bitwise-OR of this Integer and the given one. - * @param {Integer} other The Integer to OR with this. - * @return {!Integer} The bitwise-OR of this and the other. - */ -Integer.prototype.or = function(other) { - var len = Math.max(this.bits_.length, other.bits_.length); - var arr = []; - for (var i = 0; i < len; i++) { - arr[i] = this.getBits(i) | other.getBits(i); - } - return new Integer(arr, this.sign_ | other.sign_); -}; - - -/** - * Returns the bitwise-XOR of this Integer and the given one. - * @param {Integer} other The Integer to XOR with this. - * @return {!Integer} The bitwise-XOR of this and the other. - */ -Integer.prototype.xor = function(other) { - var len = Math.max(this.bits_.length, other.bits_.length); - var arr = []; - for (var i = 0; i < len; i++) { - arr[i] = this.getBits(i) ^ other.getBits(i); - } - return new Integer(arr, this.sign_ ^ other.sign_); -}; - - -/** - * Returns this value with bits shifted to the left by the given amount. - * @param {number} numBits The number of bits by which to shift. - * @return {!Integer} This shifted to the left by the given amount. - */ -Integer.prototype.shiftLeft = function(numBits) { - var arr_delta = numBits >> 5; - var bit_delta = numBits % 32; - var len = this.bits_.length + arr_delta + (bit_delta > 0 ? 1 : 0); - var arr = []; - for (var i = 0; i < len; i++) { - if (bit_delta > 0) { - arr[i] = (this.getBits(i - arr_delta) << bit_delta) | - (this.getBits(i - arr_delta - 1) >>> (32 - bit_delta)); - } else { - arr[i] = this.getBits(i - arr_delta); - } - } - return new Integer(arr, this.sign_); -}; - - -/** - * Returns this value with bits shifted to the right by the given amount. - * @param {number} numBits The number of bits by which to shift. - * @return {!Integer} This shifted to the right by the given amount. 
- */ -Integer.prototype.shiftRight = function(numBits) { - var arr_delta = numBits >> 5; - var bit_delta = numBits % 32; - var len = this.bits_.length - arr_delta; - var arr = []; - for (var i = 0; i < len; i++) { - if (bit_delta > 0) { - arr[i] = (this.getBits(i + arr_delta) >>> bit_delta) | - (this.getBits(i + arr_delta + 1) << (32 - bit_delta)); - } else { - arr[i] = this.getBits(i + arr_delta); - } - } - return new Integer(arr, this.sign_); -}; - -/** - * Provide the name of the constructor and the string representation - * @returns {string} - */ -Integer.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * Returns a Integer whose value is the absolute value of this - * @returns {Integer} - */ -Integer.prototype.abs = function () { - return this.sign_ === 0 ? this : this.negate(); -}; - -/** - * Returns the string representation. - * Method used by the native JSON.stringify() to serialize this instance. - */ -Integer.prototype.toJSON = function () { - return this.toString(); -}; - -module.exports = Integer; diff --git a/lib/types/integer.ts b/lib/types/integer.ts new file mode 100644 index 000000000..ea6f4f14b --- /dev/null +++ b/lib/types/integer.ts @@ -0,0 +1,827 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import utils from "../utils"; + +/** + * A two's-complement integer an array containing bits of the + * integer in 32-bit (signed) pieces, given in little-endian order (i.e., + * lowest-order bits in the first piece), and the sign of -1 or 0. + * + * See the from* functions below for other convenient ways of constructing + * Integers. + * + * The internal representation of an integer is an array of 32-bit signed + * pieces, along with a sign (0 or -1) that indicates the contents of all the + * other 32-bit pieces out to infinity. We use 32-bit pieces because these are + * the size of integers on which Javascript performs bit-operations. For + * operations like addition and multiplication, we split each number into 16-bit + * pieces, which can easily be multiplied within Javascript's floating-point + * representation without overflow or change in sign. + * @final + */ +class Integer { + private bits_: number[]; + private sign_: number; + + /** + * Constructs a two's-complement integer an array containing bits of the + * integer in 32-bit (signed) pieces, given in little-endian order (i.e., + * lowest-order bits in the first piece), and the sign of -1 or 0. + * + * See the from* functions below for other convenient ways of constructing + * Integers. + * + * The internal representation of an integer is an array of 32-bit signed + * pieces, along with a sign (0 or -1) that indicates the contents of all the + * other 32-bit pieces out to infinity. We use 32-bit pieces because these are + * the size of integers on which Javascript performs bit-operations. 
For + * operations like addition and multiplication, we split each number into 16-bit + * pieces, which can easily be multiplied within Javascript's floating-point + * representation without overflow or change in sign. + * + * @constructor + * @param {Array.} bits Array containing the bits of the number. + * @param {number} sign The sign of the number: -1 for negative and 0 positive. + * @final + */ + constructor(bits: number[], sign: number) { + this.bits_ = []; + this.sign_ = sign; + + // Copy the 32-bit signed integer values passed in. We prune out those at the + // top that equal the sign since they are redundant. + let top = true; + for (let i = bits.length - 1; i >= 0; i--) { + const val = bits[i] | 0; + if (!top || val !== sign) { + this.bits_[i] = val; + top = false; + } + } + } + + + // NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the + // from* methods on which they depend. + + + /** + * A cache of the Integer representations of small integer values. + * @type {!Object} + * @private + */ + private static IntCache_: { [key: number]: Integer } = {}; + + /** + * Returns an Integer representing the given (32-bit) integer value. + * @param {number} value A 32-bit integer value. + * @return {!Integer} The corresponding Integer value. + */ + static fromInt(value: number): Integer { + if (value >= -128 && value < 128) { + const cachedObj = Integer.IntCache_[value]; + if (cachedObj) { + return cachedObj; + } + } + + const obj = new Integer([value | 0], value < 0 ? -1 : 0); + if (value >= -128 && value < 128) { + Integer.IntCache_[value] = obj; + } + return obj; + } + + /** + * Returns an Integer representing the given value, provided that it is a finite + * number. Otherwise, zero is returned. + * @param {number} value The value in question. + * @return {!Integer} The corresponding Integer value. + */ + static fromNumber(value: number): Integer { + if (isNaN(value) || !isFinite(value)) { + return Integer.ZERO; + } else if (value < 0) { + return Integer.fromNumber(-value).negate(); + } + const bits = []; + let pow = 1; + for (let i = 0; value >= pow; i++) { + bits[i] = (value / pow) | 0; + pow *= Integer.TWO_PWR_32_DBL_; + } + return new Integer(bits, 0); + + } + + /** + * Returns a Integer representing the value that comes by concatenating the + * given entries, each is assumed to be 32 signed bits, given in little-endian + * order (lowest order bits in the lowest index), and sign-extending the highest + * order 32-bit value. + * @param {Array.} bits The bits of the number, in 32-bit signed pieces, + * in little-endian order. + * @return {!Integer} The corresponding Integer value. + */ + static fromBits(bits: number[]): Integer { + const high = bits[bits.length - 1]; + //noinspection JSBitwiseOperatorUsage + return new Integer(bits, high & (1 << 31) ? -1 : 0); + } + + /** + * Returns an Integer representation of the given string, written using the + * given radix. + * @param {string} str The textual representation of the Integer. + * @param {number=} opt_radix The radix in which the text is written. + * @return {!Integer} The corresponding Integer value. 
+ */ + static fromString(str: string, opt_radix?: number): Integer { + if (str.length === 0) { + throw TypeError('number format error: empty string'); + } + + const radix = opt_radix || 10; + if (radix < 2 || radix > 36) { + throw Error('radix out of range: ' + radix); + } + + if (str.charAt(0) === '-') { + return Integer.fromString(str.substring(1), radix).negate(); + } else if (str.indexOf('-') >= 0) { + throw TypeError('number format error: interior "-" character'); + } + + // Do several (8) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. + const radixToPower = Integer.fromNumber(Math.pow(radix, 8)); + + let result = Integer.ZERO; + for (let i = 0; i < str.length; i += 8) { + const size = Math.min(8, str.length - i); + const value = parseInt(str.substring(i, i + size), radix); + if (size < 8) { + const power = Integer.fromNumber(Math.pow(radix, size)); + result = result.multiply(power).add(Integer.fromNumber(value)); + } else { + result = result.multiply(radixToPower); + result = result.add(Integer.fromNumber(value)); + } + } + return result; + } + + /** + * Returns an Integer representation of a given big endian Buffer. + * The internal representation of bits contains bytes in groups of 4 + * @param {Buffer} buf + * @returns {Integer} + */ + static fromBuffer(buf: Buffer): Integer { + const bits = new Array(Math.ceil(buf.length / 4)); + //noinspection JSBitwiseOperatorUsage + const sign = buf[0] & (1 << 7) ? -1 : 0; + for (let i = 0; i < bits.length; i++) { + let offset = buf.length - ((i + 1) * 4); + let value; + if (offset < 0) { + //The buffer length is not multiple of 4 + offset = offset + 4; + value = 0; + for (let j = 0; j < offset; j++) { + let byte = buf[j]; + if (sign === -1) { + //invert the bits + byte = ~byte & 0xff; + } + value = value | (byte << (offset - j - 1) * 8); + } + if (sign === -1) { + //invert all the bits + value = ~value; + } + } else { + value = buf.readInt32BE(offset); + } + bits[i] = value; + } + return new Integer(bits, sign); + } + + /** + * Returns a big endian buffer representation of an Integer. + * Internally the bits are represented using 4 bytes groups (numbers), + * in the Buffer representation there might be the case where we need less than the 4 bytes. 
+ * For example: 0x00000001 -> '01', 0xFFFFFFFF -> 'FF', 0xFFFFFF01 -> 'FF01' + * @param {Integer} value + * @returns {Buffer} + */ + static toBuffer(value: Integer): Buffer { + const sign = value.sign_; + const bits = value.bits_; + if (bits.length === 0) { + //[0] or [0xffffffff] + return utils.allocBufferFromArray([value.sign_]); + } + //the high bits might need to be represented in less than 4 bytes + let highBits = bits[bits.length - 1]; + if (sign === -1) { + highBits = ~highBits; + } + const high = []; + if (highBits >>> 24 > 0) { + high.push((highBits >> 24) & 0xff); + } + if (highBits >>> 16 > 0) { + high.push((highBits >> 16) & 0xff); + } + if (highBits >>> 8 > 0) { + high.push((highBits >> 8) & 0xff); + } + high.push(highBits & 0xff); + if (sign === -1) { + //The byte containing the sign bit got removed + if (high[0] >> 7 !== 0) { + //it is going to be negated + high.unshift(0); + } + } + else if (high[0] >> 7 !== 0) { + //its positive but it lost the byte containing the sign bit + high.unshift(0); + } + const buf = utils.allocBufferUnsafe(high.length + ((bits.length - 1) * 4)); + for (let j = 0; j < high.length; j++) { + const b = high[j]; + if (sign === -1) { + buf[j] = ~b; + } else { + buf[j] = b; + } + } + for (let i = 0; i < bits.length - 1; i++) { + const group = bits[bits.length - 2 - i]; + const offset = high.length + i * 4; + buf.writeInt32BE(group, offset); + } + return buf; + } + + /** + * A number used repeatedly in calculations. This must appear before the first + * call to the from* functions below. + * @type {number} + * @private + */ + private static TWO_PWR_32_DBL_: number = (1 << 16) * (1 << 16); + + /** @type {!Integer} */ + static ZERO: Integer = Integer.fromInt(0); + + /** @type {!Integer} */ + static ONE: Integer = Integer.fromInt(1); + + /** + * @type {!Integer} + * @private + */ + private static TWO_PWR_24_: Integer = Integer.fromInt(1 << 24); + + /** + * Returns the value, assuming it is a 32-bit integer. + * @return {number} The corresponding int value. + */ + toInt(): number { + return this.bits_.length > 0 ? this.bits_[0] : this.sign_; + } + + /** @return {number} The closest floating-point representation to this value. */ + toNumber(): number { + if (this.isNegative()) { + return -this.negate().toNumber(); + } + let val = 0; + let pow = 1; + for (let i = 0; i < this.bits_.length; i++) { + val += this.getBitsUnsigned(i) * pow; + pow *= Integer.TWO_PWR_32_DBL_; + } + return val; + + } + + /** + * @param {number=} opt_radix The radix in which the text should be written. + * @return {string} The textual representation of this value. + * @override + */ + toString(opt_radix?: number): string { + const radix = opt_radix || 10; + if (radix < 2 || radix > 36) { + throw Error('radix out of range: ' + radix); + } + + if (this.isZero()) { + return '0'; + } else if (this.isNegative()) { + return '-' + this.negate().toString(radix); + } + + // Do several (6) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. 
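// Illustrative trace with example values (radix = 10, so radixToPower = 10^6):
// for an Integer holding 1234567890123 the loop below yields
//   pass 1: remDiv = 1234567, intval = 890123 -> result = '890123'
//   pass 2: remDiv = 1,       intval = 234567 -> result = '234567890123'
//   pass 3: remDiv = 0,       intval = 1      -> returns '1' + '234567890123'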
+ const radixToPower = Integer.fromNumber(Math.pow(radix, 6)); + + let rem : Integer = this; + let result = ''; + while (true) { + const remDiv = rem.divide(radixToPower); + const intval = rem.subtract(remDiv.multiply(radixToPower)).toInt(); + let digits = intval.toString(radix); + + rem = remDiv; + if (rem.isZero()) { + return digits + result; + } + while (digits.length < 6) { + digits = '0' + digits; + } + result = '' + digits + result; + + } + } + + /** + * Returns the index-th 32-bit (signed) piece of the Integer according to + * little-endian order (i.e., index 0 contains the smallest bits). + * @param {number} index The index in question. + * @return {number} The requested 32-bits as a signed number. + */ + getBits(index: number): number { + if (index < 0) { + return 0; + } else if (index < this.bits_.length) { + return this.bits_[index]; + } + return this.sign_; + + } + + /** + * Returns the index-th 32-bit piece as an unsigned number. + * @param {number} index The index in question. + * @return {number} The requested 32-bits as an unsigned number. + */ + getBitsUnsigned(index: number): number { + const val = this.getBits(index); + return val >= 0 ? val : Integer.TWO_PWR_32_DBL_ + val; + } + + /** @return {number} The sign bit of this number, -1 or 0. */ + getSign(): number { + return this.sign_; + } + + /** @return {boolean} Whether this value is zero. */ + isZero(): boolean { + if (this.sign_ !== 0) { + return false; + } + for (let i = 0; i < this.bits_.length; i++) { + if (this.bits_[i] !== 0) { + return false; + } + } + return true; + } + + /** @return {boolean} Whether this value is negative. */ + isNegative(): boolean { + return this.sign_ === -1; + } + + /** @return {boolean} Whether this value is odd. */ + isOdd(): boolean { + return (this.bits_.length === 0 && this.sign_ === -1) || + (this.bits_.length > 0 && (this.bits_[0] & 1) !== 0); + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer equals the other. + */ + equals(other: Integer): boolean { + if (this.sign_ !== other.sign_) { + return false; + } + const len = Math.max(this.bits_.length, other.bits_.length); + for (let i = 0; i < len; i++) { + if (this.getBits(i) !== other.getBits(i)) { + return false; + } + } + return true; + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer does not equal the other. + */ + notEquals(other: Integer): boolean { + return !this.equals(other); + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than the other. + */ + greaterThan(other: Integer): boolean { + return this.compare(other) > 0; + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is greater than or equal to the other. + */ + greaterThanOrEqual(other: Integer): boolean { + return this.compare(other) >= 0; + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than the other. + */ + lessThan(other: Integer): boolean { + return this.compare(other) < 0; + } + + /** + * @param {Integer} other Integer to compare against. + * @return {boolean} Whether this Integer is less than or equal to the other. + */ + lessThanOrEqual(other: Integer): boolean { + return this.compare(other) <= 0; + } + + /** + * Compares this Integer with the given one. + * @param {Integer} other Integer to compare against. 
+ * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: Integer): number { + const diff = this.subtract(other); + if (diff.isNegative()) { + return -1; + } else if (diff.isZero()) { + return 0; + } + return +1; + + } + + /** + * Returns an integer with only the first numBits bits of this value, sign + * extended from the final bit. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} The shorted integer value. + */ + shorten(numBits: number): Integer { + const arr_index = (numBits - 1) >> 5; + const bit_index = (numBits - 1) % 32; + const bits = []; + for (let i = 0; i < arr_index; i++) { + bits[i] = this.getBits(i); + } + const sigBits = bit_index === 31 ? 0xFFFFFFFF : (1 << (bit_index + 1)) - 1; + let val = this.getBits(arr_index) & sigBits; + //noinspection JSBitwiseOperatorUsage + if (val & (1 << bit_index)) { + val |= 0xFFFFFFFF - sigBits; + bits[arr_index] = val; + return new Integer(bits, -1); + } + bits[arr_index] = val; + return new Integer(bits, 0); + + } + + /** @return {!Integer} The negation of this value. */ + negate(): Integer { + return this.not().add(Integer.ONE); + } + + /** + * Returns the sum of this and the given Integer. + * @param {Integer} other The Integer to add to this. + * @return {!Integer} The Integer result. + */ + add(other: Integer): Integer { + const len = Math.max(this.bits_.length, other.bits_.length); + const arr = []; + let carry = 0; + + for (let i = 0; i <= len; i++) { + const a1 = this.getBits(i) >>> 16; + const a0 = this.getBits(i) & 0xFFFF; + + const b1 = other.getBits(i) >>> 16; + const b0 = other.getBits(i) & 0xFFFF; + + let c0 = carry + a0 + b0; + let c1 = (c0 >>> 16) + a1 + b1; + carry = c1 >>> 16; + c0 &= 0xFFFF; + c1 &= 0xFFFF; + arr[i] = (c1 << 16) | c0; + } + return Integer.fromBits(arr); + } + + /** + * Returns the difference of this and the given Integer. + * @param {Integer} other The Integer to subtract from this. + * @return {!Integer} The Integer result. + */ + subtract(other: Integer): Integer { + return this.add(other.negate()); + } + + /** + * Returns the product of this and the given Integer. + * @param {Integer} other The Integer to multiply against this. + * @return {!Integer} The product of this and the other. + */ + multiply(other: Integer): Integer { + if (this.isZero()) { + return Integer.ZERO; + } else if (other.isZero()) { + return Integer.ZERO; + } + + if (this.isNegative()) { + if (other.isNegative()) { + return this.negate().multiply(other.negate()); + } + return this.negate().multiply(other).negate(); + + } else if (other.isNegative()) { + return this.multiply(other.negate()).negate(); + } + + // If both numbers are small, use float multiplication + if (this.lessThan(Integer.TWO_PWR_24_) && other.lessThan(Integer.TWO_PWR_24_)) { + return Integer.fromNumber(this.toNumber() * other.toNumber()); + } + + // Fill in an array of 16-bit products. 
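// Sketch of the identity this schoolbook multiplication relies on (illustrative only):
// with each 32-bit piece split as a1 * 2^16 + a0 and b1 * 2^16 + b0,
//   (a1*2^16 + a0) * (b1*2^16 + b0) = a1*b1*2^32 + (a1*b0 + a0*b1)*2^16 + a0*b0
// Each partial product of two 16-bit values is below 2^32, so it stays exact in a
// JavaScript double; carry16_ then propagates the overflow out of every 16-bit slot.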
+ const len = this.bits_.length + other.bits_.length; + const arr = []; + for (let i = 0; i < 2 * len; i++) { + arr[i] = 0; + } + for (let i = 0; i < this.bits_.length; i++) { + for (let j = 0; j < other.bits_.length; j++) { + const a1 = this.getBits(i) >>> 16; + const a0 = this.getBits(i) & 0xFFFF; + + const b1 = other.getBits(j) >>> 16; + const b0 = other.getBits(j) & 0xFFFF; + + arr[2 * i + 2 * j] += a0 * b0; + Integer.carry16_(arr, 2 * i + 2 * j); + arr[2 * i + 2 * j + 1] += a1 * b0; + Integer.carry16_(arr, 2 * i + 2 * j + 1); + arr[2 * i + 2 * j + 1] += a0 * b1; + Integer.carry16_(arr, 2 * i + 2 * j + 1); + arr[2 * i + 2 * j + 2] += a1 * b1; + Integer.carry16_(arr, 2 * i + 2 * j + 2); + } + } + + for (let i = 0; i < len; i++) { + arr[i] = (arr[2 * i + 1] << 16) | arr[2 * i]; + } + for (let i = len; i < 2 * len; i++) { + arr[i] = 0; + } + return new Integer(arr, 0); + } + + /** + * Carries any overflow from the given index into later entries. + * @param {Array.} bits Array of 16-bit values in little-endian order. + * @param {number} index The index in question. + * @private + */ + private static carry16_(bits: number[], index: number) { + while ((bits[index] & 0xFFFF) !== bits[index]) { + bits[index + 1] += bits[index] >>> 16; + bits[index] &= 0xFFFF; + } + } + + /** + * Returns this Integer divided by the given one. + * @param {Integer} other Th Integer to divide this by. + * @return {!Integer} This value divided by the given one. + */ + divide(other: Integer): Integer { + if (other.isZero()) { + throw Error('division by zero'); + } else if (this.isZero()) { + return Integer.ZERO; + } + + if (this.isNegative()) { + if (other.isNegative()) { + return this.negate().divide(other.negate()); + } + return this.negate().divide(other).negate(); + + } else if (other.isNegative()) { + return this.divide(other.negate()).negate(); + } + + // Repeat the following until the remainder is less than other: find a + // floating-point that approximates remainder / other *from below*, add this + // into the result, and subtract it from the remainder. It is critical that + // the approximate value is less than or equal to the real value so that the + // remainder never becomes negative. + let res = Integer.ZERO; + let rem: Integer = this; + while (rem.greaterThanOrEqual(other)) { + // Approximate the result of division. This may be a little greater or + // smaller than the actual value. + let approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber())); + + + // We will tweak the approximate result by changing it in the 48-th digit or + // the smallest non-fractional digit, whichever is larger. + const log2 = Math.ceil(Math.log(approx) / Math.LN2); + const delta = (log2 <= 48) ? 1 : Math.pow(2, log2 - 48); + + // Decrease the approximation until it is smaller than the remainder. Note + // that if it is too large, the product overflows and is negative. + let approxRes = Integer.fromNumber(approx); + let approxRem = approxRes.multiply(other); + while (approxRem.isNegative() || approxRem.greaterThan(rem)) { + approx -= delta; + approxRes = Integer.fromNumber(approx); + approxRem = approxRes.multiply(other); + } + + // We know the answer can't be zero... and actually, zero would cause + // infinite recursion since we would make no progress. + if (approxRes.isZero()) { + approxRes = Integer.ONE; + } + + res = res.add(approxRes); + rem = rem.subtract(approxRem); + } + return res; + } + + /** + * Returns this Integer modulo the given one. + * @param {Integer} other The Integer by which to mod. 
+ * @return {!Integer} This value modulo the given one. + */ + modulo(other: Integer): Integer { + return this.subtract(this.divide(other).multiply(other)); + } + + /** @return {!Integer} The bitwise-NOT of this value. */ + not(): Integer { + const len = this.bits_.length; + const arr = []; + for (let i = 0; i < len; i++) { + arr[i] = ~this.bits_[i]; + } + return new Integer(arr, ~this.sign_); + } + + /** + * Returns the bitwise-AND of this Integer and the given one. + * @param {Integer} other The Integer to AND with this. + * @return {!Integer} The bitwise-AND of this and the other. + */ + and(other: Integer): Integer { + const len = Math.max(this.bits_.length, other.bits_.length); + const arr = []; + for (let i = 0; i < len; i++) { + arr[i] = this.getBits(i) & other.getBits(i); + } + return new Integer(arr, this.sign_ & other.sign_); + } + + /** + * Returns the bitwise-OR of this Integer and the given one. + * @param {Integer} other The Integer to OR with this. + * @return {!Integer} The bitwise-OR of this and the other. + */ + or(other: Integer): Integer { + const len = Math.max(this.bits_.length, other.bits_.length); + const arr = []; + for (let i = 0; i < len; i++) { + arr[i] = this.getBits(i) | other.getBits(i); + } + return new Integer(arr, this.sign_ | other.sign_); + } + + /** + * Returns the bitwise-XOR of this Integer and the given one. + * @param {Integer} other The Integer to XOR with this. + * @return {!Integer} The bitwise-XOR of this and the other. + */ + xor(other: Integer): Integer { + const len = Math.max(this.bits_.length, other.bits_.length); + const arr = []; + for (let i = 0; i < len; i++) { + arr[i] = this.getBits(i) ^ other.getBits(i); + } + return new Integer(arr, this.sign_ ^ other.sign_); + } + + /** + * Returns this value with bits shifted to the left by the given amount. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the left by the given amount. + */ + shiftLeft(numBits: number): Integer { + const arr_delta = numBits >> 5; + const bit_delta = numBits % 32; + const len = this.bits_.length + arr_delta + (bit_delta > 0 ? 1 : 0); + const arr = []; + for (let i = 0; i < len; i++) { + if (bit_delta > 0) { + arr[i] = (this.getBits(i - arr_delta) << bit_delta) | + (this.getBits(i - arr_delta - 1) >>> (32 - bit_delta)); + } else { + arr[i] = this.getBits(i - arr_delta); + } + } + return new Integer(arr, this.sign_); + } + + /** + * Returns this value with bits shifted to the right by the given amount. + * @param {number} numBits The number of bits by which to shift. + * @return {!Integer} This shifted to the right by the given amount. + */ + shiftRight(numBits: number): Integer { + const arr_delta = numBits >> 5; + const bit_delta = numBits % 32; + const len = this.bits_.length - arr_delta; + const arr = []; + for (let i = 0; i < len; i++) { + if (bit_delta > 0) { + arr[i] = (this.getBits(i + arr_delta) >>> bit_delta) | + (this.getBits(i + arr_delta + 1) << (32 - bit_delta)); + } else { + arr[i] = this.getBits(i + arr_delta); + } + } + return new Integer(arr, this.sign_); + } + + /** + * Provide the name of the constructor and the string representation + * @internal + * @returns {string} + */ + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * Returns a Integer whose value is the absolute value of this + * @returns {Integer} + */ + abs(): Integer { + return this.sign_ === 0 ? this : this.negate(); + } + + /** + * Returns the string representation. 
+ * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string { + return this.toString(); + } +} + +export default Integer; diff --git a/lib/types/local-date.js b/lib/types/local-date.js deleted file mode 100644 index 9651d7355..000000000 --- a/lib/types/local-date.js +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); - -const utils = require('../utils'); -/** @module types */ - -/** - * @private - * @const - */ -const millisecondsPerDay = 86400000; -/** - * @private - */ -const dateCenter = Math.pow(2,31); -/** - * - * Creates a new instance of LocalDate. - * @class - * @classdesc A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. - *

- * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate.
- *
- *
- * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone.
- *
- *
- * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13].
- * In the event that year, month, day parameters do not fall within the ES5 date range an Error will be thrown. If you wish to represent a date outside of this range, pass a single
- * parameter indicating the days since epoch. For example, -1 represents 1969-12-31.
- *
- * @param {Number} year The year or days since epoch. If days since epoch, month and day should not be provided. - * @param {Number} month Between 1 and 12 inclusive. - * @param {Number} day Between 1 and the number of days in the given month of the given year. - * - * @property {Date} date The date representation if falls within a range of an ES5 data type, otherwise an invalid date. - * - * @constructor - */ -function LocalDate(year, month, day) { - //implementation detail: internally uses a UTC based date - if (typeof year === 'number' && typeof month === 'number' && typeof day === 'number') { - // Use setUTCFullYear as if there is a 2 digit year, Date.UTC() assumes - // that is the 20th century. - this.date = new Date(); - this.date.setUTCHours(0, 0, 0, 0); - this.date.setUTCFullYear(year, month-1, day); - if(isNaN(this.date.getTime())) { - throw new Error(util.format('%d-%d-%d does not form a valid ES5 date!', - year, month, day)); - } - } - else if (typeof month === 'undefined' && typeof day === 'undefined') { - if (typeof year === 'number') { - //in days since epoch. - if(year < -2147483648 || year > 2147483647) { - throw new Error('You must provide a valid value for days since epoch (-2147483648 <= value <= 2147483647).'); - } - this.date = new Date(year * millisecondsPerDay); - } - } - - if (typeof this.date === 'undefined') { - throw new Error('You must provide a valid year, month and day'); - } - - /** - * If date cannot be represented yet given a valid days since epoch, track - * it internally. - */ - this._value = isNaN(this.date.getTime()) ? year : null; - - /** - * A number representing the year. May return NaN if cannot be represented as - * a Date. - * @type Number - */ - this.year = this.date.getUTCFullYear(); - /** - * A number between 1 and 12 inclusive representing the month. May return - * NaN if cannot be represented as a Date. - * @type Number - */ - this.month = this.date.getUTCMonth() + 1; - /** - * A number between 1 and the number of days in the given month of the given year (28, 29, 30, 31). - * May return NaN if cannot be represented as a Date. - * @type Number - */ - this.day = this.date.getUTCDate(); -} - -/** - * Creates a new instance of LocalDate using the current year, month and day from the system clock in the default time-zone. - */ -LocalDate.now = function () { - return LocalDate.fromDate(new Date()); -}; - -/** - * Creates a new instance of LocalDate using the current date from the system clock at UTC. - */ -LocalDate.utcNow = function () { - return new LocalDate(Date.now()); -}; - - -/** - * Creates a new instance of LocalDate using the year, month and day from the provided local date time. - * @param {Date} date - */ -LocalDate.fromDate = function (date) { - if (isNaN(date.getTime())) { - throw new TypeError('Invalid date: ' + date); - } - return new LocalDate(date.getFullYear(), date.getMonth() + 1, date.getDate()); -}; - -/** - * Creates a new instance of LocalDate using the year, month and day provided in the form: yyyy-mm-dd or - * days since epoch (i.e. -1 for Dec 31, 1969). - * @param {String} value - */ -LocalDate.fromString = function (value) { - const dashCount = (value.match(/-/g) || []).length; - if(dashCount >= 2) { - let multiplier = 1; - if (value[0] === '-') { - value = value.substring(1); - multiplier = -1; - } - const parts = value.split('-'); - return new LocalDate(multiplier * parseInt(parts[0], 10), parseInt(parts[1], 10), parseInt(parts[2], 10)); - } - if(value.match(/^-?\d+$/)) { - // Parse as days since epoch. 
- return new LocalDate(parseInt(value, 10)); - } - throw new Error("Invalid input '" + value + "'."); -}; - -/** - * Creates a new instance of LocalDate using the bytes representation. - * @param {Buffer} buffer - */ -LocalDate.fromBuffer = function (buffer) { - //move to unix epoch: 0. - return new LocalDate((buffer.readUInt32BE(0) - dateCenter)); -}; - -/** - * Compares this LocalDate with the given one. - * @param {LocalDate} other date to compare against. - * @return {number} 0 if they are the same, 1 if the this is greater, and -1 - * if the given one is greater. - */ -LocalDate.prototype.compare = function (other) { - const thisValue = isNaN(this.date.getTime()) ? this._value * millisecondsPerDay : this.date.getTime(); - const otherValue = isNaN(other.date.getTime()) ? other._value * millisecondsPerDay : other.date.getTime(); - const diff = thisValue - otherValue; - if (diff < 0) { - return -1; - } - if (diff > 0) { - return 1; - } - return 0; -}; - -/** - * Returns true if the value of the LocalDate instance and other are the same - * @param {LocalDate} other - * @returns {Boolean} - */ -LocalDate.prototype.equals = function (other) { - return ((other instanceof LocalDate)) && this.compare(other) === 0; -}; - -LocalDate.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * Gets the bytes representation of the instance. - * @returns {Buffer} - */ -LocalDate.prototype.toBuffer = function () { - //days since unix epoch - const daysSinceEpoch = isNaN(this.date.getTime()) ? this._value : Math.floor(this.date.getTime() / millisecondsPerDay); - const value = daysSinceEpoch + dateCenter; - const buf = utils.allocBufferUnsafe(4); - buf.writeUInt32BE(value, 0); - return buf; -}; - -/** - * Gets the string representation of the instance in the form: yyyy-mm-dd if - * the value can be parsed as a Date, otherwise days since epoch. - * @returns {String} - */ -LocalDate.prototype.toString = function () { - let result; - //if cannot be parsed as date, return days since epoch representation. - if (isNaN(this.date.getTime())) { - return this._value.toString(); - } - if (this.year < 0) { - result = '-' + fillZeros((this.year * -1).toString(), 4); - } - else { - result = fillZeros(this.year.toString(), 4); - } - result += '-' + fillZeros(this.month.toString(), 2) + '-' + fillZeros(this.day.toString(), 2); - return result; -}; - -/** - * Gets the string representation of the instance in the form: yyyy-mm-dd, valid for JSON. - * @returns {String} - */ -LocalDate.prototype.toJSON = function () { - return this.toString(); -}; - -/** - * @param {String} value - * @param {Number} amount - * @private - */ -function fillZeros(value, amount) { - if (value.length >= amount) { - return value; - } - return utils.stringRepeat('0', amount - value.length) + value; -} - -module.exports = LocalDate; \ No newline at end of file diff --git a/lib/types/local-date.ts b/lib/types/local-date.ts new file mode 100644 index 000000000..edca93d77 --- /dev/null +++ b/lib/types/local-date.ts @@ -0,0 +1,274 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import util from "util"; +import utils from "../utils"; + + +/** @module types */ + +/** + * @private + * @const + */ +const millisecondsPerDay = 86400000; +/** + * @private + */ +const dateCenter = Math.pow(2, 31); + +/** + * @class + * @classdesc A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. + *

+ * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate.
+ *
+ *
+ * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone.
+ *
+ *
+ * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13].
+ * In the event that year, month, day parameters do not fall within the ES5 date range an Error will be thrown. If you wish to represent a date outside of this range, pass a single
+ * parameter indicating the days since epoch. For example, -1 represents 1969-12-31.
+ *
+ */ +class LocalDate { + //TODO: not exposed. I believe it should. + /** + * The date representation if falls within a range of an ES5 data type, otherwise an invalid date. + */ + date: Date; + private _value: number | null; + year: number; + month: number; + day: number; + + /** + * Creates a new instance of LocalDate. + * A date without a time-zone in the ISO-8601 calendar system, such as 2010-08-05. + *

+ * LocalDate is an immutable object that represents a date, often viewed as year-month-day. For example, the value "1st October 2014" can be stored in a LocalDate.
+ *
+ *
+ * This class does not store or represent a time or time-zone. Instead, it is a description of the date, as used for birthdays. It cannot represent an instant on the time-line without additional information such as an offset or time-zone.
+ *
+ *
+ * Note that this type can represent dates in the range [-5877641-06-23; 5881580-07-17] while the ES5 date type can only represent values in the range of [-271821-04-20; 275760-09-13].
+ * In the event that year, month, day parameters do not fall within the ES5 date range an Error will be thrown. If you wish to represent a date outside of this range, pass a single
+ * parameter indicating the days since epoch. For example, -1 represents 1969-12-31.
+ *
+ * @param {Number} year The year or days since epoch. If days since epoch, month and day should not be provided. + * @param {Number} [month] Between 1 and 12 inclusive. + * @param {Number} [day] Between 1 and the number of days in the given month of the given year. + * + * @constructor + */ + constructor(year: number, month?: number, day?: number) { + //implementation detail: internally uses a UTC based date + if (typeof year === 'number' && typeof month === 'number' && typeof day === 'number') { + // Use setUTCFullYear as if there is a 2 digit year, Date.UTC() assumes + // that is the 20th century. + this.date = new Date(); + this.date.setUTCHours(0, 0, 0, 0); + this.date.setUTCFullYear(year, month-1, day); + if(isNaN(this.date.getTime())) { + throw new Error(util.format('%d-%d-%d does not form a valid ES5 date!', + year, month, day)); + } + } + else if (typeof month === 'undefined' && typeof day === 'undefined') { + if (typeof year === 'number') { + //in days since epoch. + if(year < -2147483648 || year > 2147483647) { + throw new Error('You must provide a valid value for days since epoch (-2147483648 <= value <= 2147483647).'); + } + this.date = new Date(year * millisecondsPerDay); + } + } + + if (typeof this.date === 'undefined') { + throw new Error('You must provide a valid year, month and day'); + } + + /** + * If date cannot be represented yet given a valid days since epoch, track + * it internally. + */ + this._value = isNaN(this.date.getTime()) ? year : null; + + /** + * A number representing the year. May return NaN if cannot be represented as + * a Date. + * @type Number + */ + this.year = this.date.getUTCFullYear(); + /** + * A number between 1 and 12 inclusive representing the month. May return + * NaN if cannot be represented as a Date. + * @type Number + */ + this.month = this.date.getUTCMonth() + 1; + /** + * A number between 1 and the number of days in the given month of the given year (28, 29, 30, 31). + * May return NaN if cannot be represented as a Date. + * @type Number + */ + this.day = this.date.getUTCDate(); + } + + /** + * Creates a new instance of LocalDate using the current year, month and day from the system clock in the default time-zone. + */ + static now(): LocalDate { + return LocalDate.fromDate(new Date()); + } + + /** + * Creates a new instance of LocalDate using the current date from the system clock at UTC. + */ + static utcNow(): LocalDate { + return new LocalDate(Date.now()); + } + + /** + * Creates a new instance of LocalDate using the year, month and day from the provided local date time. + * @param {Date} date + */ + static fromDate(date: Date): LocalDate { + if (isNaN(date.getTime())) { + throw new TypeError('Invalid date: ' + date); + } + return new LocalDate(date.getFullYear(), date.getMonth() + 1, date.getDate()); + } + + /** + * Creates a new instance of LocalDate using the year, month and day provided in the form: yyyy-mm-dd or + * days since epoch (i.e. -1 for Dec 31, 1969). + * @param {String} value + */ + static fromString(value: string): LocalDate { + const dashCount = (value.match(/-/g) || []).length; + if (dashCount >= 2) { + let multiplier = 1; + if (value[0] === '-') { + value = value.substring(1); + multiplier = -1; + } + const parts = value.split('-'); + return new LocalDate(multiplier * parseInt(parts[0], 10), parseInt(parts[1], 10), parseInt(parts[2], 10)); + } + if (value.match(/^-?\d+$/)) { + // Parse as days since epoch. 
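// For example, '-1' yields 1969-12-31 and '0' yields 1970-01-01 (the epoch),
// matching the single-argument (days since epoch) form of the constructor.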
+ return new LocalDate(parseInt(value, 10)); + } + throw new Error("Invalid input '" + value + "'."); + } + + /** + * Creates a new instance of LocalDate using the bytes representation. + * @param {Buffer} buffer + */ + static fromBuffer(buffer: Buffer): LocalDate { + //move to unix epoch: 0. + return new LocalDate(buffer.readUInt32BE(0) - dateCenter); + } + + /** + * Compares this LocalDate with the given one. + * @param {LocalDate} other date to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. + */ + compare(other: LocalDate): number { + const thisValue = isNaN(this.date.getTime()) ? this._value! * millisecondsPerDay : this.date.getTime(); + const otherValue = isNaN(other.date.getTime()) ? other._value! * millisecondsPerDay : other.date.getTime(); + const diff = thisValue - otherValue; + if (diff < 0) { + return -1; + } + if (diff > 0) { + return 1; + } + return 0; + } + + /** + * Returns true if the value of the LocalDate instance and other are the same + * @param {LocalDate} other + * @returns {Boolean} + */ + equals(other: LocalDate): boolean { + return (other instanceof LocalDate) && this.compare(other) === 0; + } + + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * Gets the bytes representation of the instance. + * @returns {Buffer} + */ + toBuffer(): Buffer { + //days since unix epoch + const daysSinceEpoch = isNaN(this.date.getTime()) ? this._value! : Math.floor(this.date.getTime() / millisecondsPerDay); + const value = daysSinceEpoch + dateCenter; + const buf = utils.allocBufferUnsafe(4); + buf.writeUInt32BE(value, 0); + return buf; + } + + /** + * Gets the string representation of the instance in the form: yyyy-mm-dd if + * the value can be parsed as a Date, otherwise days since epoch. + * @returns {String} + */ + toString(): string { + let result; + //if cannot be parsed as date, return days since epoch representation. + if (isNaN(this.date.getTime())) { + return this._value!.toString(); + } + if (this.year < 0) { + result = '-' + fillZeros((this.year * -1).toString(), 4); + } else { + result = fillZeros(this.year.toString(), 4); + } + result += '-' + fillZeros(this.month.toString(), 2) + '-' + fillZeros(this.day.toString(), 2); + return result; + } + + /** + * Gets the string representation of the instance in the form: yyyy-mm-dd, valid for JSON. + * @returns {String} + */ + toJSON(): string { + return this.toString(); + } +} + +/** + * @param {String} value + * @param {Number} amount + * @private + */ +function fillZeros(value: string, amount: number): string { + if (value.length >= amount) { + return value; + } + return utils.stringRepeat('0', amount - value.length) + value; +} + +export default LocalDate; \ No newline at end of file diff --git a/lib/types/local-time.js b/lib/types/local-time.js deleted file mode 100644 index 0e320cc98..000000000 --- a/lib/types/local-time.js +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const Long = require('long'); -const util = require('util'); -const utils = require('../utils'); -/** @module types */ - -/** - * @const - * @private - * */ -const maxNanos = Long.fromString('86399999999999'); -/** - * Nanoseconds in a second - * @const - * @private - * */ -const nanoSecInSec = Long.fromNumber(1000000000); -/** - * Nanoseconds in a millisecond - * @const - * @private - * */ -const nanoSecInMillis = Long.fromNumber(1000000); -/** - * Milliseconds in day - * @const - * @private - * */ -const millisInDay = 86400000; -/** - * - * Creates a new instance of LocalTime. - * @class - * @classdesc A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. - *

- * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45.30.123456789" can be stored in a LocalTime.
- *
- * @param {Long} totalNanoseconds Total nanoseconds since midnight. - * @constructor - */ -function LocalTime(totalNanoseconds) { - if (!(totalNanoseconds instanceof Long)) { - throw new Error('You must specify a Long value as totalNanoseconds'); - } - if (totalNanoseconds.lessThan(Long.ZERO) || totalNanoseconds.greaterThan(maxNanos)) { - throw new Error('Total nanoseconds out of range'); - } - this.value = totalNanoseconds; - - /** - * Gets the hour component of the time represented by the current instance, a number from 0 to 23. - * @type Number - */ - this.hour = this._getParts()[0]; - /** - * Gets the minute component of the time represented by the current instance, a number from 0 to 59. - * @type Number - */ - this.minute = this._getParts()[1]; - /** - * Gets the second component of the time represented by the current instance, a number from 0 to 59. - * @type Number - */ - this.second = this._getParts()[2]; - /** - * Gets the nanoseconds component of the time represented by the current instance, a number from 0 to 999999999. - * @type Number - */ - this.nanosecond = this._getParts()[3]; -} - -/** - * Parses an string representation and returns a new LocalDate. - * @param {String} value - * @returns {LocalTime} - */ -LocalTime.fromString = function (value) { - if (typeof value !== 'string') { - throw new Error('Argument type invalid: ' + util.inspect(value)); - } - const parts = value.split(':'); - let millis = parseInt(parts[0], 10) * 3600000 + parseInt(parts[1], 10) * 60000; - let nanos; - if (parts.length === 3) { - const secParts = parts[2].split('.'); - millis += parseInt(secParts[0], 10) * 1000; - if (secParts.length === 2) { - nanos = secParts[1]; - //add zeros at the end - nanos = nanos + utils.stringRepeat('0', 9 - nanos.length); - } - } - return LocalTime.fromMilliseconds(millis, parseInt(nanos, 10) || 0); -}; - -/** - * Uses the current local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime - * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. - * @returns {LocalTime} - */ -LocalTime.now = function (nanoseconds) { - return LocalTime.fromDate(new Date(), nanoseconds); -}; - -/** - * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime - * @param {Date} date Local date portion to extract the time passed since midnight. - * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the nanosecond time portion. - * @returns {LocalTime} - */ -LocalTime.fromDate = function (date, nanoseconds) { - if (!(date instanceof Date)) { - throw new Error('Not a valid date'); - } - //Use the local representation, only the milliseconds portion - const millis = (date.getTime() + date.getTimezoneOffset() * -60000) % millisInDay; - return LocalTime.fromMilliseconds(millis, nanoseconds); -}; - -/** - * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime - * @param {Number} milliseconds A Number from 0 to 86,399,999. - * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. - * @returns {LocalTime} - */ -LocalTime.fromMilliseconds = function (milliseconds, nanoseconds) { - if (typeof nanoseconds !== 'number') { - nanoseconds = 0; - } - return new LocalTime(Long - .fromNumber(milliseconds) - .multiply(nanoSecInMillis) - .add(Long.fromNumber(nanoseconds))); -}; - -/** - * Creates a new instance of LocalTime from the bytes representation. 
- * @param {Buffer} value - * @returns {LocalTime} - */ -LocalTime.fromBuffer = function (value) { - if (!(value instanceof Buffer)) { - throw new TypeError('Expected Buffer, obtained ' + util.inspect(value)); - } - return new LocalTime(new Long(value.readInt32BE(4), value.readInt32BE(0))); -}; - -/** - * Compares this LocalTime with the given one. - * @param {LocalTime} other time to compare against. - * @return {number} 0 if they are the same, 1 if the this is greater, and -1 - * if the given one is greater. - */ -LocalTime.prototype.compare = function (other) { - return this.value.compare(other.value); -}; - -/** - * Returns true if the value of the LocalTime instance and other are the same - * @param {LocalTime} other - * @returns {Boolean} - */ -LocalTime.prototype.equals = function (other) { - return ((other instanceof LocalTime)) && this.compare(other) === 0; -}; - -/** - * Gets the total amount of nanoseconds since midnight for this instance. - * @returns {Long} - */ -LocalTime.prototype.getTotalNanoseconds = function () { - return this.value; -}; - -LocalTime.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * Returns a big-endian bytes representation of the instance - * @returns {Buffer} - */ -LocalTime.prototype.toBuffer = function () { - const buffer = utils.allocBufferUnsafe(8); - buffer.writeUInt32BE(this.value.getHighBitsUnsigned(), 0); - buffer.writeUInt32BE(this.value.getLowBitsUnsigned(), 4); - return buffer; -}; - -/** - * Returns the string representation of the instance in the form of hh:MM:ss.ns - * @returns {String} - */ -LocalTime.prototype.toString = function () { - return formatTime(this._getParts()); -}; - -/** - * Gets the string representation of the instance in the form: hh:MM:ss.ns - * @returns {String} - */ -LocalTime.prototype.toJSON = function () { - return this.toString(); -}; - -/** - * @returns {Array.} - * @ignore - */ -LocalTime.prototype._getParts = function () { - if (!this._partsCache) { - //hours, minutes, seconds and nanos - const parts = [0, 0, 0, 0]; - const secs = this.value.div(nanoSecInSec); - //faster modulo - //total nanos - parts[3] = this.value.subtract(secs.multiply(nanoSecInSec)).toNumber(); - //seconds - parts[2] = secs.toNumber(); - if (parts[2] >= 60) { - //minutes - parts[1] = Math.floor(parts[2] / 60); - parts[2] = parts[2] % 60; - } - if (parts[1] >= 60) { - //hours - parts[0] = Math.floor(parts[1] / 60); - parts[1] = parts[1] % 60; - } - this._partsCache = parts; - } - return this._partsCache; -}; - -/** - * @param {Array.} values - * @private - */ -function formatTime(values) { - let result; - if (values[0] < 10) { - result = '0' + values[0] + ':'; - } - else { - result = values[0] + ':'; - } - if (values[1] < 10) { - result += '0' + values[1] + ':'; - } - else { - result += values[1] + ':'; - } - if (values[2] < 10) { - result += '0' + values[2]; - } - else { - result += values[2]; - } - if (values[3] > 0) { - let nanos = values[3].toString(); - //nine digits - if (nanos.length < 9) { - nanos = utils.stringRepeat('0', 9 - nanos.length) + nanos; - } - let lastPosition; - for (let i = nanos.length - 1; i > 0; i--) { - if (nanos[i] !== '0') { - break; - } - lastPosition = i; - } - if (lastPosition) { - nanos = nanos.substring(0, lastPosition); - } - result += '.' 
+ nanos; - } - return result; -} - -module.exports = LocalTime; \ No newline at end of file diff --git a/lib/types/local-time.ts b/lib/types/local-time.ts new file mode 100644 index 000000000..18f274845 --- /dev/null +++ b/lib/types/local-time.ts @@ -0,0 +1,308 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Long from "long"; +import util from "util"; +import utils from "../utils"; + +/** @module types */ + +/** + * @const + * @private + * */ +const maxNanos = Long.fromString('86399999999999'); +/** + * Nanoseconds in a second + * @const + * @private + * */ +const nanoSecInSec = Long.fromNumber(1000000000); +/** + * Nanoseconds in a millisecond + * @const + * @private + * */ +const nanoSecInMillis = Long.fromNumber(1000000); +/** + * Milliseconds in day + * @const + * @private + * */ +const millisInDay = 86400000; +/** + * @class + * @classdesc A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. + *

+ * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45:30.123456789" can be stored in a LocalTime. + *

+ */ +class LocalTime { + private value: Long; + /** + * Gets the hour component of the time represented by the current instance, a number from 0 to 23. + * @type Number + */ + hour: number; + /** + * Gets the minute component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + minute: number; + /** + * Gets the second component of the time represented by the current instance, a number from 0 to 59. + * @type Number + */ + second: number; + /** + * Gets the nanoseconds component of the time represented by the current instance, a number from 0 to 999999999. + * @type Number + */ + nanosecond: number; + private _partsCache?: Array; + + /** + * Creates a new instance of LocalTime. + * A time without a time-zone in the ISO-8601 calendar system, such as 10:30:05. + *

+ * LocalTime is an immutable date-time object that represents a time, often viewed as hour-minute-second. Time is represented to nanosecond precision. For example, the value "13:45:30.123456789" can be stored in a LocalTime. + *

+ * @param {Long} totalNanoseconds Total nanoseconds since midnight. + * @constructor + */ + constructor(totalNanoseconds: Long) { + if (!(totalNanoseconds instanceof Long)) { + throw new Error('You must specify a Long value as totalNanoseconds'); + } + if (totalNanoseconds.lessThan(Long.ZERO) || totalNanoseconds.greaterThan(maxNanos)) { + throw new Error('Total nanoseconds out of range'); + } + this.value = totalNanoseconds; + this.hour = this._getParts()[0]; + this.minute = this._getParts()[1]; + this.second = this._getParts()[2]; + this.nanosecond = this._getParts()[3]; + } + + /** + * Parses a string representation and returns a new LocalTime. + * @param {String} value + * @returns {LocalTime} + */ + static fromString(value: string): LocalTime { + if (typeof value !== 'string') { + throw new Error('Argument type invalid: ' + util.inspect(value)); + } + const parts = value.split(':'); + let millis = parseInt(parts[0], 10) * 3600000 + parseInt(parts[1], 10) * 60000; + let nanos; + if (parts.length === 3) { + const secParts = parts[2].split('.'); + millis += parseInt(secParts[0], 10) * 1000; + if (secParts.length === 2) { + nanos = secParts[1]; + //add zeros at the end + nanos = nanos + utils.stringRepeat('0', 9 - nanos.length); + } + } + return LocalTime.fromMilliseconds(millis, parseInt(nanos, 10) || 0); + } + + /** + * Uses the current local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. + * @returns {LocalTime} + */ + static now(nanoseconds?: number): LocalTime { + return LocalTime.fromDate(new Date(), nanoseconds); + } + + /** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Date} date Local date portion to extract the time passed since midnight. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the nanosecond time portion. + * @returns {LocalTime} + */ + static fromDate(date: Date, nanoseconds?: number): LocalTime { + if (!(date instanceof Date)) { + throw new Error('Not a valid date'); + } + const millis = (date.getTime() + date.getTimezoneOffset() * -60000) % millisInDay; + return LocalTime.fromMilliseconds(millis, nanoseconds); + } + + /** + * Uses the provided local time (in milliseconds) and the nanoseconds to create a new instance of LocalTime + * @param {Number} milliseconds A Number from 0 to 86,399,999. + * @param {Number} [nanoseconds] A Number from 0 to 999,999, representing the time nanosecond portion. + * @returns {LocalTime} + */ + static fromMilliseconds(milliseconds: number, nanoseconds?: number): LocalTime { + if (typeof nanoseconds !== 'number') { + nanoseconds = 0; + } + return new LocalTime(Long + .fromNumber(milliseconds) + .multiply(nanoSecInMillis) + .add(Long.fromNumber(nanoseconds))); + } + + /** + * Creates a new instance of LocalTime from the bytes representation. + * @param {Buffer} value + * @returns {LocalTime} + */ + static fromBuffer(value: Buffer): LocalTime { + if (!(value instanceof Buffer)) { + throw new TypeError('Expected Buffer, obtained ' + util.inspect(value)); + } + return new LocalTime(new Long(value.readInt32BE(4), value.readInt32BE(0))); + } + + /** + * Compares this LocalTime with the given one. + * @param {LocalTime} other time to compare against. + * @return {number} 0 if they are the same, 1 if the this is greater, and -1 + * if the given one is greater. 
+ */ + compare(other: LocalTime): number { + return this.value.compare(other.value); + } + + /** + * Returns true if the value of the LocalTime instance and other are the same + * @param {LocalTime} other + * @returns {Boolean} + */ + equals(other: LocalTime): boolean { + return ((other instanceof LocalTime)) && this.compare(other) === 0; + } + + /** + * Gets the total amount of nanoseconds since midnight for this instance. + * @returns {Long} + */ + getTotalNanoseconds(): Long { + return this.value; + } + + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * Returns a big-endian bytes representation of the instance + * @returns {Buffer} + */ + toBuffer(): Buffer { + const buffer = utils.allocBufferUnsafe(8); + buffer.writeUInt32BE(this.value.getHighBitsUnsigned(), 0); + buffer.writeUInt32BE(this.value.getLowBitsUnsigned(), 4); + return buffer; + } + + /** + * Returns the string representation of the instance in the form of hh:MM:ss.ns + * @returns {String} + */ + toString(): string { + return formatTime(this._getParts()); + } + + /** + * Gets the string representation of the instance in the form: hh:MM:ss.ns + * @returns {String} + */ + toJSON(): string { + return this.toString(); + } + + /** + * @returns {Array.} + * @ignore @internal + */ + private _getParts(): Array { + if (!this._partsCache) { + //hours, minutes, seconds and nanos + const parts = [0, 0, 0, 0]; + const secs = this.value.div(nanoSecInSec); + //faster modulo + //total nanos + parts[3] = this.value.subtract(secs.multiply(nanoSecInSec)).toNumber(); + //seconds + parts[2] = secs.toNumber(); + if (parts[2] >= 60) { + //minutes + parts[1] = Math.floor(parts[2] / 60); + parts[2] = parts[2] % 60; + } + if (parts[1] >= 60) { + //hours + parts[0] = Math.floor(parts[1] / 60); + parts[1] = parts[1] % 60; + } + this._partsCache = parts; + } + return this._partsCache; + } +} + +/** + * @param {Array.} values + * @private + */ +function formatTime(values: Array): string { + let result; + if (values[0] < 10) { + result = '0' + values[0] + ':'; + } + else { + result = values[0] + ':'; + } + if (values[1] < 10) { + result += '0' + values[1] + ':'; + } + else { + result += values[1] + ':'; + } + if (values[2] < 10) { + result += '0' + values[2]; + } + else { + result += values[2]; + } + if (values[3] > 0) { + let nanos = values[3].toString(); + //nine digits + if (nanos.length < 9) { + nanos = utils.stringRepeat('0', 9 - nanos.length) + nanos; + } + let lastPosition; + for (let i = nanos.length - 1; i > 0; i--) { + if (nanos[i] !== '0') { + break; + } + lastPosition = i; + } + if (lastPosition) { + nanos = nanos.substring(0, lastPosition); + } + result += '.' + nanos; + } + return result; +} + +export default LocalTime; \ No newline at end of file diff --git a/lib/types/mutable-long.js b/lib/types/mutable-long.ts similarity index 95% rename from lib/types/mutable-long.js rename to lib/types/mutable-long.ts index 43992d50b..3e5688cd3 100644 --- a/lib/types/mutable-long.js +++ b/lib/types/mutable-long.ts @@ -13,23 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -"use strict"; -const Long = require('long'); +import Long from "long"; const TWO_PWR_16_DBL = 1 << 16; const TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL; /** * Constructs a signed int64 representation. - * @ignore + * @ignore @internal */ class MutableLong { - constructor(b00, b16, b32, b48) { + _arr: number[]; + constructor(b00?, b16?, b32?, b48?) 
{ // Use an array of uint16 this._arr = [b00 & 0xffff, b16 & 0xffff, b32 & 0xffff, b48 & 0xffff]; } + static one: MutableLong = new MutableLong(1, 0, 0, 0); + toString() { return this.toImmutable().toString(); } @@ -39,7 +41,7 @@ class MutableLong { * @param {MutableLong} other * @return {number} */ - compare(other) { + compare(other: MutableLong): number { const thisNeg = this.isNegative(); const otherNeg = other.isNegative(); if (thisNeg && !otherNeg) { @@ -52,7 +54,7 @@ class MutableLong { return this._compareBits(other); } - _compareBits(other) { + _compareBits(other: MutableLong) { for (let i = 3; i >= 0; i--) { if (this._arr[i] > other._arr[i]) { return 1; @@ -84,7 +86,7 @@ class MutableLong { * Performs the bitwise NOT of this value. * @return {MutableLong} */ - not() { + not(): MutableLong { this._arr[0] = ~this._arr[0] & 0xffff; this._arr[1] = ~this._arr[1] & 0xffff; this._arr[2] = ~this._arr[2] & 0xffff; @@ -170,7 +172,7 @@ class MutableLong { * @param {MutableLong} other * @returns {MutableLong} this instance. */ - xor(other) { + xor(other: MutableLong): MutableLong { this._arr[0] ^= other._arr[0]; this._arr[1] ^= other._arr[1]; this._arr[2] ^= other._arr[2]; @@ -187,7 +189,7 @@ class MutableLong { * @param {MutableLong} multiplier * @returns {MutableLong} this instance. */ - multiply(multiplier) { + multiply(multiplier: MutableLong): MutableLong { let negate = false; if (this.isZero() || multiplier.isZero()) { return this.toZero(); @@ -247,7 +249,7 @@ class MutableLong { * Negates this value. * @return {MutableLong} */ - negate() { + negate(): MutableLong { return this.not().add(MutableLong.one); } @@ -285,7 +287,7 @@ class MutableLong { * @param {Number} [radix] * @return {MutableLong} */ - static fromString(str, radix) { + static fromString(str: string, radix?: number): MutableLong { if (typeof str !== 'string') { throw new Error('String format is not valid: ' + str); } @@ -324,6 +326,4 @@ class MutableLong { } } -MutableLong.one = new MutableLong(1, 0, 0, 0); - -module.exports = MutableLong; \ No newline at end of file +export default MutableLong; \ No newline at end of file diff --git a/lib/types/protocol-version.js b/lib/types/protocol-version.ts similarity index 82% rename from lib/types/protocol-version.js rename to lib/types/protocol-version.ts index 4accf2b68..775dfa461 100644 --- a/lib/types/protocol-version.js +++ b/lib/types/protocol-version.ts @@ -13,10 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import type Connection from "../connection"; +import type { HostMap } from "../host"; +import utils from "../utils"; +import VersionNumber from "./version-number"; + -const utils = require('../utils'); -const VersionNumber = require('./version-number'); const v200 = VersionNumber.parse('2.0.0'); const v210 = VersionNumber.parse('2.1.0'); const v220 = VersionNumber.parse('2.2.0'); @@ -41,196 +43,197 @@ const v600 = VersionNumber.parse('6.0.0'); * is supported. * @alias module:types~protocolVersion */ -const protocolVersion = { +enum protocolVersion { // Strict equality operators to compare versions are allowed, other comparison operators are discouraged. Instead, // use a function that checks if a functionality is present on a certain version, for maintainability purposes. 
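Illustrating the guideline above (a sketch outside the patch; the import path is an assumption), callers are expected to prefer the capability checks defined just below over raw version comparisons:

    import protocolVersion from "./protocol-version"; // path assumed

    const version = protocolVersion.dseV2;

    // Preferred: capability checks
    if (protocolVersion.supportsKeyspaceInRequest(version)) {
      // attach the keyspace to QUERY / EXECUTE / BATCH requests
    }
    if (!protocolVersion.supportsUnset(version)) {
      // fall back to explicit null parameters
    }

    // Discouraged: ordering comparisons such as `version >= protocolVersion.v5`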
- v1: 0x01, - v2: 0x02, - v3: 0x03, - v4: 0x04, - v5: 0x05, - v6: 0x06, - dseV1: 0x41, - dseV2: 0x42, - maxSupported: 0x42, - minSupported: 0x01, + v1 = 0x01, + v2 = 0x02, + v3 = 0x03, + v4 = 0x04, + v5 = 0x05, + v6 = 0x06, + dseV1 = 0x41, + dseV2 = 0x42, + maxSupported = 0x42, + minSupported = 0x01, +} +namespace protocolVersion { /** * Determines whether the protocol version is a DSE-specific protocol version. * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - isDse: function(version) { + export function isDse(version: number): boolean { return ((version >= this.dseV1 && version <= this.dseV2)); - }, + } /** * Returns true if the protocol version represents a version of Cassandra * supported by this driver, false otherwise * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - isSupportedCassandra: function(version) { + export function isSupportedCassandra(version: number): boolean { return (version <= 0x04 && version >= 0x01); - }, + } /** * Determines whether the protocol version is supported by this driver. * @param {Number} version * @returns {Boolean} - * @ignore */ - isSupported: function (version) { + export function isSupported(version: number): boolean { return (this.isDse(version) || this.isSupportedCassandra(version)); - }, + } /** * Determines whether the protocol includes flags for PREPARE messages. * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - supportsPrepareFlags: function (version) { + export function supportsPrepareFlags(version: number): boolean { return (version === this.dseV2); - }, + } /** * Determines whether the protocol supports sending the keyspace as part of PREPARE, QUERY, EXECUTE, and BATCH. * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - supportsKeyspaceInRequest: function (version) { + export function supportsKeyspaceInRequest (version: number): boolean { return (version === this.dseV2); - }, + } /** * Determines whether the protocol supports result_metadata_id on `prepared` response and * and `execute` request. * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - supportsResultMetadataId: function (version) { + export function supportsResultMetadataId(version: number): boolean { return (version === this.dseV2); - }, + } /** * Determines whether the protocol supports partition key indexes in the `prepared` RESULT responses. * @param {Number} version * @returns {Boolean} - * @ignore + * @ignore @internal */ - supportsPreparedPartitionKey: function (version) { + export function supportsPreparedPartitionKey(version: number): boolean { return (version >= this.v4); - }, + } /** * Determines whether the protocol supports up to 4 strings (ie: change_type, target, keyspace and table) in the * schema change responses. * @param version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsSchemaChangeFullMetadata: function (version) { + export function supportsSchemaChangeFullMetadata(version): boolean { return (version >= this.v3); - }, + } /** * Determines whether the protocol supports continuous paging. * @param version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsContinuousPaging: function (version) { + export function supportsContinuousPaging(version): boolean { return (this.isDse(version)); - }, + } /** * Determines whether the protocol supports paging state and serial consistency parameters in QUERY and EXECUTE * requests. 
* @param version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsPaging: function (version) { + export function supportsPaging(version): boolean { return (version >= this.v2); - }, + } /** * Determines whether the protocol supports timestamps parameters in BATCH, QUERY and EXECUTE requests. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsTimestamp: function (version) { + export function supportsTimestamp(version: number): boolean { return (version >= this.v3); - }, + } /** * Determines whether the protocol supports named parameters in QUERY and EXECUTE requests. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsNamedParameters: function (version) { + export function supportsNamedParameters(version: number): boolean { return (version >= this.v3); - }, + } /** * Determines whether the protocol supports unset parameters. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsUnset: function (version) { + export function supportsUnset(version: number): boolean { return (version >= this.v4); - }, + } /** * Determines whether the protocol provides a reason map for read and write failure errors. * @param version * @return {boolean} - * @ignore + * @ignore @internal */ - supportsFailureReasonMap: function (version) { + export function supportsFailureReasonMap(version): boolean { return (version >= this.v5); - }, + } /** * Determines whether the protocol supports timestamp and serial consistency parameters in BATCH requests. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - uses2BytesStreamIds: function (version) { + export function uses2BytesStreamIds(version: number): boolean { return (version >= this.v3); - }, + } /** * Determines whether the collection length is encoded using 32 bits. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - uses4BytesCollectionLength: function (version) { + export function uses4BytesCollectionLength(version: number): boolean { return (version >= this.v3); - }, + } /** * Determines whether the QUERY, EXECUTE and BATCH flags are encoded using 32 bits. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - uses4BytesQueryFlags: function (version) { + export function uses4BytesQueryFlags(version: number): boolean { return (this.isDse(version)); - }, + } /** * Startup responses using protocol v4+ can be a SERVER_ERROR wrapping a ProtocolException, this method returns true * when is possible to receive such error. * @param {Number} version * @return {boolean} - * @ignore + * @ignore @internal */ - canStartupResponseErrorBeWrapped: function (version) { + export function canStartupResponseErrorBeWrapped(version: number): boolean { return (version >= this.v4); - }, + } /** * Gets the first version number that is supported, lower than the one provided. * Returns zero when there isn't a lower supported version. * @param {Number} version * @return {Number} - * @ignore + * @ignore @internal */ - getLowerSupported: function (version) { + export function getLowerSupported(version: number): number { if (version >= this.v5) { return this.v4; } @@ -238,7 +241,7 @@ const protocolVersion = { return 0; } return version - 1; - }, + } /** * Computes the highest supported protocol version collectively by the given hosts. @@ -253,8 +256,9 @@ const protocolVersion = { * @param {Connection} connection Connection hosts were discovered from. 
* @param {Array.} hosts The hosts to determine highest protocol version from. * @return {Number} Highest supported protocol version among hosts. + * @ignore @internal */ - getHighestCommon: function(connection, hosts) { + export function getHighestCommon(connection: Connection, hosts: HostMap): number { const log = connection.log ? connection.log.bind(connection) : utils.noop; let maxVersion = connection.protocolVersion; // whether or not protocol v3 is required (nodes detected that don't support < 3). @@ -312,7 +316,7 @@ const protocolVersion = { // Anything else is < 2.x and requires protocol version V1. maxVersion = this.v1; } - } catch (e) { + } catch (_e) { log('warning', 'Encountered host ' + h.address + ' with unparseable cassandra version ' + h.cassandraVersion + ' skipping as part of protocol version evaluation'); } @@ -334,16 +338,17 @@ const protocolVersion = { log('verbose', 'Resolved protocol version 0x' + maxVersion.toString(16) + ' as the highest common protocol version among hosts'); return maxVersion; - }, + } /** * Determines if the protocol is a BETA version of the protocol. * @param {Number} version - * @return {Number} + * @return {boolean} + * @ignore @internal */ - isBeta: function (version) { + export function isBeta(version: number): boolean { return version === this.v5; } -}; +} -module.exports = protocolVersion; \ No newline at end of file +export default protocolVersion; \ No newline at end of file diff --git a/lib/types/result-set.js b/lib/types/result-set.js deleted file mode 100644 index 4137ddb03..000000000 --- a/lib/types/result-set.js +++ /dev/null @@ -1,275 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -'use strict'; - -const utils = require('../utils'); -const errors = require('../errors'); - -const asyncIteratorSymbol = Symbol.asyncIterator || '@@asyncIterator'; - -/** @module types */ - -/** - * Creates a new instance of ResultSet. - * @class - * @classdesc Represents the result of a query. - * @param {Object} response - * @param {String} host - * @param {Object} triedHosts - * @param {Number} speculativeExecutions - * @param {Number} consistency - * @param {Boolean} isSchemaInAgreement - * @constructor - */ -function ResultSet(response, host, triedHosts, speculativeExecutions, consistency, isSchemaInAgreement) { - // if no execution was made at all, set to 0. - if (speculativeExecutions === -1) { - speculativeExecutions = 0; - } - /** - * Information on the execution of a successful query: - * @member {Object} - * @property {Number} achievedConsistency The consistency level that has been actually achieved by the query. - * @property {String} queriedHost The Cassandra host that coordinated this query. - * @property {Object} triedHosts Gets the associative array of host that were queried before getting a valid response, - * being the last host the one that replied correctly. 
- * @property {Object} speculativeExecutions The number of speculative executions (not including the first) executed before - * getting a valid response. - * @property {Uuid} traceId Identifier of the trace session. - * @property {Array.} warnings Warning messages generated by the server when executing the query. - * @property {Boolean} isSchemaInAgreement Whether the cluster had reached schema agreement after the execution of - * this query. - *

- * After a successful schema-altering query (ex: creating a table), the driver will check if - * the cluster's nodes agree on the new schema version. If not, it will keep retrying for a given - * delay (see protocolOptions.maxSchemaAgreementWaitSeconds). - *

- *

- * Note that the schema agreement check is only performed for schema-altering queries For other - * query types, this method will always return true. If this method returns false, - * clients can call [Metadata.checkSchemaAgreement()]{@link module:metadata~Metadata#checkSchemaAgreement} later to - * perform the check manually. - *

- */ - this.info = { - queriedHost: host, - triedHosts: triedHosts, - speculativeExecutions: speculativeExecutions, - achievedConsistency: consistency, - traceId: null, - warnings: null, - customPayload: null, - isSchemaInAgreement - }; - - if (response.flags) { - this.info.traceId = response.flags.traceId; - this.info.warnings = response.flags.warnings; - this.info.customPayload = response.flags.customPayload; - } - - /** - * Gets an array rows returned by the query. - * When the result set represents a response from a write query, this property will be undefined. - * When the read query result contains more rows than the fetch size (5000), this property will only contain the - * first rows up to fetch size. To obtain all the rows, you can use the built-in async iterator that will retrieve the - * following pages of results. - * @type {Array|undefined} - */ - this.rows = response.rows; - - /** - * Gets the row length of the result, regardless if the result has been buffered or not - * @type {Number|undefined} - */ - this.rowLength = this.rows ? this.rows.length : response.rowLength; - - /** - * Gets the columns returned in this ResultSet. - * @type {Array.<{name, type}>} - * @default null - */ - this.columns = null; - - /** - * A string token representing the current page state of query. It can be used in the following executions to - * continue paging and retrieve the remained of the result for the query. - * @type {String|null} - * @default null - */ - this.pageState = null; - - /** - * Method used to manually fetch the next page of results. - * This method is only exposed when using the {@link Client#eachRow} method and there are more rows available in - * following pages. - * @type Function - */ - this.nextPage = undefined; - - /** - * Method used internally to fetch the next page of results using promises. - * @internal - * @ignore - * @type {Function} - */ - this.nextPageAsync = undefined; - - const meta = response.meta; - - if (meta) { - this.columns = meta.columns; - - if (meta.pageState) { - this.pageState = meta.pageState.toString('hex'); - - // Expose rawPageState internally - Object.defineProperty(this, 'rawPageState', { value: meta.pageState, enumerable: false }); - } - } -} - -/** - * Returns the first row or null if the result rows are empty. - */ -ResultSet.prototype.first = function () { - if (this.rows && this.rows.length) { - return this.rows[0]; - } - return null; -}; - -ResultSet.prototype.getPageState = function () { - // backward-compatibility - return this.pageState; -}; - -ResultSet.prototype.getColumns = function () { - // backward-compatibility - return this.columns; -}; - -/** - * When this instance is the result of a conditional update query, it returns whether it was successful. - * Otherwise, it returns true. - *

- * For consistency, this method always returns true for non-conditional queries (although there is - * no reason to call the method in that case). This is also the case for conditional DDL statements - * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return - * information whether it was applied or not. - *

- */ -ResultSet.prototype.wasApplied = function () { - if (!this.rows || this.rows.length === 0) { - return true; - } - const firstRow = this.rows[0]; - const applied = firstRow['[applied]']; - return typeof applied === 'boolean' ? applied : true; -}; - -/** - * Gets the iterator function. - *

- * Retrieves the iterator of the underlying fetched rows, without causing the driver to fetch the following - * result pages. For more information on result paging, - * [visit the documentation]{@link http://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. - *

- * @alias module:types~ResultSet#@@iterator - * @see {@link module:types~ResultSet#@@asyncIterator} - * @example Using for...of statement - * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; - * const result = await client.execute(query, [ id ], { prepare: true }); - * for (const row of result) { - * console.log(row['email']); - * } - * @returns {Iterator.} - */ -ResultSet.prototype[Symbol.iterator] = function getIterator() { - if (!this.rows) { - return utils.emptyArray[Symbol.iterator](); - } - return this.rows[Symbol.iterator](); -}; - -/** - * Gets the async iterator function. - *

- * Retrieves the async iterator representing the entire query result, the driver will fetch the following result - * pages. - *

- *

Use the async iterator when the query result might contain more rows than the fetchSize.

- *

- * Note that using the async iterator will not affect the internal state of the ResultSet instance. - * You should avoid using both rows property that contains the row instances of the first page of - * results, and the async iterator, that will yield all the rows in the result regardless on the number of pages. - *

- *

Multiple concurrent async iterations are not supported.

- * @alias module:types~ResultSet#@@asyncIterator - * @example Using for await...of statement - * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; - * const result = await client.execute(query, [ id ], { prepare: true }); - * for await (const row of result) { - * console.log(row['email']); - * } - * @returns {AsyncIterator} - */ -ResultSet.prototype[asyncIteratorSymbol] = function getAsyncGenerator() { - let index = 0; - let pageState = this.rawPageState; - let rows = this.rows; - - if (!rows || rows.length === 0) { - return { next: () => Promise.resolve({ done: true }) }; - } - - const self = this; - - // Async generators are not present in Node.js 8, implement it manually - return { - async next() { - if (index >= rows.length && pageState) { - if (!self.nextPageAsync) { - throw new errors.DriverInternalError('Property nextPageAsync should be set when pageState is defined'); - } - - const rs = await self.nextPageAsync(pageState); - rows = rs.rows; - index = 0; - pageState = rs.rawPageState; - } - - if (index < rows.length) { - return { done: false, value: rows[index++] }; - } - - return { done: true }; - } - }; -}; - -/** - * Determines whether there are more pages of results. - * If so, the driver will initially retrieve and contain only the first page of results. - * To obtain all the rows, use the [AsyncIterator]{@linkcode module:types~ResultSet#@@asyncIterator}. - * @returns {boolean} - */ -ResultSet.prototype.isPaged = function() { - return !!this.rawPageState; -}; - -module.exports = ResultSet; \ No newline at end of file diff --git a/lib/types/result-set.ts b/lib/types/result-set.ts new file mode 100644 index 000000000..948f6b20a --- /dev/null +++ b/lib/types/result-set.ts @@ -0,0 +1,301 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import type { DataTypeInfo } from "../encoder"; +import errors from "../errors"; +import type { consistencies, Uuid } from "../types"; +import utils from "../utils"; +import Row from "./row"; + + +//TODO: Node.js started to support Symbol.asyncIterator since version 10. Can we drop the support for `@@asyncIterator`? + +/** @module types */ + +/** + * @class + * @classdesc Represents the result of a query. + */ +class ResultSet implements Iterable, AsyncIterable { + info: { + queriedHost: string, + triedHosts: { [key: string]: any; }, + speculativeExecutions: number, + achievedConsistency: consistencies, + traceId: Uuid, + warnings: string[], + customPayload: any, + //TODO: This was not exposed in the past, I believe it should + isSchemaInAgreement: boolean + }; + columns: Array<{ name: string, type: DataTypeInfo }>; + nextPage: (() => void) | null; + pageState: string; + rowLength: number; + rows: Row[]; + /** @internal */ + nextPageAsync: Function | undefined; + /** @internal */ + rawPageState: any; + + /** + * Creates a new instance of ResultSet. 
+ * @internal + * @param {Object} response + * @param {String} host + * @param {Object} triedHosts + * @param {Number} speculativeExecutions + * @param {Number} consistency + * @param {Boolean} isSchemaInAgreement + * @constructor + */ + constructor(response: { rows: Array, flags: { traceId: Uuid, warnings: string[], customPayload: any }, meta?: { columns: Array<{ name: string, type: DataTypeInfo }>, pageState: Buffer } }, host: string, triedHosts: { [key: string]: any; }, speculativeExecutions: number, consistency: consistencies, isSchemaInAgreement: boolean) { + // if no execution was made at all, set to 0. + if (speculativeExecutions === -1) { + speculativeExecutions = 0; + } + + /** + * Information on the execution of a successful query: + * @member {Object} + * @property {Number} achievedConsistency The consistency level that has been actually achieved by the query. + * @property {String} queriedHost The Cassandra host that coordinated this query. + * @property {Object} triedHosts Gets the associative array of host that were queried before getting a valid response, + * being the last host the one that replied correctly. + * @property {Object} speculativeExecutions The number of speculative executions (not including the first) executed before + * getting a valid response. + * @property {Uuid} traceId Identifier of the trace session. + * @property {Array.} warnings Warning messages generated by the server when executing the query. + * @property {Boolean} isSchemaInAgreement Whether the cluster had reached schema agreement after the execution of + * this query. + *

+ * After a successful schema-altering query (ex: creating a table), the driver will check if + * the cluster's nodes agree on the new schema version. If not, it will keep retrying for a given + * delay (see protocolOptions.maxSchemaAgreementWaitSeconds). + *

+ *

+ * Note that the schema agreement check is only performed for schema-altering queries. For other + * query types, this method will always return true. If this method returns false, + * clients can call [Metadata.checkSchemaAgreement()]{@link module:metadata~Metadata#checkSchemaAgreement} later to + * perform the check manually. + *
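A sketch of how this execution information is typically read after a query (illustrative only; `client` and `id` are hypothetical and client.execute() belongs to the driver's Client, not this file):

    // inside an async function, with `client` already connected
    const result = await client.execute('SELECT name FROM ks.users WHERE id = ?', [id], { prepare: true });

    console.log(result.info.queriedHost);         // coordinator that served the request
    console.log(result.info.achievedConsistency); // consistency level actually achieved
    console.log(result.info.isSchemaInAgreement); // schema agreement after a schema-altering query
    (result.info.warnings || []).forEach(w => console.log('server warning:', w));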

+ */ + this.info = { + queriedHost: host, + triedHosts: triedHosts, + speculativeExecutions: speculativeExecutions, + achievedConsistency: consistency, + traceId: null, + warnings: null, + customPayload: null, + isSchemaInAgreement + }; + + if (response.flags) { + this.info.traceId = response.flags.traceId; + this.info.warnings = response.flags.warnings; + this.info.customPayload = response.flags.customPayload; + } + + /** + * Gets an array rows returned by the query. + * When the result set represents a response from a write query, this property will be undefined. + * When the read query result contains more rows than the fetch size (5000), this property will only contain the + * first rows up to fetch size. To obtain all the rows, you can use the built-in async iterator that will retrieve the + * following pages of results. + * @type {Array|undefined} + */ + this.rows = response.rows; + + /** + * Gets the row length of the result, regardless if the result has been buffered or not + * @type {Number|undefined} + */ + // @ts-ignore + this.rowLength = this.rows ? this.rows.length : response.rowLength; + + /** + * Gets the columns returned in this ResultSet. + * @type {Array.<{name, type}>} + * @default null + */ + this.columns = null; + + /** + * A string token representing the current page state of query. It can be used in the following executions to + * continue paging and retrieve the remained of the result for the query. + * @type {String|null} + * @default null + */ + this.pageState = null; + + /** + * Method used to manually fetch the next page of results. + * This method is only exposed when using the {@link Client#eachRow} method and there are more rows available in + * following pages. + * @type Function + */ + this.nextPage = undefined; + + /** + * Method used internally to fetch the next page of results using promises. + * @internal + * @ignore + * @type {Function} + */ + this.nextPageAsync = undefined; + + const meta = response.meta; + + if (meta) { + this.columns = meta.columns; + + if (meta.pageState) { + this.pageState = meta.pageState.toString('hex'); + + // Expose rawPageState internally + Object.defineProperty(this, 'rawPageState', { value: meta.pageState, enumerable: false }); + } + } + } + + /** + * Returns the first row or null if the result rows are empty. + */ + first(): Row { + if (this.rows && this.rows.length) { + return this.rows[0]; + } + return null; + } + + getPageState(): string { + return this.pageState; + } + + getColumns(): Array<{ name: string, type: DataTypeInfo }> { + return this.columns; + } + + /** + * When this instance is the result of a conditional update query, it returns whether it was successful. + * Otherwise, it returns true. + *

+ * For consistency, this method always returns true for non-conditional queries (although there is + * no reason to call the method in that case). This is also the case for conditional DDL statements + * (CREATE KEYSPACE... IF NOT EXISTS, CREATE TABLE... IF NOT EXISTS), for which the server doesn't return + * information about whether it was applied or not. + *
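For example (a sketch, not part of the patch; `client`, `id` and `name` are hypothetical), wasApplied() reflects the [applied] column returned by a lightweight transaction:

    const rs = await client.execute(
      'INSERT INTO ks.users (id, name) VALUES (?, ?) IF NOT EXISTS',
      [id, name],
      { prepare: true });

    if (!rs.wasApplied()) {
      // The row already existed; rs.rows[0] holds the currently stored values.
    }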

+ */ + wasApplied(): boolean { + if (!this.rows || this.rows.length === 0) { + return true; + } + const firstRow = this.rows[0]; + const applied = firstRow['[applied]']; + return typeof applied === 'boolean' ? applied : true; + } + + /** + * Gets the iterator function. + *

+ * Retrieves the iterator of the underlying fetched rows, without causing the driver to fetch the following + * result pages. For more information on result paging, + * [visit the documentation]{@link http://docs.datastax.com/en/developer/nodejs-driver/latest/features/paging/}. + *

+ * @alias module:types~ResultSet#@@iterator + * @see {@link module:types~ResultSet#@@asyncIterator} + * @example Using for...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for (const row of result) { + * console.log(row['email']); + * } + * @returns {Iterator.} + */ + [Symbol.iterator](): Iterator { + if (!this.rows) { + return utils.emptyArray[Symbol.iterator](); + } + return this.rows[Symbol.iterator](); + } + + /** + * Gets the async iterator function. + *

+ * Retrieves the async iterator representing the entire query result; the driver will fetch the following result + * pages. + *

+ *

Use the async iterator when the query result might contain more rows than the fetchSize.

+ *

+ * Note that using the async iterator will not affect the internal state of the ResultSet instance. + * You should avoid using both the rows property, which contains the row instances of the first page of + * results, and the async iterator, which will yield all the rows in the result regardless of the number of pages. + *

+ *

Multiple concurrent async iterations are not supported.

+ * @alias module:types~ResultSet#@@asyncIterator + * @example Using for await...of statement + * const query = 'SELECT user_id, post_id, content FROM timeline WHERE user_id = ?'; + * const result = await client.execute(query, [ id ], { prepare: true }); + * for await (const row of result) { + * console.log(row['email']); + * } + * @returns {AsyncIterator} + */ + [Symbol.asyncIterator](): AsyncIterator { + let index = 0; + let pageState = this.rawPageState; + let rows = this.rows; + + if (!rows || rows.length === 0) { + return { next: () => Promise.resolve({ done: true, value: undefined }) }; + } + + const self = this; + + return { + async next() { + if (index >= rows.length && pageState) { + if (!self.nextPageAsync) { + throw new errors.DriverInternalError('Property nextPageAsync should be set when pageState is defined'); + } + + const rs = await self.nextPageAsync(pageState); + rows = rs.rows; + index = 0; + pageState = rs.rawPageState; + } + + if (index < rows.length) { + return { done: false, value: rows[index++] }; + } + + return { done: true, value: undefined }; + } + }; + } + + /** + * Determines whether there are more pages of results. + * If so, the driver will initially retrieve and contain only the first page of results. + * To obtain all the rows, use the [AsyncIterator]{@linkcode module:types~ResultSet#@@asyncIterator}. + * @returns {boolean} + */ + isPaged(): boolean { + return !!this.rawPageState; + } +} + +export default ResultSet; \ No newline at end of file diff --git a/lib/types/result-stream.js b/lib/types/result-stream.ts similarity index 82% rename from lib/types/result-stream.js rename to lib/types/result-stream.ts index 90cb64c74..2e9d3e633 100644 --- a/lib/types/result-stream.js +++ b/lib/types/result-stream.ts @@ -13,19 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { Readable } from "stream"; +import errors from "../errors"; +import utils from "../utils"; -'use strict'; -const { Readable } = require('stream'); -const utils = require('../utils'); -const errors = require('../errors'); -const clientOptions = require('../client-options'); /** @module types */ /** * Readable stream using to yield data from a result or a field */ class ResultStream extends Readable { + buffer: any[]; + paused: boolean; + private _cancelAllowed: boolean; + private _handlersObject: { resumeReadingHandler?: Function, cancelHandler?: Function }; + private _highWaterMarkRows: number; + private _readableState: any; + private _readNext: Function; + /** @internal */ constructor(opt) { super(opt); this.buffer = []; @@ -35,6 +41,7 @@ class ResultStream extends Readable { this._highWaterMarkRows = 0; } + /** @internal @ignore */ _read() { this.paused = false; if (this.buffer.length === 0) { @@ -52,10 +59,10 @@ class ResultStream extends Readable { /** * Allows for throttling, helping nodejs keep the internal buffers reasonably sized. 
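Since ResultStream extends Readable, it is consumed like any Node.js readable stream. A sketch under the assumption that the stream is obtained from the driver's streaming API (`client` is hypothetical):

    const stream = client.stream('SELECT * FROM ks.events', [], { prepare: true });

    stream
      .on('readable', function () {
        let row;
        while ((row = this.read()) !== null) {
          // process row
        }
      })
      .on('error', err => console.error(err))
      .on('end', () => console.log('finished'));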
- * @param {function} readNext function that triggers reading the next result chunk - * @ignore + * @param {Function} readNext function that triggers reading the next result chunk + * @ignore @internal */ - _valve(readNext) { + _valve(readNext: Function) { this._readNext = null; if (!readNext) { return; @@ -75,7 +82,7 @@ class ResultStream extends Readable { return length; } - _checkAboveHighWaterMark() { + private _checkAboveHighWaterMark() { if (!this._handlersObject || !this._handlersObject.resumeReadingHandler) { return; } @@ -85,7 +92,7 @@ class ResultStream extends Readable { this._handlersObject.resumeReadingHandler(false); } - _checkBelowHighWaterMark() { + private _checkBelowHighWaterMark() { if (!this._handlersObject || !this._handlersObject.resumeReadingHandler) { return; } @@ -105,9 +112,9 @@ class ResultStream extends Readable { * // ... * // Ask the server to stop pushing rows. * stream.cancel(); - * @ignore + * @ignore @internal */ - cancel(callback) { + cancel(callback: Function) { if (!this._cancelAllowed) { const err = new Error('You can only cancel streaming executions when continuous paging is enabled'); if (!callback) { @@ -141,8 +148,9 @@ class ResultStream extends Readable { this._cancelAllowed = true; this._handlersObject = options; this._highWaterMarkRows = - options.continuousPaging.highWaterMarkRows || clientOptions.continuousPageDefaultHighWaterMark; + // Substitute clientOptions.continuousPaging.highWaterMarkRows by 10000 to remove circular dependency + options.continuousPaging.highWaterMarkRows || 10000; } } -module.exports = ResultStream; \ No newline at end of file +export default ResultStream; \ No newline at end of file diff --git a/lib/types/row.js b/lib/types/row.js deleted file mode 100644 index fca713211..000000000 --- a/lib/types/row.js +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -/** @module types */ -/** - * Represents a result row - * @param {Array} columns - * @constructor - */ -function Row(columns) { - if (!columns) { - throw new Error('Columns not defined'); - } - //Private non-enumerable properties, with double underscore to avoid interfering with column names - Object.defineProperty(this, '__columns', { value: columns, enumerable: false, writable: false}); -} - -/** - * Returns the cell value. 
- * @param {String|Number} columnName Name or index of the column - */ -Row.prototype.get = function (columnName) { - if (typeof columnName === 'number') { - //its an index - return this[this.__columns[columnName].name]; - } - return this[columnName]; -}; - -/** - * Returns an array of the values of the row - * @returns {Array} - */ -Row.prototype.values = function () { - const valuesArray = []; - this.forEach(function (val) { - valuesArray.push(val); - }); - return valuesArray; -}; - -/** - * Returns an array of the column names of the row - * @returns {Array} - */ -Row.prototype.keys = function () { - const keysArray = []; - this.forEach(function (val, key) { - keysArray.push(key); - }); - return keysArray; -}; - -/** - * Executes the callback for each field in the row, containing the value as first parameter followed by the columnName - * @param {Function} callback - */ -Row.prototype.forEach = function (callback) { - for (const columnName in this) { - if (!this.hasOwnProperty(columnName)) { - continue; - } - callback(this[columnName], columnName); - } -}; - -module.exports = Row; \ No newline at end of file diff --git a/lib/types/row.ts b/lib/types/row.ts new file mode 100644 index 000000000..db47c59e1 --- /dev/null +++ b/lib/types/row.ts @@ -0,0 +1,87 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** @module types */ +/** + * Represents a result row + * @param {Array} columns + * @constructor + */ +class Row { + private readonly __columns: Array; + [key: string]: any; + + /** @internal */ + constructor(columns: Array) { + if (!columns) { + throw new Error('Columns not defined'); + } + //Private non-enumerable properties, with double underscore to avoid interfering with column names + Object.defineProperty(this, '__columns', { value: columns, enumerable: false, writable: false}); + } + + /** + * Returns the cell value. 
+ * @param {String|Number} columnName Name or index of the column + */ + get(columnName: string | number): any { + if (typeof columnName === 'number') { + //its an index + return this[this.__columns[columnName].name]; + } + return this[columnName]; + } + + /** + * Returns an array of the values of the row + * @returns {Array} + */ + values(): Array { + const valuesArray = []; + this.forEach(function (val) { + valuesArray.push(val); + }); + return valuesArray; + } + + /** + * Returns an array of the column names of the row + * @returns {Array} + */ + keys(): string[] { + const keysArray = []; + this.forEach(function (val, key) { + keysArray.push(key); + }); + return keysArray; + } + + //TODO: was exposed as forEach(callback: (row: Row) => void): void; + /** + * Executes the callback for each field in the row, containing the value as first parameter followed by the columnName + * @param {Function} callback + */ + forEach(callback: (val: any, key: string) => void): void { + for (const columnName in this) { + if (!this.hasOwnProperty(columnName)) { + continue; + } + callback(this[columnName], columnName); + } + } +} + +export default Row; \ No newline at end of file diff --git a/lib/types/time-uuid.js b/lib/types/time-uuid.js deleted file mode 100644 index d4caae67a..000000000 --- a/lib/types/time-uuid.js +++ /dev/null @@ -1,410 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; -const util = require('util'); -const crypto = require('crypto'); -const Long = require('long'); - -const Uuid = require('./uuid'); -const utils = require('../utils'); - -/** @module types */ -/** - * Oct 15, 1582 in milliseconds since unix epoch - * @const - * @private - */ -const _unixToGregorian = 12219292800000; -/** - * 10,000 ticks in a millisecond - * @const - * @private - */ -const _ticksInMs = 10000; - -const minNodeId = utils.allocBufferFromString('808080808080', 'hex'); -const minClockId = utils.allocBufferFromString('8080', 'hex'); -const maxNodeId = utils.allocBufferFromString('7f7f7f7f7f7f', 'hex'); -const maxClockId = utils.allocBufferFromString('7f7f', 'hex'); - -/** - * Counter used to generate up to 10000 different timeuuid values with the same Date - * @private - * @type {number} - */ -let _ticks = 0; -/** - * Counter used to generate ticks for the current time - * @private - * @type {number} - */ -let _ticksForCurrentTime = 0; -/** - * Remember the last time when a ticks for the current time so that it can be reset - * @private - * @type {number} - */ -let _lastTimestamp = 0; - -/** - * Creates a new instance of Uuid based on the parameters provided according to rfc4122. - * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current - * date. - *
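A brief sketch of the Row access patterns shown above (illustrative only; `result` is a ResultSet obtained elsewhere):

    const row = result.first();
    if (row !== null) {
      console.log(row.get('user_id'));  // by column name
      console.log(row.get(0));          // by column index
      console.log(row.keys());          // column names
      console.log(row.values());        // cell values
      console.log(row['email']);        // columns are also plain enumerable properties
    }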

- * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using - * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's - * recommended that you use the callback-based version of the static methods fromDate() or - * now() in that case. - *

- * @class - * @classdesc Represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. - *

Usage: TimeUuid.now()

- * @extends module:types~Uuid - * @param {Date} [value] The datetime for the instance, if not provided, it will use the current Date. - * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, - * as Ecmascript Dates have only milliseconds precision. - * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. - * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. - * @constructor - */ -function TimeUuid(value, ticks, nodeId, clockId) { - let buffer; - if (value instanceof Buffer) { - if (value.length !== 16) { - throw new Error('Buffer for v1 uuid not valid'); - } - buffer = value; - } - else { - buffer = generateBuffer(value, ticks, nodeId, clockId); - } - Uuid.call(this, buffer); -} - -util.inherits(TimeUuid, Uuid); - -/** - * Generates a TimeUuid instance based on the Date provided using random node and clock values. - * @param {Date} date Date to generate the v1 uuid. - * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, - * as Ecmascript Dates have only milliseconds precision. - * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. - * If not provided, a random nodeId will be generated. - * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. - * If not provided a random clockId will be generated. - * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created - * TimeUuid as second parameter. When a callback is provided, the random portions of the - * TimeUuid instance are created asynchronously. - *

- * When nodeId and/or clockId portions are not provided, this method will generate them using - * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's - * recommended that you use the callback-based version of this method in that case. - *

- * @example Generate a TimeUuid from a ECMAScript Date - * const timeuuid = TimeUuid.fromDate(new Date()); - * @example Generate a TimeUuid from a Date with ticks portion - * const timeuuid = TimeUuid.fromDate(new Date(), 1203); - * @example Generate a TimeUuid from a Date without any random portion - * const timeuuid = TimeUuid.fromDate(new Date(), 1203, 'host01', '02'); - * @example Generate a TimeUuid from a Date with random node and clock identifiers - * TimeUuid.fromDate(new Date(), 1203, function (err, timeuuid) { - * // do something with the generated timeuuid - * }); - */ -TimeUuid.fromDate = function (date, ticks, nodeId, clockId, callback) { - if (typeof ticks === 'function') { - callback = ticks; - ticks = nodeId = clockId = null; - } else if (typeof nodeId === 'function') { - callback = nodeId; - nodeId = clockId = null; - } else if (typeof clockId === 'function') { - callback = clockId; - clockId = null; - } - - if (!callback) { - return new TimeUuid(date, ticks, nodeId, clockId); - } - - utils.parallel([ - next => getOrGenerateRandom(nodeId, 6, (err, buffer) => next(err, nodeId = buffer)), - next => getOrGenerateRandom(clockId, 2, (err, buffer) => next(err, clockId = buffer)), - ], (err) => { - if (err) { - return callback(err); - } - - let timeUuid; - try { - timeUuid = new TimeUuid(date, ticks, nodeId, clockId); - } - catch (e) { - return callback(e); - } - - callback(null, timeUuid); - }); -}; - -/** - * Parses a string representation of a TimeUuid - * @param {String} value - * @returns {TimeUuid} - */ -TimeUuid.fromString = function (value) { - return new TimeUuid(Uuid.fromString(value).getBuffer()); -}; - -/** - * Returns the smaller possible type 1 uuid with the provided Date. - */ -TimeUuid.min = function (date, ticks) { - return new TimeUuid(date, ticks, minNodeId, minClockId); -}; - -/** - * Returns the biggest possible type 1 uuid with the provided Date. - */ -TimeUuid.max = function (date, ticks) { - return new TimeUuid(date, ticks, maxNodeId, maxClockId); -}; - -/** - * Generates a TimeUuid instance based on the current date using random node and clock values. - * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. - * If not provided, a random nodeId will be generated. - * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. - * If not provided a random clockId will be generated. - * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created - * TimeUuid as second parameter. When a callback is provided, the random portions of the - * TimeUuid instance are created asynchronously. - *

- * When nodeId and/or clockId portions are not provided, this method will generate them using - * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's - * recommended that you use the callback-based version of this method in that case. - *

- * @example Generate a TimeUuid from a Date without any random portion - * const timeuuid = TimeUuid.now('host01', '02'); - * @example Generate a TimeUuid with random node and clock identifiers - * TimeUuid.now(function (err, timeuuid) { - * // do something with the generated timeuuid - * }); - * @example Generate a TimeUuid based on the current date (might block) - * const timeuuid = TimeUuid.now(); - */ -TimeUuid.now = function (nodeId, clockId, callback) { - return TimeUuid.fromDate(null, null, nodeId, clockId, callback); -}; - - -/** - * Gets the Date and 100-nanoseconds units representation of this instance. - * @returns {{date: Date, ticks: Number}} - */ -TimeUuid.prototype.getDatePrecision = function () { - const timeLow = this.buffer.readUInt32BE(0); - - let timeHigh = 0; - timeHigh |= ( this.buffer[4] & 0xff ) << 8; - timeHigh |= this.buffer[5] & 0xff; - timeHigh |= ( this.buffer[6] & 0x0f ) << 24; - timeHigh |= ( this.buffer[7] & 0xff ) << 16; - - const val = Long.fromBits(timeLow, timeHigh); - const ticksInMsLong = Long.fromNumber(_ticksInMs); - const ticks = val.modulo(ticksInMsLong); - const time = val - .div(ticksInMsLong) - .subtract(Long.fromNumber(_unixToGregorian)); - return { date: new Date(time.toNumber()), ticks: ticks.toNumber()}; -}; - -/** - * Gets the Date representation of this instance. - * @returns {Date} - */ -TimeUuid.prototype.getDate = function () { - return this.getDatePrecision().date; -}; - -/** - * Returns the node id this instance - * @returns {Buffer} - */ -TimeUuid.prototype.getNodeId = function () { - return this.buffer.slice(10); -}; - -/** - * Returns the clock id this instance, with the variant applied (first 2 msb being 1 and 0). - * @returns {Buffer} - */ -TimeUuid.prototype.getClockId = function () { - return this.buffer.slice(8, 10); -}; - -/** - * Returns the node id this instance as an ascii string - * @returns {String} - */ -TimeUuid.prototype.getNodeIdString = function () { - return this.buffer.slice(10).toString('ascii'); -}; - -function writeTime(buffer, time, ticks) { - //value time expressed in ticks precision - const val = Long - .fromNumber(time + _unixToGregorian) - .multiply(Long.fromNumber(10000)) - .add(Long.fromNumber(ticks)); - const timeHigh = val.getHighBitsUnsigned(); - buffer.writeUInt32BE(val.getLowBitsUnsigned(), 0); - buffer.writeUInt16BE(timeHigh & 0xffff, 4); - buffer.writeUInt16BE(timeHigh >>> 16 & 0xffff, 6); -} - -/** - * Returns a buffer of length 2 representing the clock identifier - * @param {String|Buffer} clockId - * @returns {Buffer} - * @private - */ -function getClockId(clockId) { - let buffer = clockId; - if (typeof clockId === 'string') { - buffer = utils.allocBufferFromString(clockId, 'ascii'); - } - if (!(buffer instanceof Buffer)) { - //Generate - buffer = getRandomBytes(2); - } - else if (buffer.length !== 2) { - throw new Error('Clock identifier must have 2 bytes'); - } - return buffer; -} - -/** - * Returns a buffer of length 6 representing the clock identifier - * @param {String|Buffer} nodeId - * @returns {Buffer} - * @private - */ -function getNodeId(nodeId) { - let buffer = nodeId; - if (typeof nodeId === 'string') { - buffer = utils.allocBufferFromString(nodeId, 'ascii'); - } - if (!(buffer instanceof Buffer)) { - //Generate - buffer = getRandomBytes(6); - } - else if (buffer.length !== 6) { - throw new Error('Node identifier must have 6 bytes'); - } - return buffer; -} - -/** - * Returns the ticks portion of a timestamp. 
If the ticks are not provided an internal counter is used that gets reset at 10000. - * @private - * @param {Number} [ticks] - * @returns {Number} - */ -function getTicks(ticks) { - if (typeof ticks !== 'number'|| ticks >= _ticksInMs) { - _ticks++; - if (_ticks >= _ticksInMs) { - _ticks = 0; - } - ticks = _ticks; - } - return ticks; -} - -/** - * Returns an object with the time representation of the date expressed in milliseconds since unix epoch - * and a ticks property for the 100-nanoseconds precision. - * @private - * @returns {{time: Number, ticks: Number}} - */ -function getTimeWithTicks(date, ticks) { - if (!(date instanceof Date) || isNaN(date.getTime())) { - // time with ticks for the current time - date = new Date(); - const time = date.getTime(); - _ticksForCurrentTime++; - if(_ticksForCurrentTime > _ticksInMs || time > _lastTimestamp) { - _ticksForCurrentTime = 0; - _lastTimestamp = time; - } - ticks = _ticksForCurrentTime; - } - return { - time: date.getTime(), - ticks: getTicks(ticks) - }; -} - -function getRandomBytes(length) { - return crypto.randomBytes(length); -} - -function getOrGenerateRandom(id, length, callback) { - if (id) { - return callback(null, id); - } - crypto.randomBytes(length, callback); -} - -/** - * Generates a 16-length Buffer instance - * @private - * @param {Date} date - * @param {Number} ticks - * @param {String|Buffer} nodeId - * @param {String|Buffer} clockId - * @returns {Buffer} - */ -function generateBuffer(date, ticks, nodeId, clockId) { - const timeWithTicks = getTimeWithTicks(date, ticks); - nodeId = getNodeId(nodeId); - clockId = getClockId(clockId); - const buffer = utils.allocBufferUnsafe(16); - //Positions 0-7 Timestamp - writeTime(buffer, timeWithTicks.time, timeWithTicks.ticks); - //Position 8-9 Clock - clockId.copy(buffer, 8, 0); - //Positions 10-15 Node - nodeId.copy(buffer, 10, 0); - //Version Byte: Time based - //0001xxxx - //turn off first 4 bits - buffer[6] = buffer[6] & 0x0f; - //turn on fifth bit - buffer[6] = buffer[6] | 0x10; - - //IETF Variant Byte: 1.0.x - //10xxxxxx - //turn off first 2 bits - buffer[8] = buffer[8] & 0x3f; - //turn on first bit - buffer[8] = buffer[8] | 0x80; - return buffer; -} - -module.exports = TimeUuid; \ No newline at end of file diff --git a/lib/types/time-uuid.ts b/lib/types/time-uuid.ts new file mode 100644 index 000000000..d7067383c --- /dev/null +++ b/lib/types/time-uuid.ts @@ -0,0 +1,429 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import crypto from "crypto"; +import Long from "long"; +import utils, { type ValueCallback } from "../utils"; +import Uuid from "./uuid"; + +/** @module types */ +/** + * Oct 15, 1582 in milliseconds since unix epoch + * @const + * @private + */ +const _unixToGregorian = 12219292800000; +/** + * 10,000 ticks in a millisecond + * @const + * @private + */ +const _ticksInMs = 10000; + +const minNodeId = utils.allocBufferFromString('808080808080', 'hex'); +const minClockId = utils.allocBufferFromString('8080', 'hex'); +const maxNodeId = utils.allocBufferFromString('7f7f7f7f7f7f', 'hex'); +const maxClockId = utils.allocBufferFromString('7f7f', 'hex'); + +/** + * Counter used to generate up to 10000 different timeuuid values with the same Date + * @private + * @type {number} + */ +let _ticks: number = 0; +/** + * Counter used to generate ticks for the current time + * @private + * @type {number} + */ +let _ticksForCurrentTime: number = 0; +/** + * Remember the last time when a ticks for the current time so that it can be reset + * @private + * @type {number} + */ +let _lastTimestamp: number = 0; + +/** + * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current + * date. + *

+ * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of the static methods fromDate() or + * now() in that case. + *

+ * @class + * @classdesc Represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. + *

+ * Usage: TimeUuid.now()
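A brief usage sketch of the TimeUuid API added in this file (illustrative only, not part of the patch; it assumes TimeUuid is imported from this module, and the dates and variable names are made up):

const id = TimeUuid.now();                           // synchronous form, may block while gathering random bytes
TimeUuid.now((err, asyncId) => { /* random portions generated asynchronously */ });
const lower = TimeUuid.min(new Date('2024-01-01'));  // lower bound v1 uuid for that Date (see min() below)
const upper = TimeUuid.max(new Date('2024-02-01'));  // upper bound v1 uuid for that Date (see max() below)
const when = id.getDate();                           // recovers the Date encoded in the identifier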

+ * @extends module:types~Uuid + */ +class TimeUuid extends Uuid { + /** + * Creates a new instance of Uuid based on the parameters provided according to rfc4122. + * If any of the arguments is not provided, it will be randomly generated, except for the date that will use the current + * date. + *

+ * Note that when nodeId and/or clockId portions are not provided, the constructor will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of the static methods fromDate() or + * now() in that case. + *

+ * This class represents an immutable version 1 universally unique identifier (UUID). A UUID represents a 128-bit value. + *

+ * Usage: TimeUuid.now()

+ * @param {Date} [value] The datetime for the instance, if not provided, it will use the current Date. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as Ecmascript Dates have only milliseconds precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * @constructor + */ + constructor(value: Date | Buffer, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer) { + let buffer; + if (value instanceof Buffer) { + if (value.length !== 16) { + throw new Error('Buffer for v1 uuid not valid'); + } + buffer = value; + } else { + buffer = generateBuffer(value, ticks, nodeId, clockId); + } + super(buffer); + } + + /** + * Generates a TimeUuid instance based on the Date provided using random node and clock values. + * @param {Date} date Date to generate the v1 uuid. + * @param {Number} [ticks] A number from 0 to 10000 representing the 100-nanoseconds units for this instance to fill in the information not available in the Date, + * as Ecmascript Dates have only milliseconds precision. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * If not provided a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid from a ECMAScript Date + * const timeuuid = TimeUuid.fromDate(new Date()); + * @example Generate a TimeUuid from a Date with ticks portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203); + * @example Generate a TimeUuid from a Date without any random portion + * const timeuuid = TimeUuid.fromDate(new Date(), 1203, 'host01', '02'); + * @example Generate a TimeUuid from a Date with random node and clock identifiers + * TimeUuid.fromDate(new Date(), 1203, function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + */ + static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer): TimeUuid; + static fromDate( + date: Date, + ticks: number, + nodeId: string | Buffer, + clockId: string | Buffer, + callback: ValueCallback): void; + static fromDate(date: Date, ticks?: number, nodeId?: string | Buffer, clockId?: string | Buffer, callback?: ValueCallback): TimeUuid | void { + if (typeof ticks === 'function') { + callback = ticks; + ticks = nodeId = clockId = null; + } else if (typeof nodeId === 'function') { + callback = nodeId; + nodeId = clockId = null; + } else if (typeof clockId === 'function') { + callback = clockId; + clockId = null; + } + + if (!callback) { + return new TimeUuid(date, ticks, nodeId, clockId); + } + + utils.parallel([ + next => getOrGenerateRandom(nodeId, 6, (err, buffer) => next(err, nodeId = buffer)), + next => getOrGenerateRandom(clockId, 2, (err, buffer) => next(err, clockId = buffer)), + ], (err) => { + if (err) { + return callback(err, null); + } + + let timeUuid; + try { + timeUuid = new TimeUuid(date, ticks, nodeId, clockId); + } catch (e) { + return callback(e, null); + } + + callback(null, timeUuid); + }); + } + + /** + * Parses a string representation of a TimeUuid + * @param {String} value + * @returns {TimeUuid} + */ + static fromString(value: string): TimeUuid { + return new TimeUuid(Uuid.fromString(value).getBuffer()); + } + + /** + * Returns the smaller possible type 1 uuid with the provided Date. + */ + static min(date: Date, ticks?: number): TimeUuid { + return new TimeUuid(date, ticks, minNodeId, minClockId); + } + + /** + * Returns the biggest possible type 1 uuid with the provided Date. + */ + static max(date: Date, ticks?: number): TimeUuid { + return new TimeUuid(date, ticks, maxNodeId, maxClockId); + } + + /** + * Generates a TimeUuid instance based on the current date using random node and clock values. + * @param {String|Buffer} [nodeId] A 6-length Buffer or string of 6 ascii characters representing the node identifier, ie: 'host01'. + * If not provided, a random nodeId will be generated. + * @param {String|Buffer} [clockId] A 2-length Buffer or string of 6 ascii characters representing the clock identifier. + * If not provided a random clockId will be generated. + * @param {Function} [callback] An optional callback to be invoked with the error as first parameter and the created + * TimeUuid as second parameter. When a callback is provided, the random portions of the + * TimeUuid instance are created asynchronously. + *

+ * When nodeId and/or clockId portions are not provided, this method will generate them using + * crypto.randomBytes(). As it's possible that crypto.randomBytes() might block, it's + * recommended that you use the callback-based version of this method in that case. + *

+ * @example Generate a TimeUuid from a Date without any random portion + * const timeuuid = TimeUuid.now('host01', '02'); + * @example Generate a TimeUuid with random node and clock identifiers + * TimeUuid.now(function (err, timeuuid) { + * // do something with the generated timeuuid + * }); + * @example Generate a TimeUuid based on the current date (might block) + * const timeuuid = TimeUuid.now(); + */ + static now(): TimeUuid; + static now(nodeId: string | Buffer, clockId?: string | Buffer): TimeUuid; + static now(nodeId: string | Buffer, clockId: string | Buffer, callback: ValueCallback): void; + static now(callback: ValueCallback): void; + //TODO: this was exposed: static now(callback: ValueCallback): void; But I think it never works + static now(nodeId?: string | Buffer | ValueCallback, clockId?: string | Buffer | ValueCallback, callback?: ValueCallback): TimeUuid | void { + // @ts-expect-error + return TimeUuid.fromDate(null, null, nodeId, clockId, callback); + } + + /** + * Gets the Date and 100-nanoseconds units representation of this instance. + * @returns {{date: Date, ticks: Number}} + */ + getDatePrecision(): { date: Date; ticks: number; } { + const timeLow = this.buffer.readUInt32BE(0); + + let timeHigh = 0; + timeHigh |= (this.buffer[4] & 0xff) << 8; + timeHigh |= this.buffer[5] & 0xff; + timeHigh |= (this.buffer[6] & 0x0f) << 24; + timeHigh |= (this.buffer[7] & 0xff) << 16; + + const val = Long.fromBits(timeLow, timeHigh); + const ticksInMsLong = Long.fromNumber(_ticksInMs); + const ticks = val.modulo(ticksInMsLong); + const time = val + .div(ticksInMsLong) + .subtract(Long.fromNumber(_unixToGregorian)); + return { date: new Date(time.toNumber()), ticks: ticks.toNumber() }; + } + + /** + * Gets the Date representation of this instance. + * @returns {Date} + */ + getDate(): Date { + return this.getDatePrecision().date; + } + + //TODO: getNodeId, getClockId, and getNodeIdString were not exposed. I think they should be + /** + * Returns the node id this instance + * @returns {Buffer} + */ + getNodeId(): Buffer { + return this.buffer.slice(10); + } + + /** + * Returns the clock id this instance, with the variant applied (first 2 msb being 1 and 0). 
+ * @returns {Buffer} + */ + getClockId(): Buffer { + return this.buffer.slice(8, 10); + } + + /** + * Returns the node id this instance as an ascii string + * @returns {String} + */ + getNodeIdString(): string { + return this.buffer.slice(10).toString('ascii'); + } +} + +function writeTime(buffer: Buffer, time: number, ticks: number) { + const val = Long + .fromNumber(time + _unixToGregorian) + .multiply(Long.fromNumber(10000)) + .add(Long.fromNumber(ticks)); + const timeHigh = val.getHighBitsUnsigned(); + buffer.writeUInt32BE(val.getLowBitsUnsigned(), 0); + buffer.writeUInt16BE(timeHigh & 0xffff, 4); + buffer.writeUInt16BE(timeHigh >>> 16 & 0xffff, 6); +} + +/** + * Returns a buffer of length 2 representing the clock identifier + * @param {String|Buffer} clockId + * @returns {Buffer} + * @private + */ +function getClockId(clockId: string | Buffer): Buffer { + let buffer = clockId; + if (typeof clockId === 'string') { + buffer = utils.allocBufferFromString(clockId, 'ascii'); + } + if (!(buffer instanceof Buffer)) { + //Generate + buffer = getRandomBytes(2); + } + else if (buffer.length !== 2) { + throw new Error('Clock identifier must have 2 bytes'); + } + return buffer; +} + +/** + * Returns a buffer of length 6 representing the clock identifier + * @param {String|Buffer} nodeId + * @returns {Buffer} + * @private + */ +function getNodeId(nodeId: string | Buffer): Buffer { + let buffer = nodeId; + if (typeof nodeId === 'string') { + buffer = utils.allocBufferFromString(nodeId, 'ascii'); + } + if (!(buffer instanceof Buffer)) { + //Generate + buffer = getRandomBytes(6); + } + else if (buffer.length !== 6) { + throw new Error('Node identifier must have 6 bytes'); + } + return buffer; +} + +/** + * Returns the ticks portion of a timestamp. If the ticks are not provided an internal counter is used that gets reset at 10000. + * @private + * @param {Number} [ticks] + * @returns {Number} + */ +function getTicks(ticks?: number): number { + if (typeof ticks !== 'number' || ticks >= _ticksInMs) { + _ticks++; + if (_ticks >= _ticksInMs) { + _ticks = 0; + } + ticks = _ticks; + } + return ticks; +} + +/** + * Returns an object with the time representation of the date expressed in milliseconds since unix epoch + * and a ticks property for the 100-nanoseconds precision. 
+ * @private + * @returns {{time: Number, ticks: Number}} + */ +function getTimeWithTicks(date: Date, ticks: number): { time: number; ticks: number; } { + if (!(date instanceof Date) || isNaN(date.getTime())) { + // time with ticks for the current time + date = new Date(); + const time = date.getTime(); + _ticksForCurrentTime++; + if(_ticksForCurrentTime > _ticksInMs || time > _lastTimestamp) { + _ticksForCurrentTime = 0; + _lastTimestamp = time; + } + ticks = _ticksForCurrentTime; + } + return { + time: date.getTime(), + ticks: getTicks(ticks) + }; +} + +function getRandomBytes(length: number): Buffer { + return crypto.randomBytes(length); +} + +function getOrGenerateRandom(id, length, callback) { + if (id) { + return callback(null, id); + } + crypto.randomBytes(length, callback); +} + +/** + * Generates a 16-length Buffer instance + * @private + * @param {Date} date + * @param {Number} ticks + * @param {String|Buffer} nodeId + * @param {String|Buffer} clockId + * @returns {Buffer} + */ +function generateBuffer(date: Date, ticks: number, nodeId: string | Buffer, clockId: string | Buffer): Buffer { + const timeWithTicks = getTimeWithTicks(date, ticks); + nodeId = getNodeId(nodeId); + clockId = getClockId(clockId); + const buffer = utils.allocBufferUnsafe(16); + //Positions 0-7 Timestamp + writeTime(buffer, timeWithTicks.time, timeWithTicks.ticks); + //Position 8-9 Clock + clockId.copy(buffer, 8, 0); + //Positions 10-15 Node + nodeId.copy(buffer, 10, 0); + //Version Byte: Time based + //0001xxxx + //turn off first 4 bits + buffer[6] = buffer[6] & 0x0f; + //turn on fifth bit + buffer[6] = buffer[6] | 0x10; + + //IETF Variant Byte: 1.0.x + //10xxxxxx + //turn off first 2 bits + buffer[8] = buffer[8] & 0x3f; + //turn on first bit + buffer[8] = buffer[8] | 0x80; + return buffer; +} + +export default TimeUuid; \ No newline at end of file diff --git a/lib/types/tuple.js b/lib/types/tuple.js deleted file mode 100644 index f8989ccb7..000000000 --- a/lib/types/tuple.js +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -"use strict"; - -/** @module types */ - -/** - * Creates a new sequence of immutable objects with the parameters provided. - * @class - * @classdesc A tuple is a sequence of immutable objects. - * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. - *

- * As tuples can be used as a Map keys, the {@link Tuple#toString toString()} method calls toString of each element, - * to try to get a unique string key. - *

- * @param args The sequence elements as arguments. - * @constructor - */ -function Tuple(...args) { - - /** - * Immutable elements of Tuple object. - * @type Array - */ - this.elements = args; - - if (this.elements.length === 0) { - throw new TypeError('Tuple must contain at least one value'); - } - - /** - * Returns the number of the elements. - * @type Number - */ - this.length = this.elements.length; -} - -/** - * Creates a new instance of a tuple based on the Array - * @param {Array} elements - * @returns {Tuple} - */ -Tuple.fromArray = function (elements) { - // Apply the elements Array as parameters - return new Tuple(...elements); -}; - -/** - * Returns the value located at the index. - * @param {Number} index Element index - */ -Tuple.prototype.get = function (index) { - return this.elements[index || 0]; -}; - -/** - * Returns the string representation of the sequence surrounded by parenthesis, ie: (1, 2). - *

- * The returned value attempts to be a unique string representation of its values. - *

- * @returns {string} - */ -Tuple.prototype.toString = function () { - return ('(' + - this.elements.reduce(function (prev, x, i) { - return prev + (i > 0 ? ',' : '') + x.toString(); - }, '') + - ')'); -}; - -/** - * Returns the Array representation of the sequence. - * @returns {Array} - */ -Tuple.prototype.toJSON = function () { - return this.elements; -}; - -/** - * Gets the elements as an array - * @returns {Array} - */ -Tuple.prototype.values = function () { - // Clone the elements - return this.elements.slice(0); -}; - -module.exports = Tuple; \ No newline at end of file diff --git a/lib/types/tuple.ts b/lib/types/tuple.ts new file mode 100644 index 000000000..ad664b2ac --- /dev/null +++ b/lib/types/tuple.ts @@ -0,0 +1,113 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** @module types */ + +/** + * @class + * @classdesc A tuple is a sequence of immutable objects. + * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. + *

+ * As tuples can be used as Map keys, the {@link Tuple#toString toString()} method calls toString of each element, + * to try to get a unique string key. + *
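A short usage sketch of Tuple (illustrative only, not part of the patch; assumes the class is imported from this module):

const pair = new Tuple(1, 'a');          // equivalent to Tuple.fromArray([1, 'a'])
pair.get(0);                             // 1
pair.length;                             // 2
const cache = new Map();
cache.set(pair.toString(), 'cached');    // key is '(1,a)', built from each element's toString()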

+ */ +class Tuple { + elements: any[]; + length: number; + + /** + * Creates a new sequence of immutable objects with the parameters provided. + * A tuple is a sequence of immutable objects. + * Tuples are sequences, just like [Arrays]{@link Array}. The only difference is that tuples can't be changed. + *

+ * As tuples can be used as Map keys, the {@link Tuple#toString toString()} method calls toString of each element, + * to try to get a unique string key. + *

+ * @param {any[]} args The sequence elements as arguments. + * @constructor + */ + constructor(...args: any[]) { + /** + * Immutable elements of Tuple object. + * @type Array + */ + this.elements = args; + + if (this.elements.length === 0) { + throw new TypeError('Tuple must contain at least one value'); + } + + /** + * Returns the number of the elements. + * @type Number + */ + this.length = this.elements.length; + } + + /** + * Creates a new instance of a tuple based on the Array + * @param {Array} elements + * @returns {Tuple} + */ + static fromArray(elements: any[]): Tuple { + // Apply the elements Array as parameters + return new Tuple(...elements); + } + + /** + * Returns the value located at the index. + * @param {Number} index Element index + */ + get(index: number): any { + return this.elements[index || 0]; + } + + /** + * Returns the string representation of the sequence surrounded by parenthesis, ie: (1, 2). + *

+ * The returned value attempts to be a unique string representation of its values. + *

+ * @returns {string} + */ + toString(): string { + return ( + '(' + + this.elements.reduce((prev, x, i) => prev + (i > 0 ? ',' : '') + x.toString(), '') + + ')' + ); + } + + //TODO: was exposed with return type of string. + /** + * Returns the Array representation of the sequence. + * @returns {Array} + */ + toJSON(): any[] { + return this.elements; + } + + /** + * Gets the elements as an array + * @returns {Array} + */ + values(): any[] { + // Clone the elements + return this.elements.slice(0); + } +} + +export default Tuple; \ No newline at end of file diff --git a/lib/types/uuid.js b/lib/types/uuid.js deleted file mode 100644 index 56281d58f..000000000 --- a/lib/types/uuid.js +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -'use strict'; - -const crypto = require('crypto'); -const utils = require('../utils'); - -/** @module types */ - -/** - * Creates a new instance of Uuid based on a Buffer - * @class - * @classdesc Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. - * @param {Buffer} buffer The 16-length buffer. - * @constructor - */ -function Uuid(buffer) { - if (!buffer || buffer.length !== 16) { - throw new Error('You must provide a buffer containing 16 bytes'); - } - this.buffer = buffer; -} - -/** - * Parses a string representation of a Uuid - * @param {String} value - * @returns {Uuid} - */ -Uuid.fromString = function (value) { - //36 chars: 32 + 4 hyphens - if (typeof value !== 'string' || value.length !== 36) { - throw new Error('Invalid string representation of Uuid, it should be in the 00000000-0000-0000-0000-000000000000'); - } - return new Uuid(utils.allocBufferFromString(value.replace(/-/g, ''), 'hex')); -}; - -/** - * Creates a new random (version 4) Uuid. - * @param {function} [callback] Optional callback to be invoked with the error as first parameter and the created Uuid as - * second parameter. - * @returns {Uuid} - */ -Uuid.random = function (callback) { - if (callback) { - getRandomBytes(function(err, buffer) { - if (err) { - return callback(err); - } - return callback(null, createUuidFromBuffer(buffer)); - }); - } else { - const buffer = getRandomBytes(); - return createUuidFromBuffer(buffer); - } -}; - -/** - * Gets the bytes representation of a Uuid - * @returns {Buffer} - */ -Uuid.prototype.getBuffer = function () { - return this.buffer; -}; -/** - * Compares this object to the specified object. - * The result is true if and only if the argument is not null, is a UUID object, and contains the same value, bit for bit, as this UUID. - * @param {Uuid} other The other value to test for equality. - */ -Uuid.prototype.equals = function (other) { - return other instanceof Uuid && this.buffer.equals(other.buffer); -}; - -/** - * Returns a string representation of the value of this Uuid instance. - * 32 hex separated by hyphens, in the form of 00000000-0000-0000-0000-000000000000. 
- * @returns {String} - */ -Uuid.prototype.toString = function () { - //32 hex representation of the Buffer - const hexValue = getHex(this); - return ( - hexValue.substr(0, 8) + '-' + - hexValue.substr(8, 4) + '-' + - hexValue.substr(12, 4) + '-' + - hexValue.substr(16, 4) + '-' + - hexValue.substr(20, 12)); -}; - -/** - * Provide the name of the constructor and the string representation - * @returns {string} - */ -Uuid.prototype.inspect = function () { - return this.constructor.name + ': ' + this.toString(); -}; - -/** - * Returns the string representation. - * Method used by the native JSON.stringify() to serialize this instance. - */ -Uuid.prototype.toJSON = function () { - return this.toString(); -}; - - -/** - * Returns new Uuid - * @private - * @returns {Uuid} - */ -function createUuidFromBuffer (buffer) { - //clear the version - buffer[6] &= 0x0f; - //set the version 4 - buffer[6] |= 0x40; - //clear the variant - buffer[8] &= 0x3f; - //set the IETF variant - buffer[8] |= 0x80; - return new Uuid(buffer); -} - -/** - * @private - * @returns {String} 32 hex representation of the instance, without separators - */ -function getHex (uuid) { - return uuid.buffer.toString('hex'); -} - -/** - * Gets a crypto generated 16 bytes - * @private - * @returns {Buffer} - */ -function getRandomBytes (cb) { - return crypto.randomBytes(16, cb); -} - -module.exports = Uuid; diff --git a/lib/types/uuid.ts b/lib/types/uuid.ts new file mode 100644 index 000000000..3c548a909 --- /dev/null +++ b/lib/types/uuid.ts @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import crypto from "crypto"; +import utils, { type ValueCallback } from "../utils"; + +/** @module types */ + +/** + * @class + * @classdesc Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. + */ +class Uuid { + /** @internal */ + buffer: Buffer; + + /** + * Creates a new instance of Uuid based on a Buffer + * Represents an immutable universally unique identifier (UUID). A UUID represents a 128-bit value. + * @param {Buffer} buffer The 16-length buffer. + * @constructor + */ + constructor(buffer: Buffer) { + if (!buffer || buffer.length !== 16) { + throw new Error('You must provide a buffer containing 16 bytes'); + } + this.buffer = buffer; + } + + /** + * Parses a string representation of a Uuid + * @param {String} value + * @returns {Uuid} + */ + static fromString(value: string): Uuid { + //36 chars: 32 + 4 hyphens + if (typeof value !== 'string' || value.length !== 36) { + throw new Error('Invalid string representation of Uuid, it should be in the 00000000-0000-0000-0000-000000000000'); + } + return new Uuid(utils.allocBufferFromString(value.replace(/-/g, ''), 'hex')); + } + + /** + * Creates a new random (version 4) Uuid. + * @param {function} [callback] Optional callback to be invoked with the error as first parameter and the created Uuid as + * second parameter. 
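A usage sketch of the two random() forms described here (illustrative only, not part of the patch):

const id = Uuid.random();                      // synchronous, may block on crypto.randomBytes(16)
Uuid.random((err, other) => { /* bytes generated asynchronously */ });
Uuid.fromString(id.toString()).equals(id);     // true: round-trips through the hyphenated hex form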
+ * @returns {Uuid} + */ + static random(): Uuid; + static random(callback: ValueCallback): void; + static random(callback?: ValueCallback): Uuid | void { + if (callback) { + getRandomBytes(function(err, buffer) { + if (err) { + return callback(err, null); + } + return callback(null, createUuidFromBuffer(buffer)); + }); + } else { + const buffer = getRandomBytes(); + return createUuidFromBuffer(buffer); + } + } + + /** + * Gets the bytes representation of a Uuid + * @returns {Buffer} + */ + getBuffer(): Buffer { + return this.buffer; + } + + /** + * Compares this object to the specified object. + * The result is true if and only if the argument is not null, is a UUID object, and contains the same value, bit for bit, as this UUID. + * @param {Uuid} other The other value to test for equality. + */ + equals(other: Uuid): boolean { + return other instanceof Uuid && this.buffer.equals(other.buffer); + } + + /** + * Returns a string representation of the value of this Uuid instance. + * 32 hex separated by hyphens, in the form of 00000000-0000-0000-0000-000000000000. + * @returns {String} + */ + toString(): string { + //32 hex representation of the Buffer + const hexValue = getHex(this); + return ( + hexValue.substr(0, 8) + '-' + + hexValue.substr(8, 4) + '-' + + hexValue.substr(12, 4) + '-' + + hexValue.substr(16, 4) + '-' + + hexValue.substr(20, 12) + ); + } + + /** + * Provide the name of the constructor and the string representation + * @returns {string} + */ + inspect(): string { + return this.constructor.name + ': ' + this.toString(); + } + + /** + * Returns the string representation. + * Method used by the native JSON.stringify() to serialize this instance. + */ + toJSON(): string { + return this.toString(); + } +} + +/** + * Returns new Uuid + * @private + * @returns {Uuid} + */ +function createUuidFromBuffer (buffer: Buffer): Uuid { + //clear the version + buffer[6] &= 0x0f; + //set the version 4 + buffer[6] |= 0x40; + //clear the variant + buffer[8] &= 0x3f; + //set the IETF variant + buffer[8] |= 0x80; + return new Uuid(buffer); +} + +/** + * @private + * @returns {String} 32 hex representation of the instance, without separators + */ +function getHex(uuid: Uuid): string { + return uuid.buffer.toString('hex'); +} + +/** + * Gets a crypto generated 16 bytes + * @private + * @returns {Buffer | void} + */ +function getRandomBytes(): Buffer; +function getRandomBytes(cb: (err: Error | null, buffer: Buffer) => void): void; +function getRandomBytes(cb?: (err: Error | null, buffer: Buffer) => void): Buffer | void { + if (cb) { + return crypto.randomBytes(16, cb); + } + return crypto.randomBytes(16); +} + +export default Uuid; \ No newline at end of file diff --git a/lib/types/vector.js b/lib/types/vector.ts similarity index 80% rename from lib/types/vector.js rename to lib/types/vector.ts index 308511eea..584cd6e78 100644 --- a/lib/types/vector.js +++ b/lib/types/vector.ts @@ -13,20 +13,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -"use strict"; + /** @module types */ /** * Creates a new instance of Cql Vector, also compatible with Float32Array. * @class */ -const util = require('node:util'); +import util from 'util'; class Vector { /** - * - * @param {Float32Array | Array} elements - * @param {string} [subtype] - */ - constructor (elements, subtype) { + * Returns the number of the elements. 
+ * @type Number + */ + length: number; + subtype: string; + elements: any[]; + /** + * + * @param {Float32Array | Array} elements + * @param {string} [subtype] + */ + constructor (elements: Float32Array | Array, subtype?: string) { if (elements instanceof Float32Array) { this.elements = Array.from(elements); } @@ -39,10 +46,6 @@ class Vector { if (this.elements.length === 0) { throw new TypeError('Vector must contain at least one value'); } - /** - * Returns the number of the elements. - * @type Number - */ this.length = this.elements.length; this.subtype = subtype; return new Proxy(this, { @@ -58,7 +61,7 @@ class Vector { {return obj.elements[key] = value;} return obj[key] = value; }, - ownKeys: function (obj) { + ownKeys: function (_obj) { return Reflect.ownKeys(elements); }, getOwnPropertyDescriptor(target, key) { @@ -67,7 +70,6 @@ class Vector { return { enumerable: true, configurable: true}; } return Reflect.getOwnPropertyDescriptor(target, key); - } }); } @@ -75,14 +77,14 @@ class Vector { * Returns the string representation of the vector. * @returns {string} */ - toString() { + toString(): string { return "[".concat(this.elements.toString(), "]"); } /** * * @param {number} index */ - at(index) { + at(index: number):any { return this.elements[index]; } @@ -90,7 +92,7 @@ class Vector { * * @returns {IterableIterator} an iterator over the elements of the vector */ - [Symbol.iterator]() { + [Symbol.iterator]() : IterableIterator { return this.elements[Symbol.iterator](); } @@ -102,14 +104,14 @@ class Vector { * * @param {(value: any, index: number, array: any[]) => void} callback */ - forEach(callback) { + forEach(callback: (value: any, index: number, array: any[]) => void) { return this.elements.forEach(callback); } /** - * @returns {string | undefined} get the subtype string, e.g., "float", but it's optional so it can return null + * @returns {string | null} get the subtype string, e.g., "float", but it's optional so it can return null */ - getSubtype(){ + getSubtype(): string | null{ return this.subtype; } } @@ -119,4 +121,4 @@ Object.defineProperty(Vector, Symbol.hasInstance, { return (util.types.isProxy(i) && i.IDENTITY === 'Vector') || i instanceof Float32Array; } }); -module.exports = Vector; +export default Vector; diff --git a/lib/types/version-number.js b/lib/types/version-number.ts similarity index 74% rename from lib/types/version-number.js rename to lib/types/version-number.ts index 7f5b74300..7316dd776 100644 --- a/lib/types/version-number.js +++ b/lib/types/version-number.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; + const _versionPattern = /(\d+)\.(\d+)(?:\.(\d+))?(?:\.(\d+)?)?(?:[-~]([\w+]*(?:-\w[.\w]*)*))?(?:\+([.\w]+))?/; @@ -35,10 +35,37 @@ const _versionPattern = /(\d+)\.(\d+)(?:\.(\d+))?(?:\.(\d+)?)?(?:[-~]([\w+]*(?:- * @property {String[]} preReleases Prerelease indicators if present, i.e. SNAPSHOT of X.Y.Z-SNAPSHOT. * @property {String} build Build string if present, i.e. build1 of X.Y.Z+build1. * - * @ignore + * @ignore @internal */ class VersionNumber { - constructor(major, minor, patch, dsePatch, preReleases, build) { + major: number; + minor: number; + patch: number; + dsePatch: number; + preReleases: string[]; + build: string; + + /** + * Represents a version number in the form of X.Y.Z with optional pre-release and build metadata. 
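An illustrative sketch of the internal VersionNumber parse/compare behaviour documented in this block (not part of the patch; the version literals are made up):

const release = VersionNumber.parse('4.0.0');
const snapshot = VersionNumber.parse('4.0.0-SNAPSHOT');
release.compare(snapshot);                       // 1: the pre-release orders before the plain release
VersionNumber.parse('3.11.4').compare(release);  // -1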
+ * + * Version numbers compare the usual way, the major version number (X) is compared first, then + * the minor one (Y) and then the patch level one (Z). If pre-release or other build metadata + * is present for a version, that version is considered less than an otherwise equivalent version + * that doesn't have these labels, otherwise they are considered equal. + * + * As of initial implementation versions are only compared against those with at most patch versions + * more refined comparisons are not needed. + * + * @property {Number} major The major version, X of X.Y.Z. + * @property {Number} minor The minor version, Y of X.Y.Z. + * @property {Number} patch The patch version, Z of X.Y.Z. + * @property {Number} dsePatch The dsePatch version, A of X.Y.Z.A or undefined if not present. + * @property {String[]} preReleases Prerelease indicators if present, i.e. SNAPSHOT of X.Y.Z-SNAPSHOT. + * @property {String} build Build string if present, i.e. build1 of X.Y.Z+build1. + * + * @ignore @internal + */ + constructor(major: number, minor: number, patch?: number, dsePatch?: number, preReleases?: string[], build?: string) { this.major = major; this.minor = minor; this.patch = patch; @@ -50,7 +77,7 @@ class VersionNumber { /** * @return {String} String representation of this version. */ - toString() { + toString(): string { let str = this.major + '.' + this.minor; if (this.patch !== undefined) { str += '.' + this.patch; @@ -74,7 +101,7 @@ class VersionNumber { * @param {VersionNumber} other * @return {Number} -1 if less than other, 0 if equal, 1 if greater than. */ - compare(other) { + compare(other: VersionNumber): number { if (this.major < other.major) { return -1; } else if (this.major > other.major) { @@ -122,7 +149,7 @@ class VersionNumber { return 0; } - static parse(version) { + static parse(version: string) { if (!version) { return null; } @@ -141,4 +168,4 @@ class VersionNumber { } } -module.exports = VersionNumber; \ No newline at end of file +export default VersionNumber; \ No newline at end of file diff --git a/lib/utils.js b/lib/utils.ts similarity index 79% rename from lib/utils.js rename to lib/utils.ts index dfc217a7b..3c7b4480a 100644 --- a/lib/utils.js +++ b/lib/utils.ts @@ -13,22 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; - -const Long = require('long'); -const util = require('util'); -const net = require('net'); -const { EventEmitter } = require('events'); - -const errors = require('./errors'); -const promiseUtils = require('./promise-utils'); +import { EventEmitter } from "events"; +import Long from "long"; +import net from "net"; +import util from "util"; +import type { ClientOptions } from "./client"; +import errors from "./errors"; +import type { ExecutionOptions } from "./execution-options"; +import promiseUtils from "./promise-utils"; + +type ValueCallback = (err: Error, val: T) => void; +type EmptyCallback = (err: Error) => void; +type ArrayOrObject = any[]|{[key: string]: any}|null; /** * Max int that can be accurately represented with 64-bit Number (2^53) * @type {number} * @const */ -const maxInt = 9007199254740992; +const maxInt: number = 9007199254740992; const maxInt32 = 0x7fffffff; @@ -42,25 +45,27 @@ function noop() {} * Forward-compatible allocation of buffer, filled with zeros. 
* @type {Function} */ -const allocBuffer = Buffer.alloc || allocBufferFillDeprecated; +const allocBuffer: Function = Buffer.alloc || allocBufferFillDeprecated; /** * Forward-compatible unsafe allocation of buffer. * @type {Function} */ -const allocBufferUnsafe = Buffer.allocUnsafe || allocBufferDeprecated; +const allocBufferUnsafe: Function = Buffer.allocUnsafe || allocBufferDeprecated; /** * Forward-compatible allocation of buffer to contain a string. * @type {Function} */ -const allocBufferFromString = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromStringDeprecated; +// @ts-ignore +const allocBufferFromString: Function = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromStringDeprecated; /** * Forward-compatible allocation of buffer from an array of bytes * @type {Function} */ -const allocBufferFromArray = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromArrayDeprecated; +// @ts-ignore +const allocBufferFromArray: Function = (Int8Array.from !== Buffer.from && Buffer.from) || allocBufferFromArrayDeprecated; function allocBufferDeprecated(size) { // eslint-disable-next-line @@ -73,7 +78,7 @@ function allocBufferFillDeprecated(size) { return b; } -function allocBufferFromStringDeprecated(text, encoding) { +function allocBufferFromStringDeprecated(text, encoding?) { if (typeof text !== 'string') { throw new TypeError('Expected string, obtained ' + util.inspect(text)); } @@ -93,7 +98,7 @@ function allocBufferFromArrayDeprecated(arr) { * @returns {Function} Returns a wrapper function that invokes the underlying callback only once. * @param {Function} callback */ -function callbackOnce(callback) { +function callbackOnce(callback: Function): Function { let cb = callback; return (function wrapperCallback(err, result) { @@ -127,7 +132,7 @@ function fixStack(stackTrace, error) { * @param {String} info * @param [furtherInfo] */ -function log(type, info, furtherInfo, options) { +function log(type: string, info: string, furtherInfo?, options?) { if (!this.logEmitter) { const effectiveOptions = options || this.options; if (!effectiveOptions || !effectiveOptions.logEmitter) { @@ -159,8 +164,7 @@ function totalLength (arr) { * The main difference between this method is that declared properties with an undefined value are not set * to the target. 
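A minimal sketch of the undefined-skipping behaviour of extend() described above (illustrative only, not part of the patch):

extend({ a: 1 }, { a: undefined, b: 2 });   // returns { a: 1, b: 2 }: the undefined 'a' is not copied onto the target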
*/ -function extend(target) { - const sources = Array.prototype.slice.call(arguments, 1); +function extend(target: any, ...sources: any[]): any { sources.forEach(function (source) { if (!source) { return; @@ -194,10 +198,10 @@ function toLowerCaseProperties(obj) { /** * Extends the target by the most inner props of sources * @param {Object} target + * @param {...Object} sources * @returns {Object} */ -function deepExtend(target) { - const sources = Array.prototype.slice.call(arguments, 1); +function deepExtend(target: any, ...sources: any[]): any { sources.forEach(function (source) { for (const prop in source) { // eslint-disable-next-line no-prototype-builtins @@ -260,7 +264,7 @@ function funcCompare(name, argArray) { * @param {Array} arr * @returns {Iterator} */ -function arrayIterator (arr) { +function arrayIterator (arr: Array): Iterator { return arr[Symbol.iterator](); } @@ -269,7 +273,7 @@ function arrayIterator (arr) { * @param iterator * @returns {Array} */ -function iteratorToArray(iterator) { +function iteratorToArray(iterator): Array { const values = []; let item = iterator.next(); while (!item.done) { @@ -288,7 +292,7 @@ function iteratorToArray(iterator) { * @returns {number} The position of the key in the Array, if it is found. * If it is not found, it returns a negative number which is the bitwise complement of the index of the first element that is larger than key. */ -function binarySearch(arr, key, compareFunc) { +function binarySearch(arr: Array, key, compareFunc: Function): number { let low = 0; let high = arr.length-1; @@ -318,7 +322,7 @@ function binarySearch(arr, key, compareFunc) { * @param item * @param {function} compareFunc */ -function insertSorted(arr, item, compareFunc) { +function insertSorted(arr: Array, item, compareFunc: Function) { if (arr.length === 0) { return arr.push(item); } @@ -335,7 +339,7 @@ function insertSorted(arr, item, compareFunc) { * @param {String} [name] Name of the function to use in the error message. Defaults to 'callback'. * @returns {Function} */ -function validateFn(fn, name) { +function validateFn(fn: T, name: string): T { if (typeof fn !== 'function') { throw new errors.ArgumentError(util.format('%s is not a function', name || 'callback')); } @@ -351,10 +355,10 @@ function validateFn(fn, name) { * @returns {Array} Returns an array of parameters. * @throws {Error} In case a parameter with a specific name is not defined */ -function adaptNamedParamsPrepared(params, columns) { +function adaptNamedParamsPrepared(params: Array | object, columns: Array): Array { if (!params || Array.isArray(params) || !columns || columns.length === 0) { // params is an array or there aren't parameters - return params; + return params as any[]; } const paramsArray = new Array(columns.length); params = toLowerCaseProperties(params); @@ -379,10 +383,10 @@ function adaptNamedParamsPrepared(params, columns) { * @returns {{ params: Array<{name, value}>, namedParameters: boolean, keyIndexes: object }} Returns an array of * parameters and the keys as an associative array. 
*/ -function adaptNamedParamsWithHints(params, execOptions) { +function adaptNamedParamsWithHints(params: {name; value}[] | object, execOptions: ExecutionOptions): { params: Array<{ name; value; }>; namedParameters: boolean; keyIndexes: object; } { if (!params || Array.isArray(params)) { //The parameters is an Array or there isn't parameter - return { params: params, namedParameters: false, keyIndexes: null }; + return { params: params as {name;value}[], namedParameters: false, keyIndexes: null }; } const keys = Object.keys(params); @@ -410,7 +414,7 @@ function adaptNamedParamsWithHints(params, execOptions) { * @param {Number} times * @returns {String} */ -function stringRepeat(val, times) { +function stringRepeat(val: string, times: number): string { if (!times || times < 0) { return null; } @@ -426,9 +430,9 @@ function stringRepeat(val, times) { * @param {Object} obj * @returns {Array} */ -function objectValues(obj) { +function objectValues(obj: object): Array { if (!obj) { - return emptyArray; + return emptyArray as Array; } const keys = Object.keys(obj); const values = new Array(keys.length); @@ -445,7 +449,7 @@ function objectValues(obj) { * @param {Function} handler * @returns {Promise|undefined} */ -function promiseWrapper(options, originalCallback, handler) { +function promiseWrapper(options: ClientOptions, originalCallback: Function, handler: Function): Promise | undefined { if (typeof originalCallback === 'function') { // Callback-based invocation handler.call(this, originalCallback); @@ -462,7 +466,7 @@ function promiseWrapper(options, originalCallback, handler) { * @param {Function} handler * @returns {Promise} */ -function defaultPromiseFactory(handler) { +function defaultPromiseFactory(handler: Function): Promise { return new Promise(function executor(resolve, reject) { handler(function handlerCallback(err, result) { if (err) { @@ -496,7 +500,7 @@ function ifUndefined3(v1, v2, v3) { * @returns {Array} * @private */ -function shuffleArray(arr) { +function shuffleArray(arr: Array): Array { // Fisher–Yates algorithm for (let i = arr.length - 1; i > 0; i--) { // Math.random() has an extremely short permutation cycle length but we don't care about collisions @@ -515,57 +519,64 @@ function shuffleArray(arr) { * Represents a unique set of values. * @constructor */ -function HashSet() { - this.length = 0; - this.items = {}; -} - -/** - * Adds a new item to the set. - * @param {Object} key - * @returns {boolean} Returns true if it was added to the set; false if the key is already present. - */ -HashSet.prototype.add = function (key) { - if (this.contains(key)) { - return false; +class HashSet { + length: number; + items: object; + constructor() { + this.length = 0; + this.items = {}; } - this.items[key] = true; - this.length++; - return true; -}; - -/** - * @returns {boolean} Returns true if the key is present in the set. - */ -HashSet.prototype.contains = function (key) { - return this.length > 0 && this.items[key] === true; -}; - -/** - * Removes the item from set. - * @param key - * @return {boolean} Returns true if the key existed and was removed, otherwise it returns false. - */ -HashSet.prototype.remove = function (key) { - if (!this.contains(key)) { - return false; + /** + * Adds a new item to the set. + * @param {Object} key + * @returns {boolean} Returns true if it was added to the set; false if the key is already present. 
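A short sketch of the HashSet semantics defined below (illustrative only, not part of the patch):

const seen = new HashSet();
seen.add('10.0.0.1:9042');        // true: newly added, length becomes 1
seen.add('10.0.0.1:9042');        // false: already present
seen.contains('10.0.0.1:9042');   // true
seen.remove('10.0.0.1:9042');     // removes it and decrements length (the success path has no explicit return value)
seen.toArray();                   // []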
+ */ + add(key: any): boolean { + if (this.contains(key)) { + return false; + } + this.items[key] = true; + this.length++; + return true; } - delete this.items[key]; - this.length--; -}; - -/** - * Returns an array containing the set items. - * @returns {Array} - */ -HashSet.prototype.toArray = function () { - return Object.keys(this.items); -}; + /** + * @returns {boolean} Returns true if the key is present in the set. + */ + contains(key): boolean { + return this.length > 0 && this.items[key] === true; + } + /** + * Removes the item from set. + * @param key + * @return {boolean} Returns true if the key existed and was removed, otherwise it returns false. + */ + remove(key): boolean { + if (!this.contains(key)) { + return false; + } + delete this.items[key]; + this.length--; + } + /** + * Returns an array containing the set items. + * @returns {Array} + */ + toArray(): Array { + return Object.keys(this.items); + } +} /** * Utility class that resolves host names into addresses. + * @internal */ class AddressResolver { + private _resolve4: any; + private _nameOrIp: string; + private _isIp: number; + private _index: number; + private _addresses: string[]; + private _refreshing: EventEmitter; /** * Creates a new instance of the resolver. @@ -573,7 +584,7 @@ class AddressResolver { * @param {String} options.nameOrIp * @param {Object} [options.dns] */ - constructor(options) { + constructor(options: { nameOrIp: string; dns?: {resolve4?}; }) { if (!options || !options.nameOrIp || !options.dns) { throw new Error('nameOrIp and dns lib must be provided as part of the options'); } @@ -613,7 +624,7 @@ class AddressResolver { try { await this._resolve(); - } catch (err) { + } catch (_err) { // Ignore the possible resolution error } @@ -651,7 +662,7 @@ class AddressResolver { * @param {Function} fn * @param {Function} [callback] */ -function each(arr, fn, callback) { +function each(arr: Array, fn: Function, callback?: Function) { if (!Array.isArray(arr)) { throw new TypeError('First parameter is not an Array'); } @@ -683,7 +694,7 @@ function each(arr, fn, callback) { * @param {Function} fn * @param {Function} [callback] */ -function eachSeries(arr, fn, callback) { +function eachSeries(arr: Array, fn: Function, callback: Function) { if (!Array.isArray(arr)) { throw new TypeError('First parameter is not an Array'); } @@ -723,7 +734,7 @@ function eachSeries(arr, fn, callback) { * @param {Function} fn * @param {Function} [callback] */ -function forEachOf(arr, fn, callback) { +function forEachOf(arr: Array, fn: Function, callback: Function) { return mapEach(arr, fn, true, callback); } @@ -732,7 +743,7 @@ function forEachOf(arr, fn, callback) { * @param {Function} fn * @param {Function} [callback] */ -function map(arr, fn, callback) { +function map(arr: Array, fn: Function, callback: Function) { return mapEach(arr, fn, false, callback); } @@ -785,7 +796,7 @@ function mapEach(arr, fn, useIndex, callback) { * @param {Function} fn * @param {Function} [callback] */ -function mapSeries(arr, fn, callback) { +function mapSeries(arr: Array, fn: Function, callback: Function) { if (!Array.isArray(arr)) { throw new TypeError('First parameter must be an Array'); } @@ -833,7 +844,7 @@ function mapSeries(arr, fn, callback) { * @param {Array.} arr * @param {Function} [callback] */ -function parallel(arr, callback) { +function parallel(arr: Array, callback: Function) { if (!Array.isArray(arr)) { throw new TypeError('First parameter must be an Array'); } @@ -862,7 +873,7 @@ function parallel(arr, callback) { * @param {Array.} 
arr * @param {Function} [callback] */ -function series(arr, callback) { +function series(arr: Array, callback?: Function) { if (!Array.isArray(arr)) { throw new TypeError('First parameter must be an Array'); } @@ -870,7 +881,7 @@ function series(arr, callback) { let index = 0; let sync; next(); - function next(err, result) { + function next(err?, result?) { if (err) { return callback(err); } @@ -895,7 +906,7 @@ function series(arr, callback) { * @param {Function} iteratorFunc * @param {Function} [callback] */ -function times(count, iteratorFunc, callback) { +function times(count: number, iteratorFunc: Function, callback: Function) { callback = callback || noop; count = +count; if (isNaN(count) || count === 0) { @@ -924,7 +935,7 @@ function times(count, iteratorFunc, callback) { * @param {Function} iteratorFunc * @param {Function} [callback] */ -function timesLimit(count, limit, iteratorFunc, callback) { +function timesLimit(count: number, limit: number, iteratorFunc: Function, callback: Function) { let sync = undefined; callback = callback || noop; limit = Math.min(limit, count); @@ -967,7 +978,7 @@ function timesLimit(count, limit, iteratorFunc, callback) { * @param {Function} iteratorFunction * @param {Function} callback */ -function timesSeries(count, iteratorFunction, callback) { +function timesSeries(count: number, iteratorFunction: Function, callback: Function) { count = +count; if (isNaN(count) || count < 1) { return callback(); @@ -1005,10 +1016,10 @@ function timesSeries(count, iteratorFunction, callback) { * @param {Function} fn * @param {Function} callback */ -function whilst(condition, fn, callback) { +function whilst(condition: Function, fn: Function, callback: Function) { let sync = 0; next(); - function next(err) { + function next(err?) { if (err) { return callback(err); } @@ -1041,20 +1052,19 @@ function whilst(condition, fn, callback) { } } - /** * Contains the methods for reading and writing vints into binary format. * Exposes only 2 internal methods, the rest are hidden. 
*/ const VIntCoding = (function () { /** @param {Long} n */ - function encodeZigZag64(n) { + function encodeZigZag64(n: Long) { // (n << 1) ^ (n >> 63); return n.toUnsigned().shiftLeft(1).xor(n.shiftRight(63)); } /** @param {Long} n */ - function decodeZigZag64(n) { + function decodeZigZag64(n: Long) { // (n >>> 1) ^ -(n & 1); return n.shiftRightUnsigned(1).xor(n.and(Long.ONE).negate()); } @@ -1064,7 +1074,7 @@ const VIntCoding = (function () { * @param {Buffer} buffer * @returns {Number} */ - function writeVInt(value, buffer) { + function writeVInt(value: Long, buffer: Buffer): number { return writeUnsignedVInt(encodeZigZag64(value), buffer); } @@ -1073,7 +1083,7 @@ const VIntCoding = (function () { * @param {Buffer} buffer * @returns {number} */ - function writeUnsignedVInt(value, buffer) { + function writeUnsignedVInt(value: Long, buffer: Buffer): number { const size = computeUnsignedVIntSize(value); if (size === 1) { buffer[0] = value.getLowBits(); @@ -1087,7 +1097,7 @@ const VIntCoding = (function () { * @param {Long} value * @returns {number} */ - function computeUnsignedVIntSize(value) { + function computeUnsignedVIntSize(value: Long): number { const magnitude = numberOfLeadingZeros(value.or(Long.ONE)); return (639 - magnitude * 9) >> 6; } @@ -1097,7 +1107,7 @@ const VIntCoding = (function () { * @param {Number} size * @param {Buffer} buffer */ - function encodeVInt(value, size, buffer) { + function encodeVInt(value: Long, size: number, buffer: Buffer) { const extraBytes = size - 1; let intValue = value.getLowBits(); let i; @@ -1118,7 +1128,7 @@ const VIntCoding = (function () { * @param {Long} value * @returns {Number} */ - function numberOfLeadingZeros(value) { + function numberOfLeadingZeros(value: Long): number { if (value.equals(Long.ZERO)) { return 64; } @@ -1158,7 +1168,7 @@ const VIntCoding = (function () { * @param {{value: number}} offset * @returns {Long} */ - function readVInt(buffer, offset) { + function readVInt(buffer: Buffer, offset: { value: number; }): Long { return decodeZigZag64(readUnsignedVInt(buffer, offset)); } @@ -1168,7 +1178,7 @@ const VIntCoding = (function () { * @param {{ value: number}} offset * @returns {Long} */ - function readUnsignedVInt(input, offset) { + function readUnsignedVInt(input: Buffer, offset: { value: number; }): Long { const firstByte = input[offset.value++]; if ((firstByte & 0x80) === 0) { return Long.fromInt(firstByte); @@ -1220,7 +1230,7 @@ const VIntCoding = (function () { * @param {Number} firstByte * @returns {Number} */ - function numberOfExtraBytesToRead(firstByte) { + function numberOfExtraBytesToRead(firstByte: number): number { // Instead of counting 1s of the byte, we negate and count 0 of the byte return numberOfLeadingZerosInt32(~firstByte) - 24; } @@ -1229,7 +1239,7 @@ const VIntCoding = (function () { * @param {Number} extraBytesToRead * @returns {Number} */ - function firstByteValueMask(extraBytesToRead) { + function firstByteValueMask(extraBytesToRead: number): number { return 0xff >> extraBytesToRead; } @@ -1238,8 +1248,8 @@ const VIntCoding = (function () { * @param {Buffer} output * @returns {void} */ - // eslint-disable-next-line no-unused-vars - function writeUnsignedVInt32(value, output) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + function writeUnsignedVInt32(value: number, output: Buffer): void { writeUnsignedVInt(Long.fromNumber(value), output); } @@ -1254,8 +1264,9 @@ const VIntCoding = (function () { * @returns {Number} * @throws VIntOutOfRangeException If the vint doesn't fit into a 
32-bit integer */ - // eslint-disable-next-line no-unused-vars - function getUnsignedVInt32(input, readerIndex) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + // eslint-disable-next-line @typescript-eslint/no-unused-vars + function getUnsignedVInt32(input: Buffer, readerIndex: number): number { return checkedCast(getUnsignedVInt(input, readerIndex, input.length)); } @@ -1266,7 +1277,7 @@ const VIntCoding = (function () { * @param {Number} readerLimit * @returns {Long} */ - function getUnsignedVInt(input, readerIndex, readerLimit) { + function getUnsignedVInt(input: Buffer, readerIndex: number, readerLimit: number): Long { if (readerIndex < 0) {throw new errors.ArgumentError( "Reader index should be non-negative, but was " + readerIndex);} @@ -1296,7 +1307,7 @@ const VIntCoding = (function () { * @param {Long} value * @returns {Number} */ - function checkedCast(value) { + function checkedCast(value: Long): number { const result = value.toInt(); if (value.notEquals(result)) {throw new errors.VIntOutOfRangeException(value);} return result; @@ -1307,7 +1318,7 @@ const VIntCoding = (function () { * @param {Buffer} bytes * @returns {[number, number]} [size, bytes read] */ - function uvintUnpack(bytes) { + function uvintUnpack(bytes: Buffer): [number, number] { const firstByte = bytes[0]; if ((firstByte & 128) === 0) { @@ -1331,7 +1342,7 @@ const VIntCoding = (function () { * @param {Number} val * @returns {Buffer} */ - function uvintPack(val) { + function uvintPack(val: number): Buffer { const rv = []; if (val < 128) { rv.push(val); @@ -1370,49 +1381,90 @@ const VIntCoding = (function () { }; })(); -exports.adaptNamedParamsPrepared = adaptNamedParamsPrepared; -exports.adaptNamedParamsWithHints = adaptNamedParamsWithHints; -exports.AddressResolver = AddressResolver; -exports.allocBuffer = allocBuffer; -exports.allocBufferUnsafe = allocBufferUnsafe; -exports.allocBufferFromArray = allocBufferFromArray; -exports.allocBufferFromString = allocBufferFromString; -exports.arrayIterator = arrayIterator; -exports.binarySearch = binarySearch; -exports.callbackOnce = callbackOnce; -exports.copyBuffer = copyBuffer; -exports.deepExtend = deepExtend; -exports.each = each; -exports.eachSeries = eachSeries; -/** @const */ -exports.emptyArray = Object.freeze([]); -/** @const */ -exports.emptyObject = emptyObject; -exports.extend = extend; -exports.fixStack = fixStack; -exports.forEachOf = forEachOf; -exports.funcCompare = funcCompare; -exports.ifUndefined = ifUndefined; -exports.ifUndefined3 = ifUndefined3; -exports.insertSorted = insertSorted; -exports.iteratorToArray = iteratorToArray; -exports.log = log; -exports.map = map; -exports.mapSeries = mapSeries; -exports.maxInt = maxInt; -exports.noop = noop; -exports.objectValues = objectValues; -exports.parallel = parallel; -exports.promiseWrapper = promiseWrapper; -exports.propCompare = propCompare; -exports.series = series; -exports.shuffleArray = shuffleArray; -exports.stringRepeat = stringRepeat; -exports.times = times; -exports.timesLimit = timesLimit; -exports.timesSeries = timesSeries; -exports.totalLength = totalLength; -exports.validateFn = validateFn; -exports.whilst = whilst; -exports.HashSet = HashSet; -exports.VIntCoding = VIntCoding; \ No newline at end of file + +export default { + adaptNamedParamsPrepared, + adaptNamedParamsWithHints, + AddressResolver, + allocBuffer, + allocBufferUnsafe, + allocBufferFromArray, + allocBufferFromString, + arrayIterator, + binarySearch, + callbackOnce, + copyBuffer, + deepExtend, + each, + 
eachSeries, + emptyArray, + emptyObject, + extend, + fixStack, + forEachOf, + funcCompare, + ifUndefined, + ifUndefined3, + insertSorted, + iteratorToArray, + log, + map, + mapSeries, + maxInt, + noop, + objectValues, + parallel, + promiseWrapper, + propCompare, + series, + shuffleArray, + stringRepeat, + times, + timesLimit, + timesSeries, + totalLength, + validateFn, + whilst, + HashSet, + VIntCoding +}; + +export { + adaptNamedParamsPrepared, + adaptNamedParamsWithHints, + AddressResolver, + allocBuffer, allocBufferFromArray, + allocBufferFromString, allocBufferUnsafe, arrayIterator, + binarySearch, + callbackOnce, + copyBuffer, + deepExtend, + each, + eachSeries, + emptyArray, + emptyObject, + extend, + fixStack, + forEachOf, + funcCompare, HashSet, ifUndefined, + ifUndefined3, + insertSorted, + iteratorToArray, + log, + map, + mapSeries, + maxInt, + noop, + objectValues, + parallel, + promiseWrapper, + propCompare, + series, + shuffleArray, + stringRepeat, + times, + timesLimit, + timesSeries, + totalLength, + validateFn, VIntCoding, whilst, type ArrayOrObject, type EmptyCallback, type ValueCallback +}; diff --git a/lib/writers.js b/lib/writers.ts similarity index 87% rename from lib/writers.js rename to lib/writers.ts index a3d4f2816..01ed7a21b 100644 --- a/lib/writers.js +++ b/lib/writers.ts @@ -13,22 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const events = require('events'); +import events from "events"; +import { Socket } from "net"; +import type { ClientOptions } from "./client"; +import Encoder from "./encoder"; +import { DriverError } from "./errors"; +import type OperationState from "./operation-state"; +import types, { Long } from "./types/index"; +import utils from "./utils"; + -const types = require('./types'); -const utils = require('./utils.js'); const FrameHeader = types.FrameHeader; /** * Contains the logic to write all the different types to the frame. */ class FrameWriter { + bodyLength: any; + buffers: any[]; + opcode: number; /** * Creates a new instance of FrameWriter. * @param {Number} opcode */ - constructor(opcode) { + constructor(opcode: number) { if (!opcode) { throw new Error('Opcode not provided'); } @@ -55,15 +63,15 @@ class FrameWriter { } /** @param {Long} num */ - writeLong(num) { - this.add(types.Long.toBuffer(num)); + writeLong(num: Long) { + this.add(Long["toBuffer"](num)); } /** * Writes bytes according to Cassandra * @param {Buffer|null|types.unset} bytes */ - writeBytes(bytes) { + writeBytes(bytes: Buffer | null | typeof types.unset) { if (bytes === null) { //Only the length buffer containing -1 this.writeInt(-1); @@ -74,7 +82,7 @@ class FrameWriter { return; } //Add the length buffer - this.writeInt(bytes.length); + this.writeInt((bytes as Buffer).length); //Add the actual buffer this.add(bytes); } @@ -83,7 +91,7 @@ class FrameWriter { * Writes a buffer according to Cassandra protocol: bytes.length (2) + bytes * @param {Buffer} bytes */ - writeShortBytes(bytes) { + writeShortBytes(bytes: Buffer) { if(bytes === null) { //Only the length buffer containing -1 this.writeShort(-1); @@ -99,7 +107,7 @@ class FrameWriter { * Writes a single byte * @param {Number} num Value of the byte, a number between 0 and 255. 
*/ - writeByte(num) { + writeByte(num: number) { this.add(utils.allocBufferFromArray([num])); } @@ -160,7 +168,7 @@ class FrameWriter { * @returns {Buffer} * @throws {TypeError} */ - write(version, streamId, flags) { + write(version: number, streamId: number, flags?: number): Buffer { const header = new FrameHeader(version, flags || 0, streamId, this.opcode, this.bodyLength); const headerBuffer = header.toBuffer(); this.buffers.unshift(headerBuffer); @@ -171,15 +179,23 @@ class FrameWriter { /** * Represents a queue that process one write at a time (FIFO). * @extends {EventEmitter} + * @ignore @internal */ class WriteQueue extends events.EventEmitter { + netClient: Socket; + encoder: Encoder; + isRunning: boolean; + queue: any[]; + coalescingThreshold: any; + error: DriverError; + canWrite: boolean; /** * Creates a new WriteQueue instance. * @param {Socket} netClient * @param {Encoder} encoder * @param {ClientOptions} options */ - constructor(netClient, encoder, options) { + constructor(netClient: Socket, encoder: Encoder, options: ClientOptions) { super(); this.netClient = netClient; this.encoder = encoder; @@ -203,7 +219,7 @@ class WriteQueue extends events.EventEmitter { * @param {OperationState} operation * @param {Function} callback The write callback. */ - push(operation, callback) { + push(operation: OperationState, callback: Function) { const self = this; if (this.error) { @@ -298,7 +314,7 @@ class WriteQueue extends events.EventEmitter { this.error.innerError = err; const q = this.queue; // Not more items can be added to the queue. - this.queue = utils.emptyArray; + this.queue = utils.emptyArray as any[]; for (let i = 0; i < q.length; i++) { const item = q[i]; // Use the error marking that it was not written @@ -307,4 +323,4 @@ class WriteQueue extends events.EventEmitter { } } -module.exports = { FrameWriter, WriteQueue }; +export { FrameWriter, WriteQueue }; diff --git a/namespace-to-append.txt b/namespace-to-append.txt new file mode 100644 index 000000000..2f86fcb3a --- /dev/null +++ b/namespace-to-append.txt @@ -0,0 +1,211 @@ +export namespace auth { + export type Authenticator = InstanceType; + export type AuthProvider = InstanceType; + export type PlainTextAuthProvider = InstanceType; + export type DsePlainTextAuthProvider = InstanceType; + export type DseGssapiAuthProvider = InstanceType; +} + +type _Options = Options; + +export namespace concurrent { + export type ResultSetGroup = InstanceType; + export type executeConcurrent = typeof concurrent.executeConcurrent; + export type Options = _Options; +} + +export namespace datastax { + export namespace graph { + export type asDouble = typeof datastax.graph.asDouble; + export type asFloat = typeof datastax.graph.asFloat; + export type asInt = typeof datastax.graph.asInt; + export type asTimestamp = typeof datastax.graph.asTimestamp; + export type asUdt = typeof datastax.graph.asUdt; + export type direction = typeof datastax.graph.direction; + export type Edge = InstanceType; + export type Element = InstanceType; + export type GraphResultSet = InstanceType; + export type Path = InstanceType; + export type Property = InstanceType; + export type t = typeof datastax.graph.t; + export type Vertex = InstanceType; + export type VertexProperty = InstanceType; + } + export namespace search { + export type DateRange = InstanceType; + export type DateRangeBound = InstanceType; + export type dateRangePrecision = typeof datastax.search.dateRangePrecision; + } +} + +export namespace geometry { + export type LineString = InstanceType; + 
export type Point = InstanceType; + export type Polygon = InstanceType; +} + +type _MappingExecutionOptions = MappingExecutionOptions; +type _MappingOptions = MappingOptions; +type _FindDocInfo = FindDocInfo; +type _UpdateDocInfo = UpdateDocInfo; +type _RemoveDocInfo = RemoveDocInfo; +type _ModelOptions = ModelOptions; +type _ModelColumnOptions = ModelColumnOptions; +type _ModelTables = ModelTables; +type _QueryOperator = QueryOperator; +type _QueryAssignment = QueryAssignment; + +export namespace mapping { + export type TableMappings = InstanceType; + export type DefaultTableMappings = InstanceType; + export type UnderscoreCqlToCamelCaseMappings = InstanceType; + export type Result = InstanceType; + export type MappingExecutionOptions = _MappingExecutionOptions; + export type ModelTables = _ModelTables; + export type Mapper = InstanceType; + export type MappingOptions = _MappingOptions; + export type FindDocInfo = _FindDocInfo; + export type UpdateDocInfo = _UpdateDocInfo; + export type RemoveDocInfo = _RemoveDocInfo; + export type ModelOptions = _ModelOptions; + export type ModelColumnOptions = _ModelColumnOptions; + export type ModelBatchItem = InstanceType; + export type ModelBatchMapper = InstanceType; + export type ModelMapper = InstanceType; + export namespace q{ + export type QueryOperator = _QueryOperator; + export type QueryAssignment = _QueryAssignment; + export type in_ = typeof mapping.q.in_; + export type gt = typeof mapping.q.gt; + export type gte = typeof mapping.q.gte; + export type lt = typeof mapping.q.lt; + export type lte = typeof mapping.q.lte; + export type notEq = typeof mapping.q.notEq; + export type and = typeof mapping.q.and; + export type incr = typeof mapping.q.incr; + export type decr = typeof mapping.q.decr; + export type append = typeof mapping.q.append; + export type prepend = typeof mapping.q.prepend; + export type remove = typeof mapping.q.remove; + } +} + +type _IndexKind = IndexKind; +type _DataTypeInfo = DataTypeInfo; +type _ColumnInfo = ColumnInfo; +type _QueryTrace = QueryTrace; +type _Udt = Udt; +export namespace metadata { + export type Aggregate = InstanceType; + export type ClientState = InstanceType; + export type DataTypeInfo = _DataTypeInfo; + export type ColumnInfo = _ColumnInfo; + export type IndexKind = _IndexKind; + export type Index = InstanceType; + export type DataCollection = InstanceType; + export type MaterializedView = InstanceType; + export type TableMetadata = InstanceType; + export type QueryTrace = _QueryTrace; + export type SchemaFunction = InstanceType; + export type Udt = _Udt; + export type Metadata = InstanceType; +} + +export namespace metrics{ + export type ClientMetrics = InstanceType; + export type DefaultMetrics = InstanceType; +} + +type _DecisionInfo = DecisionInfo; +type _OperationInfo = OperationInfo; +export namespace policies{ + export type defaultAddressTranslator = typeof defaultAddressTranslator; + export type defaultLoadBalancingPolicy = typeof defaultLoadBalancingPolicy; + export type defaultReconnectionPolicy = typeof defaultReconnectionPolicy; + export type defaultRetryPolicy = typeof defaultRetryPolicy; + export type defaultSpeculativeExecutionPolicy = typeof defaultSpeculativeExecutionPolicy; + export type defaultTimestampGenerator = typeof defaultTimestampGenerator; + export namespace addressResolution{ + export type AddressTranslator = InstanceType; + export type EC2MultiRegionTranslator = InstanceType; + } + export namespace loadBalancing{ + export type LoadBalancingPolicy = InstanceType; + export 
type DCAwareRoundRobinPolicy = InstanceType; + export type TokenAwarePolicy = InstanceType; + export type AllowListPolicy = InstanceType; + export type WhiteListPolicy = InstanceType; + export type RoundRobinPolicy = InstanceType; + export type DefaultLoadBalancingPolicy = InstanceType; + } + export namespace reconnection{ + export type ReconnectionPolicy = InstanceType; + export type ConstantReconnectionPolicy = InstanceType; + export type ExponentialReconnectionPolicy = InstanceType; + } + export namespace retry{ + export type DecisionInfo = _DecisionInfo; + export type OperationInfo = _OperationInfo; + export type IdempotenceAwareRetryPolicy = InstanceType; + export type FallthroughRetryPolicy = InstanceType; + export type RetryPolicy = InstanceType; + export namespace RetryDecision{ + export type retryDecision = RetryPolicy.retryDecision; + } + export namespace speculativeExecution{ + export type ConstantSpeculativeExecutionPolicy = InstanceType; + export type NoSpeculativeExecutionPolicy = InstanceType; + export type SpeculativeExecutionPolicy = InstanceType; + } + export namespace timestampGeneration{ + export type TimestampGenerator = InstanceType; + export type MonotonicTimestampGenerator = InstanceType; + } + } +} + +export namespace tracker{ + export type RequestTracker = InstanceType; + export type RequestLogger = InstanceType; +} + +export namespace types { + export type Long = InstanceType; + export type consistencies = typeof types.consistencies; + export type dataTypes = typeof types.dataTypes; + export type distance = typeof types.distance; + export type responseErrorCodes = typeof types.responseErrorCodes; + export type protocolVersion = typeof types.protocolVersion; + export type unset = Readonly<{readonly unset: true; }>; + export type BigDecimal = InstanceType; + export type Duration = InstanceType; + export type InetAddress = InstanceType; + export type Integer = InstanceType; + export type LocalDate = InstanceType; + export type LocalTime = InstanceType; + export type ResultSet = InstanceType; + export type ResultStream = InstanceType; + export type Row = InstanceType; + export type TimeUuid = InstanceType; + export type Tuple = InstanceType; + export type Uuid = InstanceType; + export type Vector = InstanceType; +} + +export namespace errors { + export type ArgumentError = InstanceType; + export type AuthenticationError = InstanceType; + export type BusyConnectionError = InstanceType; + export type DriverError = InstanceType; + export type DriverInternalError = InstanceType; + export type NoHostAvailableError = InstanceType; + export type NotSupportedError = InstanceType; + export type OperationTimedOutError = InstanceType; + export type ResponseError = InstanceType; + export type VIntOutOfRangeException = InstanceType; +} + +export namespace token{ + export type Token = InstanceType; + export type TokenRange = InstanceType; +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index cc1b48b1b..bba9e6edf 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,18 +14,837 @@ "long": "~5.2.3" }, "devDependencies": { + "@microsoft/api-extractor": "^7.52.5", + "@rollup/plugin-typescript": "^12.1.2", + "@stylistic/eslint-plugin": "^3.1.0", + "@types/chai": "^5.2.1", + "@types/mocha": "^10.0.10", + "@types/sinon": "^17.0.4", + "@typescript-eslint/eslint-plugin": "^8.26.1", + "@typescript-eslint/parser": "^8.26.1", "chai": "~4.3.8", + "eslint": "^8.57.1", "kerberos": "~2.0.3", "mocha": "~10.2.0", - "mocha-jenkins-reporter": "~0.4.8", - 
"proxyquire": "~2.1.3", + "mocha-appveyor-reporter": "^0.4.2", + "mocha-jenkins-reporter": "^0.4.8", + "rollup": "^4.37.0", + "rollup-plugin-dts": "^6.2.1", "sinon": "~15.2.0", - "temp": ">= 0.8.3" + "temp": ">= 0.8.3", + "ts-mocha": "^11.1.0", + "tslib": "^2.8.1", + "typescript": "^5.8.2" }, "engines": { "node": ">=18" } }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "optional": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "optional": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.0.tgz", + "integrity": "sha512-RoV8Xs9eNwiDvhv7M+xcL4PWyRyIXRY/FLp3buU4h1EYfdF7unWUy3dOjPqb3C7rMUewIcqwW850PgS8h1o1yg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || 
^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + 
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@microsoft/api-extractor": { + "version": "7.52.5", + "resolved": "https://registry.npmjs.org/@microsoft/api-extractor/-/api-extractor-7.52.5.tgz", + "integrity": "sha512-6WWgjjg6FkoDWpF/O3sjB05OkszpI5wtKJqd8fUIR/JJUv8IqNCGr1lJUZJnc1HegcT9gAvyf98KfH0wFncU0w==", + "dev": true, + "dependencies": { + "@microsoft/api-extractor-model": "7.30.5", + "@microsoft/tsdoc": "~0.15.1", + "@microsoft/tsdoc-config": "~0.17.1", + "@rushstack/node-core-library": "5.13.0", + "@rushstack/rig-package": "0.5.3", + "@rushstack/terminal": "0.15.2", + "@rushstack/ts-command-line": "5.0.0", + "lodash": "~4.17.15", + "minimatch": "~3.0.3", + "resolve": "~1.22.1", + "semver": "~7.5.4", + "source-map": "~0.6.1", + "typescript": "5.8.2" + }, + "bin": { + "api-extractor": "bin/api-extractor" + } + }, + "node_modules/@microsoft/api-extractor-model": { + "version": "7.30.5", + "resolved": "https://registry.npmjs.org/@microsoft/api-extractor-model/-/api-extractor-model-7.30.5.tgz", + "integrity": "sha512-0ic4rcbcDZHz833RaTZWTGu+NpNgrxVNjVaor0ZDUymfDFzjA/Uuk8hYziIUIOEOSTfmIQqyzVwlzxZxPe7tOA==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "~0.15.1", + "@microsoft/tsdoc-config": "~0.17.1", + "@rushstack/node-core-library": "5.13.0" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/minimatch": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.8.tgz", + "integrity": "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/tsdoc": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz", + "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", + "dev": true + }, + "node_modules/@microsoft/tsdoc-config": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.1.tgz", + "integrity": 
"sha512-UtjIFe0C6oYgTnad4q1QP4qXwLhe6tIpNTRStJ2RZEPIkqQPREAwE5spzVxsdn9UaEMUqhh0AqSx3X4nWAKXWw==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.1", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@rollup/plugin-typescript": { + "version": "12.1.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-typescript/-/plugin-typescript-12.1.2.tgz", + "integrity": "sha512-cdtSp154H5sv637uMr1a8OTWB0L1SWDSm1rDGiyfcGcvQ6cuTs4MDk2BVEBGysUWago4OJN4EQZqOTl/QY3Jgg==", + "dev": true, + "dependencies": { + "@rollup/pluginutils": "^5.1.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.14.0||^3.0.0||^4.0.0", + "tslib": "*", + "typescript": ">=3.7.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + }, + "tslib": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.4.tgz", + "integrity": "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils/node_modules/picomatch": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz", + "integrity": "sha512-l7StVw6WAa8l3vA1ov80jyetOAEo1FtHvZDbzXDO/02Sq/QVvqlHkYoFwDJPIMj0GKiistsBudfx5tGFnwYWDQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.37.0.tgz", + "integrity": "sha512-6U3SlVyMxezt8Y+/iEBcbp945uZjJwjZimu76xoG7tO1av9VO691z8PkhzQ85ith2I8R2RddEPeSfcbyPfD4hA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.37.0.tgz", + "integrity": "sha512-+iTQ5YHuGmPt10NTzEyMPbayiNTcOZDWsbxZYR1ZnmLnZxG17ivrPSWFO9j6GalY0+gV3Jtwrrs12DBscxnlYA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.37.0.tgz", + "integrity": "sha512-m8W2UbxLDcmRKVjgl5J/k4B8d7qX2EcJve3Sut7YGrQoPtCIQGPH5AMzuFvYRWZi0FVS0zEY4c8uttPfX6bwYQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.37.0.tgz", + "integrity": "sha512-FOMXGmH15OmtQWEt174v9P1JqqhlgYge/bUjIbiVD1nI1NeJ30HYT9SJlZMqdo1uQFyt9cz748F1BHghWaDnVA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.37.0.tgz", + "integrity": "sha512-SZMxNttjPKvV14Hjck5t70xS3l63sbVwl98g3FlVVx2YIDmfUIy29jQrsw06ewEYQ8lQSuY9mpAPlmgRD2iSsA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.37.0.tgz", + "integrity": "sha512-hhAALKJPidCwZcj+g+iN+38SIOkhK2a9bqtJR+EtyxrKKSt1ynCBeqrQy31z0oWU6thRZzdx53hVgEbRkuI19w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.37.0.tgz", + "integrity": "sha512-jUb/kmn/Gd8epbHKEqkRAxq5c2EwRt0DqhSGWjPFxLeFvldFdHQs/n8lQ9x85oAeVb6bHcS8irhTJX2FCOd8Ag==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.37.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.37.0.tgz", + "integrity": "sha512-oNrJxcQT9IcbcmKlkF+Yz2tmOxZgG9D9GRq+1OE6XCQwCVwxixYAa38Z8qqPzQvzt1FCfmrHX03E0pWoXm1DqA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.37.0.tgz", + "integrity": "sha512-pfxLBMls+28Ey2enpX3JvjEjaJMBX5XlPCZNGxj4kdJyHduPBXtxYeb8alo0a7bqOoWZW2uKynhHxF/MWoHaGQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.37.0.tgz", + "integrity": "sha512-yCE0NnutTC/7IGUq/PUHmoeZbIwq3KRh02e9SfFh7Vmc1Z7atuJRYWhRME5fKgT8aS20mwi1RyChA23qSyRGpA==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.37.0.tgz", + "integrity": "sha512-NxcICptHk06E2Lh3a4Pu+2PEdZ6ahNHuK7o6Np9zcWkrBMuv21j10SQDJW3C9Yf/A/P7cutWoC/DptNLVsZ0VQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.37.0.tgz", + "integrity": "sha512-PpWwHMPCVpFZLTfLq7EWJWvrmEuLdGn1GMYcm5MV7PaRgwCEYJAwiN94uBuZev0/J/hFIIJCsYw4nLmXA9J7Pw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.37.0.tgz", + "integrity": "sha512-DTNwl6a3CfhGTAOYZ4KtYbdS8b+275LSLqJVJIrPa5/JuIufWWZ/QFvkxp52gpmguN95eujrM68ZG+zVxa8zHA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.37.0.tgz", + "integrity": "sha512-hZDDU5fgWvDdHFuExN1gBOhCuzo/8TMpidfOR+1cPZJflcEzXdCy1LjnklQdW8/Et9sryOPJAKAQRw8Jq7Tg+A==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.37.0.tgz", + "integrity": "sha512-pKivGpgJM5g8dwj0ywBwe/HeVAUSuVVJhUTa/URXjxvoyTT/AxsLTAbkHkDHG7qQxLoW2s3apEIl26uUe08LVQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.37.0.tgz", + "integrity": "sha512-E2lPrLKE8sQbY/2bEkVTGDEk4/49UYRVWgj90MY8yPjpnGBQ+Xi1Qnr7b7UIWw1NOggdFQFOLZ8+5CzCiz143w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.37.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.37.0.tgz", + "integrity": "sha512-Jm7biMazjNzTU4PrQtr7VS8ibeys9Pn29/1bm4ph7CP2kf21950LgN+BaE2mJ1QujnvOc6p54eWWiVvn05SOBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.37.0.tgz", + "integrity": "sha512-e3/1SFm1OjefWICB2Ucstg2dxYDkDTZGDYgwufcbsxTHyqQps1UQf33dFEChBNmeSsTOyrjw2JJq0zbG5GF6RA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.37.0.tgz", + "integrity": "sha512-LWbXUBwn/bcLx2sSsqy7pK5o+Nr+VCoRoAohfJ5C/aBio9nfJmGQqHAhU6pwxV/RmyTk5AqdySma7uwWGlmeuA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rushstack/node-core-library": { + "version": "5.13.0", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-5.13.0.tgz", + "integrity": "sha512-IGVhy+JgUacAdCGXKUrRhwHMTzqhWwZUI+qEPcdzsb80heOw0QPbhhoVsoiMF7Klp8eYsp7hzpScMXmOa3Uhfg==", + "dev": true, + "dependencies": { + "ajv": "~8.13.0", + "ajv-draft-04": "~1.0.0", + "ajv-formats": "~3.0.1", + "fs-extra": "~11.3.0", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.22.1", + "semver": "~7.5.4" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@rushstack/node-core-library/node_modules/ajv": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.13.0.tgz", + "integrity": "sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/ajv-draft-04": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz", + "integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==", + "dev": true, + "peerDependencies": { + "ajv": "^8.5.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/@rushstack/node-core-library/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/@rushstack/node-core-library/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@rushstack/rig-package": { + "version": "0.5.3", + "resolved": 
"https://registry.npmjs.org/@rushstack/rig-package/-/rig-package-0.5.3.tgz", + "integrity": "sha512-olzSSjYrvCNxUFZowevC3uz8gvKr3WTpHQ7BkpjtRpA3wK+T0ybep/SRUMfr195gBzJm5gaXw0ZMgjIyHqJUow==", + "dev": true, + "dependencies": { + "resolve": "~1.22.1", + "strip-json-comments": "~3.1.1" + } + }, + "node_modules/@rushstack/terminal": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/@rushstack/terminal/-/terminal-0.15.2.tgz", + "integrity": "sha512-7Hmc0ysK5077R/IkLS9hYu0QuNafm+TbZbtYVzCMbeOdMjaRboLKrhryjwZSRJGJzu+TV1ON7qZHeqf58XfLpA==", + "dev": true, + "dependencies": { + "@rushstack/node-core-library": "5.13.0", + "supports-color": "~8.1.1" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@rushstack/ts-command-line": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@rushstack/ts-command-line/-/ts-command-line-5.0.0.tgz", + "integrity": "sha512-SW6nqZVxH26Rxz25+lJQRlnXI/YCrNH7NfDEWPPm9i0rwkSE6Rgtmzw96cuZgQjacOh0sw77d6V4SvgarAfr8g==", + "dev": true, + "dependencies": { + "@rushstack/terminal": "0.15.2", + "@types/argparse": "1.0.38", + "argparse": "~1.0.9", + "string-argv": "~0.3.1" + } + }, + "node_modules/@rushstack/ts-command-line/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, "node_modules/@sinonjs/commons": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz", @@ -70,6 +889,98 @@ "integrity": "sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ==", "dev": true }, + "node_modules/@stylistic/eslint-plugin": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-3.1.0.tgz", + "integrity": "sha512-pA6VOrOqk0+S8toJYhQGv2MWpQQR0QpeUo9AhNkC49Y26nxBQ/nH1rta9bUU1rPw2fJ1zZEMV5oCX5AazT7J2g==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "^8.13.0", + "eslint-visitor-keys": "^4.2.0", + "espree": "^10.3.0", + "estraverse": "^5.3.0", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "peerDependencies": { + "eslint": ">=8.40.0" + } + }, + "node_modules/@stylistic/eslint-plugin/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "peer": true + }, + "node_modules/@types/argparse": { + "version": "1.0.38", + "resolved": "https://registry.npmjs.org/@types/argparse/-/argparse-1.0.38.tgz", + "integrity": "sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==", + "dev": true + }, + "node_modules/@types/chai": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.1.tgz", + "integrity": "sha512-iu1JLYmGmITRzUgNiLMZD3WCoFzpYtueuyAgHTXqgwSRAMIlFTnZqG6/xenkpUGRJEzSfklUTI4GNSzks/dc0w==", + "dev": true, + "dependencies": { + "@types/deep-eql": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true + }, + "node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "10.0.10", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.10.tgz", + "integrity": "sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q==", + "dev": true + }, "node_modules/@types/node": { "version": "18.19.53", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.53.tgz", @@ -78,6 +989,257 @@ "undici-types": "~5.26.4" } }, + "node_modules/@types/sinon": { + "version": "17.0.4", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-17.0.4.tgz", + "integrity": "sha512-RHnIrhfPO3+tJT0s7cFaXGZvsL4bbR3/k7z3P312qMS4JaS2Tk+KiwiLx1S0rQ56ERj00u1/BtdyVd0FY+Pdew==", + "dev": true, + "dependencies": { + "@types/sinonjs__fake-timers": "*" + } + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz", + "integrity": "sha512-mQkU2jY8jJEF7YHjHvsQO8+3ughTL1mcnn96igfhONmR+fUPSKIkefQYpSe8bsly2Ep7oQbn/6VG5/9/0qcArQ==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.26.1.tgz", + "integrity": "sha512-2X3mwqsj9Bd3Ciz508ZUtoQQYpOhU/kWoUqIf49H8Z0+Vbh6UF/y0OEYp0Q0axOGzaBGs7QxRwq0knSQ8khQNA==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.26.1", + "@typescript-eslint/type-utils": "8.26.1", + "@typescript-eslint/utils": "8.26.1", + "@typescript-eslint/visitor-keys": "8.26.1", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + 
"peerDependencies": { + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.26.1.tgz", + "integrity": "sha512-w6HZUV4NWxqd8BdeFf81t07d7/YV9s7TCWrQQbG5uhuvGUAW+fq1usZ1Hmz9UPNLniFnD8GLSsDpjP0hm1S4lQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.26.1", + "@typescript-eslint/types": "8.26.1", + "@typescript-eslint/typescript-estree": "8.26.1", + "@typescript-eslint/visitor-keys": "8.26.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.26.1.tgz", + "integrity": "sha512-6EIvbE5cNER8sqBu6V7+KeMZIC1664d2Yjt+B9EWUXrsyWpxx4lEZrmvxgSKRC6gX+efDL/UY9OpPZ267io3mg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.26.1", + "@typescript-eslint/visitor-keys": "8.26.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.26.1.tgz", + "integrity": "sha512-Kcj/TagJLwoY/5w9JGEFV0dclQdyqw9+VMndxOJKtoFSjfZhLXhYjzsQEeyza03rwHx2vFEGvrJWJBXKleRvZg==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "8.26.1", + "@typescript-eslint/utils": "8.26.1", + "debug": "^4.3.4", + "ts-api-utils": "^2.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.26.1.tgz", + "integrity": "sha512-n4THUQW27VmQMx+3P+B0Yptl7ydfceUj4ON/AQILAASwgYdZ/2dhfymRMh5egRUrvK5lSmaOm77Ry+lmXPOgBQ==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.26.1.tgz", + "integrity": "sha512-yUwPpUHDgdrv1QJ7YQal3cMVBGWfnuCdKbXw1yyjArax3353rEJP1ZA+4F8nOlQ3RfS2hUN/wze3nlY+ZOhvoA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.26.1", + "@typescript-eslint/visitor-keys": "8.26.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + 
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.26.1.tgz", + "integrity": "sha512-V4Urxa/XtSUroUrnI7q6yUTD3hDtfJ2jzVfeT3VK0ciizfK2q/zGC0iDh1lFMUZR8cImRrep6/q0xd/1ZGPQpg==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.26.1", + "@typescript-eslint/types": "8.26.1", + "@typescript-eslint/typescript-estree": "8.26.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.26.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.26.1.tgz", + "integrity": "sha512-AjOC3zfnxd6S4Eiy3jwktJPclqhFHNyd8L6Gycf9WUPoKZpgM5PjkxY1X7uSy61xVpiJDhhk7XT2NVsN3ALTWg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.26.1", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "peer": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, 
"node_modules/adm-zip": { "version": "0.5.10", "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.10.tgz", @@ -86,6 +1248,61 @@ "node": ">=6.0" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "dev": true, + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-formats/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, "node_modules/ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", @@ -132,12 +1349,37 @@ "node": ">= 8" } }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "peer": true + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dev": true, + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, "node_modules/assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", @@ -147,6 +1389,27 @@ "node": "*" } }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": 
"sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.13.2.tgz", + "integrity": "sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw==", + "dev": true + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -173,6 +1436,15 @@ } ] }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dev": true, + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -227,12 +1499,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -268,6 +1540,15 @@ "ieee754": "^1.1.13" } }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/camelcase": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", @@ -280,6 +1561,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true + }, "node_modules/chai": { "version": "4.3.8", "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.8.tgz", @@ -397,12 +1684,63 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": 
"sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", + "dev": true + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "peer": true + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -474,6 +1812,30 @@ "node": ">=4.0.0" } }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/detect-libc": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz", @@ -492,6 +1854,28 @@ "node": ">=0.3.1" } }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dev": true, + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -508,9 +1892,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": 
"https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "engines": { "node": ">=6" @@ -528,6 +1912,208 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + 
"node_modules/eslint/node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", + "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "dev": true, + "dependencies": { + "acorn": "^8.14.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -537,29 +2123,102 @@ "node": ">=6" } }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", "dev": true }, - "node_modules/fill-keys": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/fill-keys/-/fill-keys-1.0.2.tgz", - "integrity": "sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA==", + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "dev": true, + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", "dev": true, "dependencies": { - "is-object": "~1.0.1", - "merge-descriptors": "~1.0.0" + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fast-uri": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", + "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" } }, + 
"node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "dev": true + }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -593,12 +2252,85 @@ "flat": "cli.js" } }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flat-cache/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", "dev": true }, + "node_modules/fs-extra": { + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", + "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -643,6 +2375,15 @@ "node": "*" } 
}, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -693,6 +2434,56 @@ "node": "*" } }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "deprecated": "this library is no longer supported", + "dev": true, + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -723,6 +2514,21 @@ "he": "bin/he" } }, + "node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -743,6 +2549,49 @@ } ] }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, 
+ "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-lazy": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz", + "integrity": "sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -828,13 +2677,13 @@ "node": ">=0.12.0" } }, - "node_modules/is-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", - "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==", + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=8" } }, "node_modules/is-plain-obj": { @@ -846,6 +2695,12 @@ "node": ">=8" } }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true + }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", @@ -864,6 +2719,31 @@ "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", "dev": true }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true + }, + "node_modules/jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "optional": true + }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -876,6 +2756,69 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": 
"sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "dev": true, + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/just-extend": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", @@ -897,6 +2840,28 @@ "node": ">=12.9.0" } }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -912,12 +2877,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + 
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, "node_modules/lodash.get": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", @@ -948,11 +2925,76 @@ "get-func-name": "^2.0.0" } }, - "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==", - "dev": true + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/magic-string": { + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "peer": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } }, "node_modules/mimic-response": { "version": "3.1.0", @@ -1054,6 +3096,15 @@ "url": "https://opencollective.com/mochajs" } }, + 
"node_modules/mocha-appveyor-reporter": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/mocha-appveyor-reporter/-/mocha-appveyor-reporter-0.4.2.tgz", + "integrity": "sha512-toYTeM5GI4DPghD0Fh17wCDEXvrUZLB5zUkBUORUxxAf/XxJPZmyMVw0Xaue3gFjdTE4eR4IOZO1wloR2Cfniw==", + "dev": true, + "dependencies": { + "request-json": "^0.6.4" + } + }, "node_modules/mocha-jenkins-reporter": { "version": "0.4.8", "resolved": "https://registry.npmjs.org/mocha-jenkins-reporter/-/mocha-jenkins-reporter-0.4.8.tgz", @@ -1089,12 +3140,6 @@ "node": ">=10" } }, - "node_modules/module-not-found-error": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz", - "integrity": "sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g==", - "dev": true - }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -1119,6 +3164,12 @@ "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", "dev": true }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, "node_modules/nise": { "version": "5.1.4", "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.4.tgz", @@ -1153,39 +3204,6 @@ "node": ">=10" } }, - "node_modules/node-abi/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-abi/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-abi/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/node-addon-api": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz", @@ -1201,6 +3219,15 @@ "node": ">=0.10.0" } }, + "node_modules/oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -1210,6 +3237,23 @@ "wrappy": "1" } }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + 
"levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -1240,6 +3284,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -1258,6 +3314,15 @@ "node": ">=0.10.0" } }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", @@ -1282,6 +3347,19 @@ "node": "*" } }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "optional": true + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -1320,15 +3398,25 @@ "node": ">=10" } }, - "node_modules/proxyquire": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/proxyquire/-/proxyquire-2.1.3.tgz", - "integrity": "sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg==", + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/psl": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz", + "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==", "dev": true, "dependencies": { - "fill-keys": "^1.0.2", - "module-not-found-error": "^1.0.1", - "resolve": "^1.11.1" + "punycode": "^2.3.1" + }, + "funding": { + "url": "https://github.com/sponsors/lupomontero" } }, "node_modules/pump": { @@ -1341,6 +3429,44 @@ "once": "^1.3.1" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.5.3", + "resolved": 
"https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -1386,6 +3512,51 @@ "node": ">=8.10.0" } }, + "node_modules/request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "dev": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/request-json": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/request-json/-/request-json-0.6.5.tgz", + "integrity": "sha512-bpJ0MZPeb3+/8ux/jM+CLRghTOQ8Oh2VuqtnrPu9ZnSIjr/77sOj/rSWfK9cPRpp3U0UWAIv7rsRvSlyRwbmsw==", + "dev": true, + "dependencies": { + "depd": "1.1.2", + "request": "2.88.0" + }, + "engines": { + "node": "*" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -1395,6 +3566,15 @@ "node": ">=0.10.0" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/resolve": { "version": "1.22.4", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz", @@ -1412,6 +3592,25 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, 
+ "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, "node_modules/rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", @@ -1424,6 +3623,90 @@ "rimraf": "bin.js" } }, + "node_modules/rollup": { + "version": "4.37.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz", + "integrity": "sha512-iAtQy/L4QFU+rTJ1YUjXqJOJzuwEghqWzCEYD2FEghT7Gsy1VdABntrO4CLopA5IkflTyqNiLNwPcOJ3S7UKLg==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.6" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.37.0", + "@rollup/rollup-android-arm64": "4.37.0", + "@rollup/rollup-darwin-arm64": "4.37.0", + "@rollup/rollup-darwin-x64": "4.37.0", + "@rollup/rollup-freebsd-arm64": "4.37.0", + "@rollup/rollup-freebsd-x64": "4.37.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.37.0", + "@rollup/rollup-linux-arm-musleabihf": "4.37.0", + "@rollup/rollup-linux-arm64-gnu": "4.37.0", + "@rollup/rollup-linux-arm64-musl": "4.37.0", + "@rollup/rollup-linux-loongarch64-gnu": "4.37.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.37.0", + "@rollup/rollup-linux-riscv64-gnu": "4.37.0", + "@rollup/rollup-linux-riscv64-musl": "4.37.0", + "@rollup/rollup-linux-s390x-gnu": "4.37.0", + "@rollup/rollup-linux-x64-gnu": "4.37.0", + "@rollup/rollup-linux-x64-musl": "4.37.0", + "@rollup/rollup-win32-arm64-msvc": "4.37.0", + "@rollup/rollup-win32-ia32-msvc": "4.37.0", + "@rollup/rollup-win32-x64-msvc": "4.37.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/rollup-plugin-dts": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/rollup-plugin-dts/-/rollup-plugin-dts-6.2.1.tgz", + "integrity": "sha512-sR3CxYUl7i2CHa0O7bA45mCrgADyAQ0tVtGSqi3yvH28M+eg1+g5d7kQ9hLvEz5dorK3XVsH5L2jwHLQf72DzA==", + "dev": true, + "dependencies": { + "magic-string": "^0.30.17" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/Swatinem" + }, + "optionalDependencies": { + "@babel/code-frame": "^7.26.2" + }, + "peerDependencies": { + "rollup": "^3.29.4 || ^4", + "typescript": "^4.5 || ^5.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -1444,6 +3727,24 @@ } ] }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + 
"engines": { + "node": ">=10" + } + }, "node_modules/serialize-javascript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", @@ -1453,6 +3754,27 @@ "randombytes": "^2.1.0" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -1537,6 +3859,46 @@ "node": ">=8" } }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/sshpk": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz", + "integrity": "sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==", + "dev": true, + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", @@ -1552,6 +3914,15 @@ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", "dev": true }, + "node_modules/string-argv": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", + "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", + "dev": true, + "engines": { + "node": ">=0.6.19" + } + }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -1672,6 +4043,12 @@ "node": ">=6.0.0" } }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -1684,6 +4061,119 @@ "node": ">=8.0" } }, + 
"node_modules/tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "dev": true, + "dependencies": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tough-cookie/node_modules/punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "dev": true + }, + "node_modules/ts-api-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.1.tgz", + "integrity": "sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w==", + "dev": true, + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/ts-mocha": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/ts-mocha/-/ts-mocha-11.1.0.tgz", + "integrity": "sha512-yT7FfzNRCu8ZKkYvAOiH01xNma/vLq6Vit7yINKYFNVP8e5UyrYXSOMIipERTpzVKJQ4Qcos5bQo1tNERNZevQ==", + "dev": true, + "bin": { + "ts-mocha": "bin/ts-mocha" + }, + "engines": { + "node": ">= 6.X.X" + }, + "peerDependencies": { + "mocha": "^3.X.X || ^4.X.X || ^5.X.X || ^6.X.X || ^7.X.X || ^8.X.X || ^9.X.X || ^10.X.X || ^11.X.X", + "ts-node": "^7.X.X || ^8.X.X || ^9.X.X || ^10.X.X", + "tsconfig-paths": "^4.X.X" + }, + "peerDependenciesMeta": { + "tsconfig-paths": { + "optional": true + } + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "peer": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true + }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -1696,6 +4186,24 @@ 
"node": "*" } }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -1705,17 +4213,115 @@ "node": ">=4" } }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", + "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "dev": true, + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "peer": true + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/workerpool": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", @@ -1760,6 +4366,12 @@ "node": ">=10" } }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -1802,6 +4414,16 @@ "node": ">=10" } }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index c1c72aa30..7c5c0f2e2 100644 --- a/package.json +++ b/package.json @@ -18,20 +18,35 @@ "graphdb" ], "license": "Apache-2.0", - "types": "./index.d.ts", + "types": "dist/cassandra-driver-public.d.ts", + "main": "out/index.js", "dependencies": { "@types/node": "^18.11.18", "adm-zip": "~0.5.10", "long": "~5.2.3" }, "devDependencies": { + "@microsoft/api-extractor": "^7.52.5", + "@rollup/plugin-typescript": "^12.1.2", + "@stylistic/eslint-plugin": "^3.1.0", + "@types/chai": "^5.2.1", + "@types/mocha": "^10.0.10", + "@types/sinon": "^17.0.4", + "@typescript-eslint/eslint-plugin": "^8.26.1", + "@typescript-eslint/parser": "^8.26.1", "chai": "~4.3.8", + "eslint": "^8.57.1", "kerberos": "~2.0.3", "mocha": "~10.2.0", - "mocha-jenkins-reporter": "~0.4.8", - "proxyquire": "~2.1.3", + "mocha-appveyor-reporter": "^0.4.2", + "mocha-jenkins-reporter": "^0.4.8", + "rollup": "^4.37.0", + "rollup-plugin-dts": "^6.2.1", "sinon": "~15.2.0", - "temp": ">= 0.8.3" + "temp": ">= 0.8.3", + "ts-mocha": "^11.1.0", + "tslib": 
"^2.8.1", + "typescript": "^5.8.2" }, "repository": { "type": "git", @@ -41,15 +56,18 @@ "url": "https://groups.google.com/a/lists.datastax.com/forum/#!forum/nodejs-driver-user" }, "scripts": { - "test": "./node_modules/.bin/mocha test/unit -R spec -t 5000 --recursive", - "unit": "./node_modules/.bin/mocha test/unit -R spec -t 5000 --recursive", - "integration_short": "./node_modules/.bin/mocha test/integration/short -R spec -t 5000 --recursive", - "integration_long": "./node_modules/.bin/mocha test/integration/long -R spec -t 5000 --recursive", - "ci_jenkins": "./node_modules/.bin/mocha test/unit test/integration/short --recursive -R mocha-jenkins-reporter --exit", - "ci_appveyor": ".\\\"node_modules/.bin/mocha\" test/unit test/integration/short --recursive -R mocha-appveyor-reporter --exit", - "ci_unit_appveyor": ".\\\"node_modules/.bin/mocha\" test/unit --recursive -R mocha-appveyor-reporter --exit", - "server_api": "./node_modules/.bin/mocha test/integration/short -g '@SERVER_API' --recursive --exit", - "eslint": "eslint lib test" + "bundle": "npx tsc -p . && npx rollup --config && npx api-extractor run --local && cat namespace-to-append.txt >> dist/cassandra-driver-public.d.ts", + "test": "npx ts-mocha test/unit --extension .ts -R spec -t 5000 --recursive --exit", + "unit": "npx ts-mocha test/unit --extension .ts -R spec -t 5000 --recursive --exit", + "integration_short": "npx ts-mocha test/integration/short --extension .ts -R spec -t 5000 --recursive --exit", + "integration_long": "npx ts-mocha test/integration/long --extension .ts -R spec -t 5000 --recursive", + "ci_jenkins": "npx ts-mocha test/unit test/integration/short --extension .ts --recursive -R ./node_modules/mocha-jenkins-reporter --exit", + "ci_appveyor": "npx ts-mocha test/unit test/integration/short --extension .ts --recursive -R ./node_modules/mocha-appveyor-reporter --exit", + "ci_unit_appveyor": "npx ts-mocha test/unit --extension .ts --recursive -R ./node_modules/mocha-appveyor-reporter --exit", + "server_api": "npx ts-mocha test/integration/short --extension .ts -g '@SERVER_API' --recursive --exit", + "eslint": "npx eslint lib test", + "eslint-fix": "npx eslint lib test --fix", + "circular-dep": "npx madge -c lib" }, "engines": { "node": ">=18" diff --git a/rollup.config.mjs b/rollup.config.mjs new file mode 100644 index 000000000..bafc6368e --- /dev/null +++ b/rollup.config.mjs @@ -0,0 +1,13 @@ +// Contents of the file /rollup.config.js +import dts from "rollup-plugin-dts"; +const config = [{ + input: 'out/index.d.ts', + output: { + file: 'out/cassandra-rollup.d.ts', + format: 'es' + }, + external: ['events', 'stream', 'util', 'tls', 'net'], + plugins: [dts()] + } +]; +export default config; \ No newline at end of file diff --git a/test/integration/long/client-metadata-tests.js b/test/integration/long/client-metadata-tests.ts similarity index 97% rename from test/integration/long/client-metadata-tests.js rename to test/integration/long/client-metadata-tests.ts index 6833f9134..af2cadf72 100644 --- a/test/integration/long/client-metadata-tests.js +++ b/test/integration/long/client-metadata-tests.ts @@ -13,15 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Encoder from "../../../lib/encoder"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import tokenizer from "../../../lib/tokenizer"; -const helper = require('../../test-helper'); -const Encoder = require('../../../lib/encoder'); -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); -const tokenizer = require('../../../lib/tokenizer'); describe('Client', function () { this.timeout(240000); diff --git a/test/integration/long/event-tests.js b/test/integration/long/event-tests.ts similarity index 95% rename from test/integration/long/event-tests.js rename to test/integration/long/event-tests.ts index b3cfc3d6d..61b4a67e2 100644 --- a/test/integration/long/event-tests.js +++ b/test/integration/long/event-tests.ts @@ -13,14 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../../test-helper'); -const Host = require('../../../lib/host').Host; -const utils = require('../../../lib/utils'); -const Client = require('../../../lib/client'); +import assert from "assert"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Client from "../../../lib/client"; +import { Host } from "../../../lib/host"; describe('Client', function () { describe('events', function () { diff --git a/test/integration/long/load-balancing-tests.js b/test/integration/long/load-balancing-tests.ts similarity index 96% rename from test/integration/long/load-balancing-tests.js rename to test/integration/long/load-balancing-tests.ts index 2755f9e01..12c9c9d1a 100644 --- a/test/integration/long/load-balancing-tests.js +++ b/test/integration/long/load-balancing-tests.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import loadBalancing from "../../../lib/policies/load-balancing"; + -const helper = require('../../test-helper.js'); -const Client = require('../../../lib/client.js'); -const utils = require('../../../lib/utils.js'); -const loadBalancing = require('../../../lib/policies/load-balancing.js'); const DCAwareRoundRobinPolicy = loadBalancing.DCAwareRoundRobinPolicy; const TokenAwarePolicy = loadBalancing.TokenAwarePolicy; diff --git a/test/integration/short/auth/dse-gssapi-auth-provider-tests.js b/test/integration/short/auth/dse-gssapi-auth-provider-tests.ts similarity index 91% rename from test/integration/short/auth/dse-gssapi-auth-provider-tests.js rename to test/integration/short/auth/dse-gssapi-auth-provider-tests.ts index 327d1d11e..42610859b 100644 --- a/test/integration/short/auth/dse-gssapi-auth-provider-tests.js +++ b/test/integration/short/auth/dse-gssapi-auth-provider-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const helper = require('../../../test-helper'); -const DseGssapiAuthProvider = require('../../../../lib/auth/dse-gssapi-auth-provider'); -const Client = require('../../../../lib/client'); -const errors = require('../../../../lib/errors'); + +import DseGssapiAuthProvider from '../../../../lib/auth/dse-gssapi-auth-provider'; +import assert from "assert"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; +import errors from "../../../../lib/errors"; const ads = helper.ads; const cDescribe = helper.conditionalDescribe( helper.requireOptional('kerberos') && helper.isDseGreaterThan('5.0'), 'kerberos and DSE required to run'); diff --git a/test/integration/short/auth/dse-plain-text-auth-provider-tests.js b/test/integration/short/auth/dse-plain-text-auth-provider-tests.ts similarity index 92% rename from test/integration/short/auth/dse-plain-text-auth-provider-tests.js rename to test/integration/short/auth/dse-plain-text-auth-provider-tests.ts index 202ffda9a..95f187878 100644 --- a/test/integration/short/auth/dse-plain-text-auth-provider-tests.js +++ b/test/integration/short/auth/dse-plain-text-auth-provider-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert } = require('chai'); -const helper = require('../../../test-helper'); -const DsePlainTextAuthProvider = require('../../../../lib/auth/dse-plain-text-auth-provider'); -const Client = require('../../../../lib/client'); + +import DsePlainTextAuthProvider from '../../../../lib/auth/dse-plain-text-auth-provider'; +import { assert } from "chai"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; const vdescribe = helper.vdescribe; vdescribe('dse-5.0', 'DsePlainTextAuthProvider @SERVER_API', function () { diff --git a/test/integration/short/auth/proxy-authentication-test.js b/test/integration/short/auth/proxy-authentication-test.ts similarity index 94% rename from test/integration/short/auth/proxy-authentication-test.js rename to test/integration/short/auth/proxy-authentication-test.ts index cd6c9ba37..f5b31d87b 100644 --- a/test/integration/short/auth/proxy-authentication-test.js +++ b/test/integration/short/auth/proxy-authentication-test.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const helper = require('../../../test-helper'); -const DsePlainTextAuthProvider = require('../../../../lib/auth/dse-plain-text-auth-provider'); -const DseGssapiAuthProvider = require('../../../../lib/auth/dse-gssapi-auth-provider'); -const Client = require('../../../../lib/client'); -const utils = require('../../../../lib/utils'); -const errors = require('../../../../lib/errors'); -const types = require('../../../../lib/types'); + +import DsePlainTextAuthProvider from '../../../../lib/auth/dse-plain-text-auth-provider'; +import DseGssapiAuthProvider from '../../../../lib/auth/dse-gssapi-auth-provider'; +import assert from "assert"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; +import utils from "../../../../lib/utils"; +import errors from "../../../../lib/errors"; +import types from "../../../../lib/types/index"; const vdescribe = helper.vdescribe; const ads = helper.ads; const cDescribe = helper.conditionalDescribe(helper.requireOptional('kerberos'), 'kerberos required to run'); diff --git a/test/integration/short/client-batch-tests.js b/test/integration/short/client-batch-tests.ts similarity index 98% rename from test/integration/short/client-batch-tests.js rename to test/integration/short/client-batch-tests.ts index 46564d951..c2caeb6de 100644 --- a/test/integration/short/client-batch-tests.js +++ b/test/integration/short/client-batch-tests.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; + -const helper = require('../../test-helper.js'); -const Client = require('../../../lib/client.js'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils.js'); -const errors = require('../../../lib/errors.js'); const vit = helper.vit; describe('Client @SERVER_API', function () { @@ -565,7 +565,7 @@ describe('Client @SERVER_API', function () { ]; return client.batch(queries, {prepare: true, keyspace: keyspace}) - .then((result) => + .then((_result) => client.execute(util.format(selectQuery, table1Short), [id], {prepare: true, keyspace: keyspace}) ) .then((result) => { @@ -591,7 +591,7 @@ describe('Client @SERVER_API', function () { {query: util.format(insertQuery, table1Short), params: [id, ts2, 2000]} ]; return client.batch(queries, {prepare: true, keyspace: keyspace}) - .then((result) => { + .then((_result) => { throw new Error('should have failed'); }) .catch(function (err) { diff --git a/test/integration/short/client-each-row-tests.js b/test/integration/short/client-each-row-tests.ts similarity index 98% rename from test/integration/short/client-each-row-tests.js rename to test/integration/short/client-each-row-tests.ts index af1240ad5..1c652eb04 100644 --- a/test/integration/short/client-each-row-tests.js +++ b/test/integration/short/client-each-row-tests.ts @@ -13,17 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import util from "util"; +import sinon from "sinon"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; +import { assert } from "chai"; -"use strict"; -const assert = require('chai').assert; -const util = require('util'); -const sinon = require('sinon'); -const helper = require('../../test-helper.js'); -const Client = require('../../../lib/client.js'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils.js'); -const errors = require('../../../lib/errors.js'); const vit = helper.vit; describe('Client', function () { @@ -584,7 +583,7 @@ describe('Client', function () { assert.ok(result); if (result.nextPage) { // make schema change. - client.execute(util.format('alter table %s add b int', table), (sErr, sResult) => { + client.execute(util.format('alter table %s add b int', table), (sErr, _sResult) => { assert.ifError(sErr); schemaChangeMade = true; // retrieve next page after schema change is made. diff --git a/test/integration/short/client-execute-prepared-tests.js b/test/integration/short/client-execute-prepared-tests.ts similarity index 99% rename from test/integration/short/client-execute-prepared-tests.js rename to test/integration/short/client-execute-prepared-tests.ts index 864eaee21..e0b10cf60 100644 --- a/test/integration/short/client-execute-prepared-tests.js +++ b/test/integration/short/client-execute-prepared-tests.ts @@ -13,23 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; +import loadBalancing from "../../../lib/policies/load-balancing"; +import numericTests from "./numeric-tests"; +import pagingTests from "./paging-tests"; + -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const errors = require('../../../lib/errors'); -const loadBalancing = require('../../../lib/policies/load-balancing'); const vit = helper.vit; const vdescribe = helper.vdescribe; const Uuid = types.Uuid; const commonKs = helper.getRandomName('ks'); -const numericTests = require('./numeric-tests'); -const pagingTests = require('./paging-tests'); - describe('Client @SERVER_API', function () { this.timeout(120000); describe('#execute(query, params, {prepare: 1}, callback)', function () { @@ -1176,7 +1175,7 @@ describe('Client @SERVER_API', function () { } const client = setupInfo.client; return client.execute('select * from local', {prepare: true}, {keyspace: 'system'}) - .then((result) => { + .then((_result) => { throw new Error('should have failed'); }) .catch(function (err) { diff --git a/test/integration/short/client-execute-simulator-tests.js b/test/integration/short/client-execute-simulator-tests.ts similarity index 93% rename from test/integration/short/client-execute-simulator-tests.js rename to test/integration/short/client-execute-simulator-tests.ts index b79a6c299..26cd47b52 100644 --- a/test/integration/short/client-execute-simulator-tests.js +++ 
b/test/integration/short/client-execute-simulator-tests.ts @@ -13,19 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; +import { responseErrorCodes } from "../../../lib/types/index"; +import Client from "../../../lib/client"; +import {AllowListPolicy, DCAwareRoundRobinPolicy} from "../../../lib/policies/load-balancing"; -'use strict'; - -const { assert } = require('chai'); - -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); -const errors = require('../../../lib/errors'); - -const { responseErrorCodes } = require('../../../lib/types'); -const Client = require('../../../lib/client'); -const { AllowListPolicy, DCAwareRoundRobinPolicy } = require('../../../lib/policies').loadBalancing; const query = "select * from data"; const clusterSize = 3; @@ -103,7 +99,7 @@ describe('Client', function() { consistency_level: 'LOCAL_ONE' } }, () => { - client.execute(query, [], { host: host }, (err, result) => { + client.execute(query, [], { host: host }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.strictEqual(Object.keys(err.innerErrors).length, 1); diff --git a/test/integration/short/client-execute-tests.js b/test/integration/short/client-execute-tests.ts similarity index 98% rename from test/integration/short/client-execute-tests.js rename to test/integration/short/client-execute-tests.ts index 30575693f..8d85a1ab1 100644 --- a/test/integration/short/client-execute-tests.js +++ b/test/integration/short/client-execute-tests.ts @@ -13,21 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; +import numericTests from "./numeric-tests"; +import pagingTests from "./paging-tests"; +import {ExecutionProfile} from "../../../lib/execution-profile"; + -const helper = require('../../test-helper.js'); -const Client = require('../../../lib/client.js'); -const ExecutionProfile = require('../../../lib/execution-profile.js').ExecutionProfile; -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils.js'); -const errors = require('../../../lib/errors.js'); const vit = helper.vit; const vdescribe = helper.vdescribe; -const numericTests = require('./numeric-tests'); -const pagingTests = require('./paging-tests'); - describe('Client @SERVER_API', function () { this.timeout(120000); describe('#execute(query, params, {prepare: 0}, callback)', function () { @@ -1032,7 +1031,7 @@ describe('Client @SERVER_API', function () { ip: types.InetAddress.fromString('12.10.126.11'), tup: new types.Tuple(1, 300), d: new types.LocalDate(2015, 6, 1), - t: new types.LocalTime.fromMilliseconds(10160088, 123) + t: types.LocalTime.fromMilliseconds(10160088, 123) }; utils.series([ function insert(next) { @@ -1141,7 +1140,7 @@ describe('Client @SERVER_API', function () { } const client = setupInfo.client; return client.execute('select * from local', null, {keyspace: 'system'}) - .then((result) => { + .then((_result) => { throw new Error('should have failed'); }) .catch(function (err) { diff --git a/test/integration/short/client-pool-tests.js b/test/integration/short/client-pool-tests.ts similarity index 96% rename from test/integration/short/client-pool-tests.js rename to test/integration/short/client-pool-tests.ts index 212d2f521..06310eca9 100644 --- a/test/integration/short/client-pool-tests.js +++ b/test/integration/short/client-pool-tests.ts @@ -13,22 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; - -const { assert } = require('chai'); -const dns = require('dns'); -const util = require('util'); - -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const clientOptions = require('../../../lib/client-options'); -const utils = require('../../../lib/utils'); -const errors = require('../../../lib/errors'); -const types = require('../../../lib/types'); -const policies = require('../../../lib/policies'); -const RoundRobinPolicy = require('../../../lib/policies/load-balancing.js').RoundRobinPolicy; -const Murmur3Tokenizer = require('../../../lib/tokenizer.js').Murmur3Tokenizer; -const { PlainTextAuthProvider } = require('../../../lib/auth'); + +import { RoundRobinPolicy } from '../../../lib/policies/load-balancing'; +import { Murmur3Tokenizer } from '../../../lib/tokenizer'; +import { PlainTextAuthProvider } from '../../../lib/auth/index'; +import { assert } from "chai"; +import dns from "dns"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import clientOptions from "../../../lib/client-options"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; +import types from "../../../lib/types/index"; +import policies from "../../../lib/policies/index"; + const ConstantSpeculativeExecutionPolicy = policies.speculativeExecution.ConstantSpeculativeExecutionPolicy; const OrderedLoadBalancingPolicy = helper.OrderedLoadBalancingPolicy; const vit = helper.vit; @@ -1075,13 +1074,34 @@ }); }); }); + //TODO: figure out why this test is failing. It passed before the migration, but after subsequent changes it now fails even on the pre-migration version of the driver. + // describe('customize test for spex', function(){ + // helper.setup(3, { initClient: false }); + // it('customize', async function(){ + // let policy = new ConstantSpeculativeExecutionPolicy(100, 1); + // helper.ccmHelper.pauseNode(1); + // const client = newInstance({ + // pooling: { warmup: true }, + // policies: { + // speculativeExecution: policy, + // loadBalancing: new OrderedLoadBalancingPolicy(), + // retry: new helper.FallthroughRetryPolicy() + // }, + // socketOptions: { + // readTimeout: 5000 + // } + // }); + // const result = await client.execute('SELECT * FROM system.local', null, { prepare: true, isIdempotent: true }); + // assert.strictEqual(result.info.queriedHost, client.hosts.keys()[1]); + // }) + // }) }); /** * @param {ClientOptions} [options] * @returns {Client} */ -function newInstance(options) { +function newInstance(options?) { const client = new Client(utils.deepExtend({}, helper.baseOptions, options)); helper.shutdownAfterThisTest(client); return client; diff --git a/test/integration/short/client-stream-tests.js b/test/integration/short/client-stream-tests.ts similarity index 97% rename from test/integration/short/client-stream-tests.js rename to test/integration/short/client-stream-tests.ts index 0ed8ab039..e81a87dd2 100644 --- a/test/integration/short/client-stream-tests.js +++ b/test/integration/short/client-stream-tests.ts @@ -13,17 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; -const helper = require('../../test-helper.js'); -const vit = helper.vit; -const Client = require('../../../lib/client.js'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils.js'); -const errors = require('../../../lib/errors.js'); +const vit = helper.vit; describe('Client', function () { this.timeout(120000); describe('#stream(query, params, {prepare: 0})', function () { diff --git a/test/integration/short/cloud/cloud-helper.js b/test/integration/short/cloud/cloud-helper.ts similarity index 93% rename from test/integration/short/cloud/cloud-helper.js rename to test/integration/short/cloud/cloud-helper.ts index 11eaf5f73..f3333cac4 100644 --- a/test/integration/short/cloud/cloud-helper.js +++ b/test/integration/short/cloud/cloud-helper.ts @@ -14,14 +14,11 @@ * limitations under the License. */ -'use strict'; - -const format = require('util').format; -const path = require('path'); -const exec = require('child_process').exec; - -const Client = require('../../../../lib/client'); -const helper = require('../../../test-helper'); +import path from "path"; +import Client from "../../../../lib/client"; +import helper from "../../../test-helper"; +import { format } from "util"; +import { exec } from "child_process"; const ccmCmdString = 'docker exec $(docker ps -a -q --filter ancestor=single_endpoint) ccm %s'; @@ -127,3 +124,4 @@ const cloudHelper = module.exports = { } }; +export default cloudHelper; \ No newline at end of file diff --git a/test/integration/short/cloud/cloud-tests.js b/test/integration/short/cloud/cloud-tests.ts similarity index 90% rename from test/integration/short/cloud/cloud-tests.js rename to test/integration/short/cloud/cloud-tests.ts index c0ab629c3..8a6fbf901 100644 --- a/test/integration/short/cloud/cloud-tests.js +++ b/test/integration/short/cloud/cloud-tests.ts @@ -13,20 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); -const proxyquire = require('proxyquire'); - -const cloudHelper = require('./cloud-helper'); -const helper = require('../../../test-helper'); -const policies = require('../../../../lib/policies'); -const errors = require('../../../../lib/errors'); -const auth = require('../../../../lib/auth'); -const utils = require('../../../../lib/utils'); -const types = require('../../../../lib/types'); -const promiseUtils = require('../../../../lib/promise-utils'); +import { assert } from "chai"; +import sinon from "sinon"; +import cloudHelper from "./cloud-helper"; +import helper from "../../../test-helper"; +import policies from "../../../../lib/policies/index"; +import errors from "../../../../lib/errors"; +import auth from "../../../../lib/auth/index"; +import utils from "../../../../lib/utils"; +import types from "../../../../lib/types/index"; +import promiseUtils from "../../../../lib/promise-utils"; +import Client from "../../../../lib/client"; + + const vdescribe = helper.vdescribe; const port = 9042; @@ -64,7 +63,8 @@ vdescribe('dse-6.7', 'Cloud support', function () { assert.strictEqual(client.options.queryOptions.consistency, types.consistencies.localQuorum); // It should use the proxy address to validate the server identity - assert.match(client.options.sslOptions.checkServerIdentity.toString(), /sni\.address/); + // It was 'sni.address' in the past, but now it's '["sni"].address' + assert.match(client.options.sslOptions.checkServerIdentity.toString(), /\["sni"\]\.address/); client.hosts.forEach(h => { assert.ok(h.isUp()); @@ -77,23 +77,20 @@ vdescribe('dse-6.7', 'Cloud support', function () { it('should use all the proxy resolved addresses', () => { let resolvedAddress; - const libPath = '../../../../lib'; - - const ControlConnection = proxyquire(`${libPath}/control-connection`, { - 'dns': { - resolve4: (name, cb) => { - resolvedAddress = name; - // Use different loopback addresses - cb(null, ['127.0.0.1', '127.0.0.2']); - } - } - }); - const Client = proxyquire(`${libPath}/client`, { './control-connection': ControlConnection }); + // eslint-disable-next-line global-require + const dnsStub = sinon.stub(require('dns'), 'resolve4').callsFake((name, cb) => { + resolvedAddress = name; + // Use different loopback addresses + (cb as Function)(null, ['127.0.0.1', '127.0.0.2']); + }); const client = new Client(cloudHelper.getOptions({ cloud: { secureConnectBundle: 'certs/bundles/creds-v1.zip' } })); - after(() => client.shutdown()); + after(() => { + client.shutdown(); + dnsStub.restore(); + }); return client.connect() .then(() => { @@ -148,7 +145,7 @@ vdescribe('dse-6.7', 'Cloud support', function () { try { await client.connect(); - } catch (err) { + } catch (_err) { // Ignore auth error } diff --git a/test/integration/short/concurrent/execute-concurrent-tests.js b/test/integration/short/concurrent/execute-concurrent-tests.ts similarity index 96% rename from test/integration/short/concurrent/execute-concurrent-tests.js rename to test/integration/short/concurrent/execute-concurrent-tests.ts index 312bf06fe..27aa2aea8 100644 --- a/test/integration/short/concurrent/execute-concurrent-tests.js +++ b/test/integration/short/concurrent/execute-concurrent-tests.ts @@ -14,16 +14,15 @@ * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const fs = require('fs'); -const types = require('../../../../lib/types'); -const errors = require('../../../../lib/errors'); -const helper = require('../../../test-helper'); -const Transform = require('stream').Transform; +import assert from "assert"; +import fs from "fs"; +import types from "../../../../lib/types/index"; +import errors from "../../../../lib/errors"; +import helper from "../../../test-helper"; +import { Transform } from "stream"; +import {executeConcurrent} from "../../../../lib/concurrent"; + const Uuid = types.Uuid; -const executeConcurrent = require('../../../../lib/concurrent').executeConcurrent; const insertQuery1 = 'INSERT INTO table1 (key1, key2, value) VALUES (?, ?, ?)'; const insertQuery2 = 'INSERT INTO table2 (id, value) VALUES (?, ?)'; diff --git a/test/integration/short/connection-tests.js b/test/integration/short/connection-tests.ts similarity index 94% rename from test/integration/short/connection-tests.js rename to test/integration/short/connection-tests.ts index 28d304a5d..0ae0d96ed 100644 --- a/test/integration/short/connection-tests.js +++ b/test/integration/short/connection-tests.ts @@ -13,19 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -"use strict"; -const assert = require('chai').assert; -const sinon = require('sinon'); - -const Connection = require('../../../lib/connection.js'); -const defaultOptions = require('../../../lib/client-options.js').defaultOptions(); -const utils = require('../../../lib/utils.js'); -const requests = require('../../../lib/requests.js'); -const protocolVersion = require('../../../lib/types').protocolVersion; -const helper = require('../../test-helper.js'); -const errors = require('../../../lib/errors'); -const types = require('../../../lib/types'); +import sinon from "sinon"; +import Connection from "../../../lib/connection"; +import utils from "../../../lib/utils"; +import requests from "../../../lib/requests"; +import helper from "../../test-helper"; +import errors from "../../../lib/errors"; +import types from "../../../lib/types/index"; +import { assert } from "chai"; +import { defaultOptions as defaultOptionsFunction} from "../../../lib/client-options"; +import { protocolVersion } from "../../../lib/types"; + +const defaultOptions = defaultOptionsFunction(); const vit = helper.vit; describe('Connection', function () { @@ -291,7 +290,7 @@ describe('Connection', function () { }); /** @returns {Connection} */ -function newInstance(address, protocolVersion, options){ +function newInstance(address?, protocolVersion?, options?){ if (!address) { address = helper.baseOptions.contactPoints[0]; } diff --git a/test/integration/short/control-connection-simulator-tests.js b/test/integration/short/control-connection-simulator-tests.ts similarity index 95% rename from test/integration/short/control-connection-simulator-tests.js rename to test/integration/short/control-connection-simulator-tests.ts index a5f8db99c..79b11e194 100644 --- a/test/integration/short/control-connection-simulator-tests.js +++ b/test/integration/short/control-connection-simulator-tests.ts @@ -13,15 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import assert from "assert"; +import net from "net"; +import simulacron from "../simulacron"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import Client from "../../../lib/client"; -const assert = require('assert'); -const net = require('net'); -const simulacron = require('../simulacron'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); - -const Client = require('../../../lib/client.js'); describe('ControlConnection', function() { this.timeout(5000); @@ -110,7 +108,7 @@ function testWithNodes(nodeVersions, expectedProtocolVersion, maxVersion) { }, next); }, function connect(next) { - client.connect(function (err, result) { + client.connect(function (err, _result) { if (expectedProtocolVersion < 3) { // An error is expected here since simulacron can't connect with < protocol v3. assert.ok(err); diff --git a/test/integration/short/control-connection-tests.js b/test/integration/short/control-connection-tests.ts similarity index 94% rename from test/integration/short/control-connection-tests.js rename to test/integration/short/control-connection-tests.ts index b48102160..b946e68c4 100644 --- a/test/integration/short/control-connection-tests.js +++ b/test/integration/short/control-connection-tests.ts @@ -13,18 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); - -const helper = require('../../test-helper'); -const Client = require('../../../lib/client.js'); -const ControlConnection = require('../../../lib/control-connection'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); -const clientOptions = require('../../../lib/client-options'); -const policies = require('../../../lib/policies'); -const ProfileManager = require('../../../lib/execution-profile').ProfileManager; + +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import ControlConnection from "../../../lib/control-connection"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import clientOptions from "../../../lib/client-options"; +import policies from "../../../lib/policies/index"; +import {ProfileManager} from "../../../lib/execution-profile"; describe('ControlConnection', function () { this.timeout(240000); diff --git a/test/integration/short/custom-payload-tests.js b/test/integration/short/custom-payload-tests.ts similarity index 97% rename from test/integration/short/custom-payload-tests.js rename to test/integration/short/custom-payload-tests.ts index 1fcfff276..ddff2c4a0 100644 --- a/test/integration/short/custom-payload-tests.js +++ b/test/integration/short/custom-payload-tests.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; + -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); const vit = helper.vit; describe('custom payload @SERVER_API', function () { diff --git a/test/integration/short/duration-type-tests.js b/test/integration/short/duration-type-tests.ts similarity index 94% rename from test/integration/short/duration-type-tests.js rename to test/integration/short/duration-type-tests.ts index 8bffa89ce..cfe53e411 100644 --- a/test/integration/short/duration-type-tests.js +++ b/test/integration/short/duration-type-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const helper = require('../../test-helper'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; + const vdescribe = helper.vdescribe; const Duration = types.Duration; diff --git a/test/integration/short/error-simulator-tests.js b/test/integration/short/error-simulator-tests.ts similarity index 93% rename from test/integration/short/error-simulator-tests.js rename to test/integration/short/error-simulator-tests.ts index 83536bb30..b7f297e62 100644 --- a/test/integration/short/error-simulator-tests.js +++ b/test/integration/short/error-simulator-tests.ts @@ -13,16 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const errors = require('../../../lib/errors'); -const types = require('../../../lib/types'); -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); +import assert from "assert"; +import errors from "../../../lib/errors"; +import types from "../../../lib/types/index"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Client from "../../../lib/client"; +import { OrderedLoadBalancingPolicy } from "../../test-helper"; -const Client = require('../../../lib/client'); -const { OrderedLoadBalancingPolicy } = require('../../test-helper'); const query = "select * from data"; @@ -66,7 +65,7 @@ describe('Client', function() { alive: 4, required: 5, consistency_level: 'LOCAL_QUORUM' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.unavailableException); @@ -81,7 +80,7 @@ describe('Client', function() { block_for: 2, consistency_level: 'TWO', data_present: false - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.readTimeout); @@ -97,7 +96,7 @@ describe('Client', function() { block_for: 2, consistency_level: 'TWO', data_present: false - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.readTimeout); @@ -113,7 +112,7 @@ describe('Client', function() { block_for: 2, consistency_level: 'TWO', data_present: true - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.readTimeout); @@ -133,7 +132,7 @@ describe('Client', function() { '127.0.0.1': 'READ_TOO_MANY_TOMBSTONES', '127.0.0.2': 'UNKNOWN' }, - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.readFailure); @@ -150,7 +149,7 @@ describe('Client', function() { block_for: 3, consistency_level: 'QUORUM', write_type: 'SIMPLE' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.writeTimeout); @@ -166,7 +165,7 @@ describe('Client', function() { block_for: 1, consistency_level: 'ONE', write_type: 'BATCH_LOG' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.writeTimeout); @@ -185,7 +184,7 @@ describe('Client', function() { }, consistency_level: 'THREE', write_type: 'COUNTER' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.writeFailure); @@ -202,7 +201,7 @@ describe('Client', function() { function: 'foo', arg_types: ['int', 'varchar', 'blob'], detail: 'Could not execute function' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.functionFailure); @@ -216,7 +215,7 @@ describe('Client', 
function() { message: 'The table already exists!', keyspace: 'myks', table: 'myTbl' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.alreadyExists); @@ -229,7 +228,7 @@ describe('Client', function() { message: 'The keyspace already exists!', keyspace: 'myks', table: '' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.alreadyExists); @@ -240,7 +239,7 @@ describe('Client', function() { it ('should error with configError', errorResultTest({ result: 'config_error', message: 'Invalid Configuration!' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.configError); @@ -249,7 +248,7 @@ describe('Client', function() { it ('should error with invalid', errorResultTest({ result: 'invalid', message: 'Invalid Query!' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.invalid); @@ -258,7 +257,7 @@ describe('Client', function() { it ('should error with protocolError', errorResultTest({ result: 'protocol_error', message: 'Protocol Error!' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.protocolError); @@ -267,7 +266,7 @@ describe('Client', function() { it ('should error with serverError', errorResultTest({ result: 'server_error', message: 'Server Error!', - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.serverError); @@ -276,7 +275,7 @@ describe('Client', function() { it ('should error with syntaxError', errorResultTest({ result: 'syntax_error', message: 'Invalid Syntax!', - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.syntaxError); @@ -285,7 +284,7 @@ describe('Client', function() { it ('should error with unauthorized', errorResultTest({ result: 'unauthorized', message: 'Unauthorized!', - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.ResponseError); assert.strictEqual(err.code, types.responseErrorCodes.unauthorized); @@ -295,7 +294,7 @@ describe('Client', function() { it ('should error with isBootstrapping', errorResultTest({ result: 'is_bootstrapping', message: 'Bootstrapping!' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.strictEqual(Object.keys(err.innerErrors).length, 3); @@ -309,7 +308,7 @@ describe('Client', function() { it ('should error with overloaded', errorResultTest({ result: 'overloaded', message: 'Overloaded!' 
- }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.strictEqual(Object.keys(err.innerErrors).length, 3); @@ -323,7 +322,7 @@ describe('Client', function() { it ('should error with truncateError', errorResultTest({ result: 'truncate_error', message: 'Timeout while truncating table' - }, (err, result) => { + }, (err, _result) => { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.strictEqual(Object.keys(err.innerErrors).length, 3); diff --git a/test/integration/short/error-tests.js b/test/integration/short/error-tests.ts similarity index 94% rename from test/integration/short/error-tests.js rename to test/integration/short/error-tests.ts index 1e9a92422..8df4c2eaa 100644 --- a/test/integration/short/error-tests.js +++ b/test/integration/short/error-tests.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import errors from "../../../lib/errors"; + -const helper = require('../../test-helper'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const errors = require('../../../lib/errors'); const protocolVersion = types.protocolVersion; const vdescribe = helper.vdescribe; const vit = helper.vit; diff --git a/test/integration/short/execution-profile-tests.js b/test/integration/short/execution-profile-tests.ts similarity index 93% rename from test/integration/short/execution-profile-tests.js rename to test/integration/short/execution-profile-tests.ts index 154941d85..13d8580af 100644 --- a/test/integration/short/execution-profile-tests.js +++ b/test/integration/short/execution-profile-tests.ts @@ -13,18 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import assert from "assert"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import simulacron from "../simulacron"; +import { AllowListPolicy, DCAwareRoundRobinPolicy } from "../../../lib/policies/load-balancing"; +import { ExecutionProfile } from "../../../lib/execution-profile"; -'use strict'; - -const assert = require('assert'); - -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const simulacron = require('../simulacron'); -const { AllowListPolicy, DCAwareRoundRobinPolicy } = require('../../../lib/policies/load-balancing'); -const { ExecutionProfile } = require('../../../lib/execution-profile'); describe('ProfileManager', function() { this.timeout(40000); diff --git a/test/integration/short/geometry/line-string-tests.js b/test/integration/short/geometry/line-string-tests.ts similarity index 97% rename from test/integration/short/geometry/line-string-tests.js rename to test/integration/short/geometry/line-string-tests.ts index 7effd3ccf..1e00ae09b 100644 --- a/test/integration/short/geometry/line-string-tests.js +++ b/test/integration/short/geometry/line-string-tests.ts @@ -13,15 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const helper = require('../../../test-helper'); -const Client = require('../../../../lib/client'); +import assert from "assert"; +import util from "util"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; +import geometry from "../../../../lib/geometry/index"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; + + const vdescribe = helper.vdescribe; -const geometry = require('../../../../lib/geometry'); -const types = require('../../../../lib/types'); -const utils = require('../../../../lib/utils'); const Point = geometry.Point; const LineString = geometry.LineString; const Uuid = types.Uuid; diff --git a/test/integration/short/geometry/point-tests.js b/test/integration/short/geometry/point-tests.ts similarity index 97% rename from test/integration/short/geometry/point-tests.js rename to test/integration/short/geometry/point-tests.ts index 3a95c59f9..54e7eb52a 100644 --- a/test/integration/short/geometry/point-tests.js +++ b/test/integration/short/geometry/point-tests.ts @@ -13,15 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const helper = require('../../../test-helper'); -const Client = require('../../../../lib/client'); +import assert from "assert"; +import util from "util"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; +import geometry from "../../../../lib/geometry/index"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; + + const vdescribe = helper.vdescribe; -const geometry = require('../../../../lib/geometry'); -const types = require('../../../../lib/types'); -const utils = require('../../../../lib/utils'); const Point = geometry.Point; const Uuid = types.Uuid; const Tuple = types.Tuple; diff --git a/test/integration/short/geometry/polygon-tests.js b/test/integration/short/geometry/polygon-tests.ts similarity index 97% rename from test/integration/short/geometry/polygon-tests.js rename to test/integration/short/geometry/polygon-tests.ts index 41715487e..9b3b5c533 100644 --- a/test/integration/short/geometry/polygon-tests.js +++ b/test/integration/short/geometry/polygon-tests.ts @@ -13,15 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const helper = require('../../../test-helper'); -const Client = require('../../../../lib/client'); +import assert from "assert"; +import util from "util"; +import helper from "../../../test-helper"; +import Client from "../../../../lib/client"; +import geometry from "../../../../lib/geometry/index"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; + + const vdescribe = helper.vdescribe; -const geometry = require('../../../../lib/geometry'); -const types = require('../../../../lib/types'); -const utils = require('../../../../lib/utils'); const Point = geometry.Point; const Polygon = geometry.Polygon; const Uuid = types.Uuid; diff --git a/test/integration/short/graph/graph-olap-tests.js b/test/integration/short/graph/graph-olap-tests.ts similarity index 94% rename from test/integration/short/graph/graph-olap-tests.js rename to test/integration/short/graph/graph-olap-tests.ts index 41789bf1b..9ba3bafb8 100644 --- a/test/integration/short/graph/graph-olap-tests.js +++ b/test/integration/short/graph/graph-olap-tests.ts @@ -14,21 +14,19 @@ * limitations under the License. 
*/ -'use strict'; +import { assert } from "chai"; +import util from "util"; +import Client from "../../../../lib/client"; +import graphModule from "../../../../lib/datastax/graph/index"; +import { ExecutionProfile } from "../../../../lib/execution-profile"; +import loadBalancing from "../../../../lib/policies/load-balancing"; +import promiseUtils from "../../../../lib/promise-utils"; +import utils from "../../../../lib/utils"; +import helper from "../../../test-helper"; +import graphTestHelper from "./graph-test-helper"; -const { assert } = require('chai'); -const util = require('util'); -const Client = require('../../../../lib/client'); -const promiseUtils = require('../../../../lib/promise-utils'); -const helper = require('../../../test-helper'); const vdescribe = helper.vdescribe; -const loadBalancing = require('../../../../lib/policies/load-balancing'); const DefaultLoadBalancingPolicy = loadBalancing.DefaultLoadBalancingPolicy; -const ExecutionProfile = require('../../../../lib/execution-profile').ExecutionProfile; -const utils = require('../../../../lib/utils'); -const graphModule = require('../../../../lib/datastax/graph'); -const graphTestHelper = require('./graph-test-helper'); - vdescribe('dse-5.0', 'Client with spark workload', function () { this.timeout(360000); @@ -198,7 +196,7 @@ async function waitForWorkers(client, expectedWorkers) { let master; try { master = await findSparkMasterAsync(client); - } catch (err) { + } catch (_err) { await promiseUtils.delay(delay); continue; } diff --git a/test/integration/short/graph/graph-test-helper.js b/test/integration/short/graph/graph-test-helper.ts similarity index 72% rename from test/integration/short/graph/graph-test-helper.js rename to test/integration/short/graph/graph-test-helper.ts index 3e2264e55..7d72da8ba 100644 --- a/test/integration/short/graph/graph-test-helper.js +++ b/test/integration/short/graph/graph-test-helper.ts @@ -13,41 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -'use strict'; - -const utils = require('../../../../lib/utils'); - -module.exports = { - - /** - * Creates the modern schema and graph - * @param {Client} client - * @param {Function} callback - */ - createModernGraph: function (client, callback) { - utils.series([ - next => client.executeGraph(modernSchema, null, {graphName: "name1"}, next), - next => client.executeGraph(modernGraph, null, {graphName: "name1"}, next) - ], callback); - }, - - /** - * Sets the schema mode to "production". - * @param {Client} client - */ - makeStrict: function (client) { - return client.executeGraph(makeStrictQuery, null, { graphName: 'name1'}); - }, - - /** - * Sets the allow_scan flag. 
- * @param {Client} client - */ - allowScans: function (client) { - return client.executeGraph(allowScansQuery, null, { graphName: 'name1'}); - } -}; +import utils from "../../../../lib/utils"; const makeStrictQuery = 'schema.config().option("graph.schema_mode").set("production")'; @@ -78,4 +44,39 @@ const modernGraph = 'marko.addEdge("created", lop, "weight", 0.4f);\n' + 'josh.addEdge("created", ripple, "weight", 1.0f);\n' + 'josh.addEdge("created", lop, "weight", 0.4f);\n' + - 'peter.addEdge("created", lop, "weight", 0.2f);'; \ No newline at end of file + 'peter.addEdge("created", lop, "weight", 0.2f);'; + + +/** + * Creates the modern schema and graph + * @param {Client} client + * @param {Function} callback + */ +const createModernGraph = function (client, callback) { + utils.series([ + next => client.executeGraph(modernSchema, null, { graphName: "name1" }, next), + next => client.executeGraph(modernGraph, null, { graphName: "name1" }, next) + ], callback); +}; + +/** + * Sets the schema mode to "production". + * @param {Client} client + */ +const makeStrict = function (client) { + return client.executeGraph(makeStrictQuery, null, { graphName: 'name1' }); +}; + +/** + * Sets the allow_scan flag. + * @param {Client} client + */ +const allowScans = function (client) { + return client.executeGraph(allowScansQuery, null, { graphName: 'name1' }); +}; + +export default { + createModernGraph, + makeStrict, + allowScans +}; \ No newline at end of file diff --git a/test/integration/short/graph/graph-tests.js b/test/integration/short/graph/graph-tests.ts similarity index 98% rename from test/integration/short/graph/graph-tests.js rename to test/integration/short/graph/graph-tests.ts index 81e641e71..8e18944a4 100644 --- a/test/integration/short/graph/graph-tests.js +++ b/test/integration/short/graph/graph-tests.ts @@ -13,25 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const sinon = require('sinon'); -const { assert } = require('chai'); -const util = require('util'); -const Client = require('../../../../lib/client'); -const helper = require('../../../test-helper'); +import sinon from "sinon"; +import { assert } from "chai"; +import util from "util"; +import Client from "../../../../lib/client"; +import helper from "../../../test-helper"; +import { LineString, Point, Polygon } from "../../../../lib/geometry/index"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; +import graphModule from "../../../../lib/datastax/graph/index"; +import { graphProtocol } from "../../../../lib/datastax/graph/options"; +import graphTestHelper from "./graph-test-helper"; +import { ExecutionProfile } from "../../../../lib/execution-profile"; const vdescribe = helper.vdescribe; const vit = helper.vit; -const { Point, LineString, Polygon } = require('../../../../lib/geometry'); -const types = require('../../../../lib/types'); const { InetAddress, Uuid, Tuple } = types; -const ExecutionProfile = require('../../../../lib/execution-profile').ExecutionProfile; -const utils = require('../../../../lib/utils'); -const graphModule = require('../../../../lib/datastax/graph'); const { asInt, asFloat, asUdt, t, Edge, direction } = graphModule; -const { graphProtocol } = require('../../../../lib/datastax/graph/options'); -const graphTestHelper = require('./graph-test-helper'); - const graphLanguageBytecode = 'bytecode-json'; let schemaCounter = 1000; diff --git a/test/integration/short/graph/graph-timeout-tests.js b/test/integration/short/graph/graph-timeout-tests.ts similarity index 95% rename from test/integration/short/graph/graph-timeout-tests.js rename to test/integration/short/graph/graph-timeout-tests.ts index 09b5c863b..79e999279 100644 --- a/test/integration/short/graph/graph-timeout-tests.js +++ b/test/integration/short/graph/graph-timeout-tests.ts @@ -14,16 +14,16 @@ * limitations under the License. */ -'use strict'; +import assert from "assert"; +import errors from "../../../../lib/errors"; +import helper from "../../../test-helper"; +import utils from "../../../../lib/utils"; +import Client from "../../../../lib/client"; +import {ExecutionProfile} from "../../../../lib/execution-profile"; +import {RetryPolicy as DefaultRetryPolicy} from "../../../../lib/policies/retry"; + -const assert = require('assert'); -const errors = require('../../../../lib/errors'); -const helper = require('../../../test-helper'); const vdescribe = helper.vdescribe; -const utils = require('../../../../lib/utils'); -const Client = require('../../../../lib/client'); -const ExecutionProfile = require('../../../../lib/execution-profile').ExecutionProfile; -const DefaultRetryPolicy = require('../../../../lib/policies/retry').RetryPolicy; vdescribe('dse-5.0', 'graph query client timeouts', function () { this.timeout(120000); diff --git a/test/integration/short/graph/graph-with-down-node-tests.js b/test/integration/short/graph/graph-with-down-node-tests.ts similarity index 93% rename from test/integration/short/graph/graph-with-down-node-tests.js rename to test/integration/short/graph/graph-with-down-node-tests.ts index 7b1308536..9a9e4ddf4 100644 --- a/test/integration/short/graph/graph-with-down-node-tests.js +++ b/test/integration/short/graph/graph-with-down-node-tests.ts @@ -14,18 +14,16 @@ * limitations under the License. 
*/ -'use strict'; +import assert from "assert"; +import Client from "../../../../lib/client"; +import helper from "../../../test-helper"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; +import graphTestHelper from "./graph-test-helper"; +import {ExecutionProfile} from "../../../../lib/execution-profile"; -const assert = require('assert'); -const Client = require('../../../../lib/client'); -const helper = require('../../../test-helper'); const vdescribe = helper.vdescribe; -const types = require('../../../../lib/types'); const cl = types.consistencies; -const ExecutionProfile = require('../../../../lib/execution-profile').ExecutionProfile; -const utils = require('../../../../lib/utils'); -const graphTestHelper = require('./graph-test-helper'); - // DSP-15333 prevents this suite to be tested against DSE 5.0 vdescribe('dse-5.1', 'Client with down node', function () { this.timeout(270000); diff --git a/test/integration/short/insights-simulator-tests.js b/test/integration/short/insights-simulator-tests.ts similarity index 93% rename from test/integration/short/insights-simulator-tests.js rename to test/integration/short/insights-simulator-tests.ts index e2f364d8d..4f233adfb 100644 --- a/test/integration/short/insights-simulator-tests.js +++ b/test/integration/short/insights-simulator-tests.ts @@ -13,17 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import assert from "assert"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import InsightsClient from "../../../lib/insights-client"; -'use strict'; -const assert = require('assert'); -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); const vdescribe = helper.vdescribe; -const InsightsClient = require('../../../lib/insights-client'); - const insightsRpcQuery = 'CALL InsightsRpc.reportInsight(?)'; vdescribe('dse-6.7', 'InsightsClient', function () { diff --git a/test/integration/short/load-balancing-simulator-tests.js b/test/integration/short/load-balancing-simulator-tests.ts similarity index 96% rename from test/integration/short/load-balancing-simulator-tests.js rename to test/integration/short/load-balancing-simulator-tests.ts index cbcfe7d28..40a78d6bf 100644 --- a/test/integration/short/load-balancing-simulator-tests.js +++ b/test/integration/short/load-balancing-simulator-tests.ts @@ -13,18 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const util = require('util'); -const simulacron = require('../simulacron'); -const utils = require('../../../lib/utils'); -const helper = require('../../test-helper'); -const policies = require('../../../lib/policies'); -const errors = require('../../../lib/errors'); -const promiseUtils = require('../../../lib/promise-utils'); -const { ExecutionProfile } = require('../../../lib/execution-profile'); -const Client = require('../../../lib/client'); +import assert from "assert"; +import util from "util"; +import simulacron from "../simulacron"; +import utils from "../../../lib/utils"; +import helper from "../../test-helper"; +import policies from "../../../lib/policies/index"; +import errors from "../../../lib/errors"; +import promiseUtils from "../../../lib/promise-utils"; +import { ExecutionProfile } from "../../../lib/execution-profile"; +import Client from "../../../lib/client"; + + const { loadBalancing } = policies; const queryOptions = { prepare: true, routingKey: utils.allocBuffer(16), keyspace: 16 }; diff --git a/test/integration/short/load-balancing-tests.js b/test/integration/short/load-balancing-tests.ts similarity index 96% rename from test/integration/short/load-balancing-tests.js rename to test/integration/short/load-balancing-tests.ts index d1d111936..1086e1076 100644 --- a/test/integration/short/load-balancing-tests.js +++ b/test/integration/short/load-balancing-tests.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import { AllowListPolicy, RoundRobinPolicy, TokenAwarePolicy} from "../../../lib/policies/load-balancing"; + -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); -const { RoundRobinPolicy, AllowListPolicy, TokenAwarePolicy} = require('../../../lib/policies/load-balancing'); const vdescribe = helper.vdescribe; const maxInFlightRequests = 16; @@ -210,7 +210,7 @@ context('with a reusable 3 node cluster', function () { }); it('should target the correct replica using user-provided Buffer routingKey', function (done) { // Use [0] which should map to node 1 - testWithQueryOptions((client) => ({ + testWithQueryOptions((_client) => ({ routingKey: Buffer.from([0]) }), '1', done); }); diff --git a/test/integration/short/mapping/custom-queries-tests.js b/test/integration/short/mapping/custom-queries-tests.ts similarity index 90% rename from test/integration/short/mapping/custom-queries-tests.js rename to test/integration/short/mapping/custom-queries-tests.ts index 4847f9d02..7ae3b2e67 100644 --- a/test/integration/short/mapping/custom-queries-tests.js +++ b/test/integration/short/mapping/custom-queries-tests.ts @@ -13,17 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import assert from "assert"; +import types from "../../../../lib/types/index"; +import Result from "../../../../lib/mapping/result"; +import mapperTestHelper from "./mapper-test-helper"; +import helper from "../../../test-helper"; -'use strict'; -const assert = require('assert'); -const types = require('../../../../lib/types'); -const Result = require('../../../../lib/mapping/result'); const Uuid = types.Uuid; -const mapperTestHelper = require('./mapper-test-helper'); const assertRowMatchesDoc = mapperTestHelper.assertRowMatchesDoc; -const helper = require('../../../test-helper'); - describe('ModelMapper', function () { mapperTestHelper.setupOnce(this); diff --git a/test/integration/short/mapping/from-and-to-model-function-tests.js b/test/integration/short/mapping/from-and-to-model-function-tests.ts similarity index 93% rename from test/integration/short/mapping/from-and-to-model-function-tests.js rename to test/integration/short/mapping/from-and-to-model-function-tests.ts index 354575812..93afd82ad 100644 --- a/test/integration/short/mapping/from-and-to-model-function-tests.js +++ b/test/integration/short/mapping/from-and-to-model-function-tests.ts @@ -13,15 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import { Uuid } from "../../../../lib/types/index"; +import mapperTestHelper from "./mapper-test-helper"; +import helper from "../../../test-helper"; +import { UnderscoreCqlToCamelCaseMappings } from "../../../../lib/mapping/table-mappings"; -'use strict'; - -const { assert } = require('chai'); - -const { Uuid } = require('../../../../lib/types'); -const mapperTestHelper = require('./mapper-test-helper'); -const helper = require('../../../test-helper'); -const { UnderscoreCqlToCamelCaseMappings } = require('../../../../lib/mapping/table-mappings'); describe('Mapper', function () { mapperTestHelper.setupOnce(this); diff --git a/test/integration/short/mapping/mapper-test-helper.js b/test/integration/short/mapping/mapper-test-helper.ts similarity index 94% rename from test/integration/short/mapping/mapper-test-helper.js rename to test/integration/short/mapping/mapper-test-helper.ts index cccf0b9d2..67f1ce30d 100644 --- a/test/integration/short/mapping/mapper-test-helper.js +++ b/test/integration/short/mapping/mapper-test-helper.ts @@ -13,25 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import assert from "assert"; +import types from "../../../../lib/types/index"; +import helper from "../../../test-helper"; +import tableMappingsModule from "../../../../lib/mapping/table-mappings"; +import Mapper from "../../../../lib/mapping/mapper"; +import Client from "../../../../lib/client"; +import utils from "../../../../lib/utils"; -'use strict'; -const assert = require('assert'); -const types = require('../../../../lib/types'); const Uuid = types.Uuid; -const helper = require('../../../test-helper'); -const tableMappingsModule = require('../../../../lib/mapping/table-mappings'); const UnderscoreCqlToCamelCaseMappings = tableMappingsModule.UnderscoreCqlToCamelCaseMappings; -const Mapper = require('../../../../lib/mapping/mapper'); -const Client = require('../../../../lib/client'); -const utils = require('../../../../lib/utils'); - const videoColumnsToProperties = new Map([ ['videoid', 'id'], ['userid', 'userId'], ['added_date', 'addedDate'], ['location_type', 'locationType'], ['preview_image_location', 'preview'], ['preview_thumbnails', 'thumbnails']]); let hasBeenSetup = false; -const mapperHelper = module.exports = { +const mapperHelper = { setupOnce: function (testInstance) { testInstance.timeout(60000); @@ -191,3 +189,6 @@ const mapperHelper = module.exports = { }); } }; + + +export default mapperHelper; \ No newline at end of file diff --git a/test/integration/short/mapping/mapper-tests.js b/test/integration/short/mapping/mapper-tests.ts similarity index 94% rename from test/integration/short/mapping/mapper-tests.js rename to test/integration/short/mapping/mapper-tests.ts index dda7d8e50..ba937413c 100644 --- a/test/integration/short/mapping/mapper-tests.js +++ b/test/integration/short/mapping/mapper-tests.ts @@ -14,19 +14,18 @@ * limitations under the License. */ -'use strict'; +import assert from "assert"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; +import Mapper from "../../../../lib/mapping/mapper"; +import Client from "../../../../lib/client"; +import mapperTestHelper from "./mapper-test-helper"; +import helper from "../../../test-helper"; +import Result from "../../../../lib/mapping/result"; +import { q } from "../../../../lib/mapping/q"; -const assert = require('assert'); -const types = require('../../../../lib/types'); -const utils = require('../../../../lib/utils'); -const Mapper = require('../../../../lib/mapping/mapper'); -const Client = require('../../../../lib/client'); const Uuid = types.Uuid; -const mapperTestHelper = require('./mapper-test-helper'); const assertRowMatchesDoc = mapperTestHelper.assertRowMatchesDoc; -const helper = require('../../../test-helper'); -const Result = require('../../../../lib/mapping/result'); -const q = require('../../../../lib/mapping/q').q; describe('Mapper', function () { diff --git a/test/integration/short/mapping/model-mapper-tests.js b/test/integration/short/mapping/model-mapper-tests.ts similarity index 98% rename from test/integration/short/mapping/model-mapper-tests.js rename to test/integration/short/mapping/model-mapper-tests.ts index 7892ad47a..a8f68aab9 100644 --- a/test/integration/short/mapping/model-mapper-tests.js +++ b/test/integration/short/mapping/model-mapper-tests.ts @@ -14,19 +14,18 @@ * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const util = require('util'); -const helper = require('../../../test-helper'); -const mapperTestHelper = require('./mapper-test-helper'); -const types = require('../../../../lib/types'); -const utils = require('../../../../lib/utils'); +import assert from "assert"; +import util from "util"; +import helper from "../../../test-helper"; +import mapperTestHelper from "./mapper-test-helper"; +import types from "../../../../lib/types/index"; +import utils from "../../../../lib/utils"; +import Mapper from "../../../../lib/mapping/mapper"; +import Result from "../../../../lib/mapping/result"; +import {q} from "../../../../lib/mapping/q"; + const Uuid = types.Uuid; -const q = require('../../../../lib/mapping/q').q; -const Mapper = require('../../../../lib/mapping/mapper'); const assertRowMatchesDoc = mapperTestHelper.assertRowMatchesDoc; -const Result = require('../../../../lib/mapping/result'); const vit = helper.vit; describe('ModelMapper', function () { diff --git a/test/integration/short/metadata-simulator-tests.js b/test/integration/short/metadata-simulator-tests.ts similarity index 94% rename from test/integration/short/metadata-simulator-tests.js rename to test/integration/short/metadata-simulator-tests.ts index acce50779..fda013686 100644 --- a/test/integration/short/metadata-simulator-tests.js +++ b/test/integration/short/metadata-simulator-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('chai').assert; -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types/index'); -const simulacron = require('../simulacron'); -const util = require('util'); + +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import simulacron from "../simulacron"; +import util from "util"; +import {assert} from "chai"; describe('Metadata', function () { this.timeout(20000); @@ -43,7 +43,7 @@ describe('Metadata', function () { coordinator: "127.0.0.101", duration: 10000, request: 'Execute CQL3 query', - started_at: new types.LocalTime.now().getTotalNanoseconds() + started_at: types.LocalTime.now().getTotalNanoseconds() } ], column_types: { diff --git a/test/integration/short/metadata-tests.js b/test/integration/short/metadata-tests.ts similarity index 99% rename from test/integration/short/metadata-tests.js rename to test/integration/short/metadata-tests.ts index 9941e1a3b..f00e9c3d8 100644 --- a/test/integration/short/metadata-tests.js +++ b/test/integration/short/metadata-tests.ts @@ -14,14 +14,13 @@ * limitations under the License. 
*/ -"use strict"; -const assert = require('chai').assert; - -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); -const packageInfo = require('../../../package.json'); +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import packageInfo from "../../../package.json"; +import { assert } from "chai"; + const vit = helper.vit; const vdescribe = helper.vdescribe; diff --git a/test/integration/short/numeric-tests.js b/test/integration/short/numeric-tests.ts similarity index 96% rename from test/integration/short/numeric-tests.js rename to test/integration/short/numeric-tests.ts index 0b5c6a329..f8debcd56 100644 --- a/test/integration/short/numeric-tests.js +++ b/test/integration/short/numeric-tests.ts @@ -14,12 +14,11 @@ * limitations under the License. */ -'use strict'; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import helper from "../../test-helper"; +import { expect } from 'chai'; -const expect = require('chai').expect; -const Client = require('../../../lib/client'); -const types = require('../../../lib/types'); -const helper = require('../../test-helper'); const Uuid = types.Uuid; const createTableNumericValuesCql = @@ -44,7 +43,10 @@ const createTableNumericCollectionsCql = set_int set)`; // Exported to be called on other fixtures to take advantage from existing setups -module.exports = function (keyspace, prepare) { + + +// Exported to be called on other fixtures to take advantage from existing setups +export default function (keyspace, prepare) { context('with numeric values', () => { @@ -221,4 +223,4 @@ module.exports = function (keyspace, prepare) { }); }); }); -}; \ No newline at end of file +} \ No newline at end of file diff --git a/test/integration/short/paging-tests.js b/test/integration/short/paging-tests.ts similarity index 93% rename from test/integration/short/paging-tests.js rename to test/integration/short/paging-tests.ts index 3d4fd8b93..1894790e3 100644 --- a/test/integration/short/paging-tests.js +++ b/test/integration/short/paging-tests.ts @@ -13,17 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import { assert } from "chai"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import helper from "../../test-helper"; +import promiseUtils from "../../../lib/promise-utils"; -'use strict'; -const { assert } = require('chai'); -const Client = require('../../../lib/client'); -const types = require('../../../lib/types'); -const helper = require('../../test-helper'); -const promiseUtils = require('../../../lib/promise-utils'); +// Exported to be called on other fixtures to take advantage from existing setups // Exported to be called on other fixtures to take advantage from existing setups -module.exports = function (keyspace, prepare) { +export default function (keyspace, prepare) { context('with paging', function () { const client = new Client({ @@ -138,4 +138,4 @@ module.exports = function (keyspace, prepare) { }); } }); -}; \ No newline at end of file +} \ No newline at end of file diff --git a/test/integration/short/pool-simulator-tests.js b/test/integration/short/pool-simulator-tests.ts similarity index 98% rename from test/integration/short/pool-simulator-tests.js rename to test/integration/short/pool-simulator-tests.ts index 0b7aa8726..59f9161fd 100644 --- a/test/integration/short/pool-simulator-tests.js +++ b/test/integration/short/pool-simulator-tests.ts @@ -13,22 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import errors from "../../../lib/errors"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import policies from "../../../lib/policies/index"; +import promiseUtils from "../../../lib/promise-utils"; +import { version } from "../../../index"; +import Client from "../../../lib/client"; -'use strict'; - -const { assert } = require('chai'); -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const errors = require('../../../lib/errors'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); -const policies = require('../../../lib/policies'); -const promiseUtils = require('../../../lib/promise-utils'); -const { version } = require('../../../index'); -const { distance } = types; - -const Client = require('../../../lib/client'); +const { distance } = types; const healthResponseCountInterval = 200; describe('pool', function () { diff --git a/test/integration/short/prepare-simulator-tests.js b/test/integration/short/prepare-simulator-tests.ts similarity index 94% rename from test/integration/short/prepare-simulator-tests.js rename to test/integration/short/prepare-simulator-tests.ts index d697df7f6..9287b24b0 100644 --- a/test/integration/short/prepare-simulator-tests.js +++ b/test/integration/short/prepare-simulator-tests.ts @@ -13,16 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import { assert } from "chai"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import helper from "../../test-helper"; +import reconnection from "../../../lib/policies/reconnection"; +import simulacron from "../simulacron"; -const { assert } = require('chai'); - -const Client = require('../../../lib/client'); -const types = require('../../../lib/types/index'); -const utils = require('../../../lib/utils'); -const helper = require('../../test-helper'); -const reconnection = require('../../../lib/policies/reconnection'); -const simulacron = require('../simulacron'); describe('Client', function () { this.timeout(20000); diff --git a/test/integration/short/retry-simulator-tests.js b/test/integration/short/retry-simulator-tests.ts similarity index 97% rename from test/integration/short/retry-simulator-tests.js rename to test/integration/short/retry-simulator-tests.ts index 6aca93f43..9d7990c62 100644 --- a/test/integration/short/retry-simulator-tests.js +++ b/test/integration/short/retry-simulator-tests.ts @@ -13,16 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import assert from "assert"; +import Client from "../../../lib/client"; +import metrics from "../../../lib/metrics/index"; +import errors from "../../../lib/errors"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import policies from "../../../lib/policies/index"; -'use strict'; -const assert = require('assert'); -const Client = require('../../../lib/client'); -const metrics = require('../../../lib/metrics'); -const errors = require('../../../lib/errors'); -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const policies = require('../../../lib/policies'); const RetryPolicy = policies.retry.RetryPolicy; const queries = { diff --git a/test/integration/short/search/date-range-tests.js b/test/integration/short/search/date-range-tests.ts similarity index 96% rename from test/integration/short/search/date-range-tests.js rename to test/integration/short/search/date-range-tests.ts index 5326a3931..6dafd412a 100644 --- a/test/integration/short/search/date-range-tests.js +++ b/test/integration/short/search/date-range-tests.ts @@ -13,15 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const helper = require('../../../test-helper'); +import assert from "assert"; +import util from "util"; +import helper from "../../../test-helper"; +import utils from "../../../../lib/utils"; +import types from "../../../../lib/types/index"; +import * as dateRangeModule from "../../../../lib/datastax/search/date-range"; +import Client from "../../../../lib/client"; + + const vdescribe = helper.vdescribe; -const utils = require('../../../../lib/utils'); -const types = require('../../../../lib/types'); -const dateRangeModule = require('../../../../lib/datastax/search/date-range'); -const Client = require('../../../../lib/client'); const DateRange = dateRangeModule.DateRange; vdescribe('dse-5.1', 'DateRange', function () { diff --git a/test/integration/short/speculative-execution-simulator-tests.js b/test/integration/short/speculative-execution-simulator-tests.ts similarity index 95% rename from test/integration/short/speculative-execution-simulator-tests.js rename to test/integration/short/speculative-execution-simulator-tests.ts index b5984a3b4..64cba7365 100644 --- a/test/integration/short/speculative-execution-simulator-tests.js +++ b/test/integration/short/speculative-execution-simulator-tests.ts @@ -13,17 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const responseErrorCodes = require('../../../lib/types').responseErrorCodes; -const simulacron = require('../simulacron'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); - -const Client = require('../../../lib/client.js'); -const ConstantSpeculativeExecutionPolicy = require('../../../lib/policies/speculative-execution').ConstantSpeculativeExecutionPolicy; -const NoSpeculativeExecutionPolicy = require('../../../lib/policies/speculative-execution').NoSpeculativeExecutionPolicy; -const OrderedLoadBalancingPolicy = require('../../test-helper').OrderedLoadBalancingPolicy; + +import assert from "assert"; +import simulacron from "../simulacron"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Client from "../../../lib/client"; +import { responseErrorCodes } from "../../../lib/types"; +import { ConstantSpeculativeExecutionPolicy, NoSpeculativeExecutionPolicy } from "../../../lib/policies/speculative-execution"; +import { OrderedLoadBalancingPolicy } from "../../test-helper"; const query = "select * from data"; const delay = 100; @@ -295,7 +293,7 @@ describe('Client', function() { }, next); }, function executeQuery(next) { - client.execute(query, [], { isIdempotent: true }, function (err, result) { + client.execute(query, [], { isIdempotent: true }, function (err, _result) { assert.ok(err); assert.strictEqual(Object.keys(err.innerErrors).length, 3); diff --git a/test/integration/short/ssl-tests.js b/test/integration/short/ssl-tests.ts similarity index 89% rename from test/integration/short/ssl-tests.js rename to test/integration/short/ssl-tests.ts index acdd21591..9491aa0a0 100644 --- a/test/integration/short/ssl-tests.js +++ b/test/integration/short/ssl-tests.ts @@ -13,15 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import errors from "../../../lib/errors"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; -const helper = require('../../test-helper'); -const Client = require('../../../lib/client'); -const errors = require('../../../lib/errors'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); describe('Client @SERVER_API', function () { this.timeout(60000); diff --git a/test/integration/short/timeout-simulator-tests.js b/test/integration/short/timeout-simulator-tests.ts similarity index 95% rename from test/integration/short/timeout-simulator-tests.js rename to test/integration/short/timeout-simulator-tests.ts index b7cb4bb2d..48849a58f 100644 --- a/test/integration/short/timeout-simulator-tests.js +++ b/test/integration/short/timeout-simulator-tests.ts @@ -13,16 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import Client from "../../../lib/client"; +import errors from "../../../lib/errors"; +import promiseUtils from "../../../lib/promise-utils"; +import helper, {OrderedLoadBalancingPolicy} from "../../test-helper"; +import simulacron from "../simulacron"; -'use strict'; -const { assert } = require('chai'); -const Client = require('../../../lib/client'); -const errors = require('../../../lib/errors'); -const promiseUtils = require('../../../lib/promise-utils'); -const helper = require('../../test-helper'); -const simulacron = require('../simulacron'); -const { OrderedLoadBalancingPolicy } = helper; const queryDelayedOnNode0 = 'INSERT INTO paused_on_first_node'; const queryDelayedOnAllNodes = 'INSERT INTO paused_on_all_nodes'; diff --git a/test/integration/short/tracker-simulator-tests.js b/test/integration/short/tracker-simulator-tests.ts similarity index 95% rename from test/integration/short/tracker-simulator-tests.js rename to test/integration/short/tracker-simulator-tests.ts index 89ddd4d33..3b57d2f74 100644 --- a/test/integration/short/tracker-simulator-tests.js +++ b/test/integration/short/tracker-simulator-tests.ts @@ -14,18 +14,15 @@ * limitations under the License. 
*/ -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); - -const tracker = require('../../../lib/tracker'); -const errors = require('../../../lib/errors'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); -const Client = require('../../../lib/client'); -const Host = require('../../../lib/host').Host; -const simulacron = require('../simulacron'); +import { assert } from "chai"; +import sinon from "sinon"; +import tracker from "../../../lib/tracker/index"; +import errors from "../../../lib/errors"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; +import Client from "../../../lib/client"; +import simulacron from "../simulacron"; +import { Host } from "../../../lib/host"; const queryDelayed = 'INSERT INTO delayed (id) VALUES (?)'; diff --git a/test/integration/short/udf-tests.js b/test/integration/short/udf-tests.ts similarity index 99% rename from test/integration/short/udf-tests.js rename to test/integration/short/udf-tests.ts index 1da74fd1a..24207c896 100644 --- a/test/integration/short/udf-tests.js +++ b/test/integration/short/udf-tests.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); +import assert from "assert"; +import helper from "../../test-helper"; +import Client from "../../../lib/client"; +import utils from "../../../lib/utils"; +import types from "../../../lib/types/index"; + -const helper = require('../../test-helper'); const vit = helper.vit; -const Client = require('../../../lib/client'); -const utils = require('../../../lib/utils'); -const types = require('../../../lib/types'); const vdescribe = helper.vdescribe; vdescribe('2.2', 'Metadata @SERVER_API', function () { diff --git a/test/integration/short/vector-tests.js b/test/integration/short/vector-tests.ts similarity index 94% rename from test/integration/short/vector-tests.js rename to test/integration/short/vector-tests.ts index 77a3b522d..90631bf00 100644 --- a/test/integration/short/vector-tests.js +++ b/test/integration/short/vector-tests.ts @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../../test-helper.js'); +import assert from "assert"; +import helper from "../../test-helper"; +import { types } from "../../../index"; +import Vector from "../../../lib/types/vector"; +import util from "node:util"; + -const { types } = require('../../../index.js'); -const Vector = require('../../../lib/types/vector.js'); -const util = require('node:util'); const vdescribe = helper.vdescribe; vdescribe('5.0.0', 'Vector tests', function () { this.timeout(120000); @@ -63,7 +63,7 @@ vdescribe('5.0.0', 'Vector tests', function () { const updatedValues = data.value.slice(); updatedValues[0] = updatedValues[1]; - client.execute(`UPDATE ${table} SET ${subtypeStringToColumnName(data.subtypeString)} = ? WHERE id = ?`, [new Vector(updatedValues, data.subtypeString), id], { prepare: true }, function (err, result) { + client.execute(`UPDATE ${table} SET ${subtypeStringToColumnName(data.subtypeString)} = ? 
WHERE id = ?`, [new Vector(updatedValues, data.subtypeString), id], { prepare: true }, function (err, _result) { if (err) { return done(err); } done(); }); diff --git a/test/integration/simulacron.js b/test/integration/simulacron.ts similarity index 98% rename from test/integration/simulacron.js rename to test/integration/simulacron.ts index 4f1449243..969cc6710 100644 --- a/test/integration/simulacron.js +++ b/test/integration/simulacron.ts @@ -13,14 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const helper = require('../test-helper'); -const http = require('http'); -const spawn = require('child_process').spawn; -const util = require('util'); -const fs = require('fs'); -const utils = require('../../lib/utils.js'); -const Client = require('../../lib/client.js'); +import helper from "../test-helper"; +import http from "http"; +import util from "util"; +import fs from "fs"; +import utils from "../../lib/utils"; +import Client from "../../lib/client"; +import { spawn } from "child_process"; + const simulacronHelper = { _execute: function(processName, params, cb) { @@ -573,4 +573,4 @@ SimulacronNode.prototype._filterLogs = function(data) { return data.data_centers[0].nodes[0].queries; }; -module.exports = simulacronHelper; +export default simulacronHelper; diff --git a/test/other/memory/basic-profile.js b/test/other/memory/basic-profile.ts similarity index 94% rename from test/other/memory/basic-profile.js rename to test/other/memory/basic-profile.ts index 75b452811..8e2ce967c 100644 --- a/test/other/memory/basic-profile.js +++ b/test/other/memory/basic-profile.ts @@ -13,9 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); + +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import cassandra from "../../../index"; +import utils from "../../../lib/utils"; + let heapdump; const heapdumpPath = '/var/log/nodejs-driver'; try { @@ -26,13 +30,8 @@ try { catch (e) { console.error('There was an error while trying to import heapdump', e); } - -const helper = require('../../test-helper.js'); -const cassandra = require('../../../index.js'); const Client = cassandra.Client; const types = cassandra.types; -const utils = require('../../../lib/utils'); - let client = new Client(utils.extend({ encoding: { copyBuffer: true}}, helper.baseOptions)); const keyspace = helper.getRandomName('ks'); const table = keyspace + '.' + helper.getRandomName('tbl'); diff --git a/test/other/memory/profile-keeping-ref.js b/test/other/memory/profile-keeping-ref.ts similarity index 94% rename from test/other/memory/profile-keeping-ref.js rename to test/other/memory/profile-keeping-ref.ts index 246be4d47..f59469aa9 100644 --- a/test/other/memory/profile-keeping-ref.js +++ b/test/other/memory/profile-keeping-ref.ts @@ -13,10 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import cassandra from "../../../index"; +import utils from "../../../lib/utils"; + + /* eslint-disable no-console, no-undef */ -const assert = require('assert'); -const util = require('util'); let heapdump; const heapdumpPath = '/var/log/nodejs-driver'; try { @@ -26,13 +30,8 @@ try { catch (e) { console.log(e); } - -const helper = require('../../test-helper.js'); -const cassandra = require('../../../index.js'); const Client = cassandra.Client; const types = cassandra.types; -const utils = require('../../../lib/utils'); - let client = new Client(utils.extend({ encoding: { copyBuffer: true}}, helper.baseOptions)); const keyspace = helper.getRandomName('ks'); const table = keyspace + '.' + helper.getRandomName('tbl'); diff --git a/test/unit/typescript/api-generation-test.ts b/test/other/typescript/api-generation-test.ts similarity index 92% rename from test/unit/typescript/api-generation-test.ts rename to test/other/typescript/api-generation-test.ts index cb6f94c5e..831e3a93a 100644 --- a/test/unit/typescript/api-generation-test.ts +++ b/test/other/typescript/api-generation-test.ts @@ -14,20 +14,18 @@ * limitations under the License. */ -import { auth, concurrent, errors, datastax, geometry, mapping, metadata, metrics, policies, tracker, types } from "../../../index"; -import * as root from "../../../index"; +import { auth, concurrent, errors, geometry, mapping, metadata, metrics, policies, tracker, types, graph } from "cassandra-driver"; +import * as root from "cassandra-driver"; -import graph = datastax.graph; +// import graph = datastax.graph; let counter:number = 0; /** * Should be executed to output a ts file: - * - pushd test/unit/typescript/ + * - pushd test/other/typescript/ + * - npm install * - tsc -p . - * - node -e "require('./api-generation-test').generate()" > generated.ts - * - tsc generated.ts - * - popd */ export function generate(): void { @@ -47,7 +45,7 @@ export function generate(): void { * limitations under the License. */ -'use strict'; + import { auth, concurrent, errors, datastax, mapping, geometry, metadata, metrics, policies, tracker, types } from "../../../index"; import * as root from "../../../index"; diff --git a/test/unit/typescript/client-tests.ts b/test/other/typescript/client-tests.ts similarity index 99% rename from test/unit/typescript/client-tests.ts rename to test/other/typescript/client-tests.ts index cb7edf537..cc7210742 100644 --- a/test/unit/typescript/client-tests.ts +++ b/test/other/typescript/client-tests.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { auth, Client, ExecutionProfile, policies, types } from "../../../index"; +import { auth, Client, ExecutionProfile, policies, types } from "cassandra-driver"; /* * TypeScript definitions compilation tests for Client class. diff --git a/test/other/typescript/generated.ts b/test/other/typescript/generated.ts new file mode 100644 index 000000000..793ac0295 --- /dev/null +++ b/test/other/typescript/generated.ts @@ -0,0 +1,254 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +import { auth, concurrent, errors, datastax, mapping, geometry, metadata, metrics, policies, tracker, types } from "../../../dist/cassandra-driver-public"; +import * as root from "../../../dist/cassandra-driver-public"; + +import graph = datastax.graph; + +export async function generatedFn() { + let n:number; + let s:string; + let o:object; + let f:Function; + + // root classes and interfaces + let c1: root.Client; + let c2: root.ExecutionProfile; + let c3: root.ExecutionOptions; + + // root namespaces/objects + o = root.types; + o = root.errors; + o = root.policies; + o = root.auth; + o = root.mapping; + o = root.tracker; + o = root.metrics; + o = root.concurrent; + o = root.metadata; + o = root.geometry; + o = root.datastax; + + // auth classes and interfaces + let c4: auth.Authenticator; + let c5: auth.AuthProvider; + let c6: auth.DseGssapiAuthProvider; + let c7: auth.DsePlainTextAuthProvider; + let c8: auth.PlainTextAuthProvider; + + // errors classes and interfaces + let c9: errors.ArgumentError; + let c10: errors.AuthenticationError; + let c11: errors.BusyConnectionError; + let c12: errors.DriverError; + let c13: errors.OperationTimedOutError; + let c14: errors.DriverInternalError; + let c15: errors.NoHostAvailableError; + let c16: errors.NotSupportedError; + let c17: errors.ResponseError; + let c18: errors.VIntOutOfRangeException; + + // concurrent static functions + f = concurrent.executeConcurrent; + + // concurrent classes and interfaces + let c19: concurrent.ResultSetGroup; + + // metadata classes and interfaces + let c20: metadata.Metadata; + + // metrics classes and interfaces + let c21: metrics.ClientMetrics; + let c22: metrics.DefaultMetrics; + + // tracker classes and interfaces + let c23: tracker.RequestLogger; + let c24: tracker.RequestTracker; + + // geometry classes and interfaces + let c25: geometry.LineString; + let c26: geometry.Point; + let c27: geometry.Polygon; + + // graph classes and interfaces + let c28: graph.Edge; + let c29: graph.Element; + let c30: graph.Path; + let c31: graph.Property; + let c32: graph.Vertex; + let c33: graph.VertexProperty; + let c34: graph.GraphResultSet; + + // types.dataTypes enum values + n = types.dataTypes.custom; + n = types.dataTypes.ascii; + n = types.dataTypes.bigint; + n = types.dataTypes.blob; + n = types.dataTypes.boolean; + n = types.dataTypes.counter; + n = types.dataTypes.decimal; + n = types.dataTypes.double; + n = types.dataTypes.float; + n = types.dataTypes.int; + n = types.dataTypes.text; + n = types.dataTypes.timestamp; + n = types.dataTypes.uuid; + n = types.dataTypes.varchar; + n = types.dataTypes.varint; + n = types.dataTypes.timeuuid; + n = types.dataTypes.inet; + n = types.dataTypes.date; + n = types.dataTypes.time; + n = types.dataTypes.smallint; + n = types.dataTypes.tinyint; + n = types.dataTypes.duration; + n = types.dataTypes.list; + n = types.dataTypes.map; + n = types.dataTypes.set; + n = types.dataTypes.udt; + n = types.dataTypes.tuple; + + // types.consistencies enum values + n = types.consistencies.any; + n = types.consistencies.one; + n = 
types.consistencies.two; + n = types.consistencies.three; + n = types.consistencies.quorum; + n = types.consistencies.all; + n = types.consistencies.localQuorum; + n = types.consistencies.eachQuorum; + n = types.consistencies.serial; + n = types.consistencies.localSerial; + n = types.consistencies.localOne; + + // types.protocolVersion enum values + n = types.protocolVersion.v1; + n = types.protocolVersion.v2; + n = types.protocolVersion.v3; + n = types.protocolVersion.v4; + n = types.protocolVersion.v5; + n = types.protocolVersion.v6; + n = types.protocolVersion.dseV1; + n = types.protocolVersion.dseV2; + n = types.protocolVersion.maxSupported; + n = types.protocolVersion.minSupported; + + // types.distance enum values + n = types.distance.local; + n = types.distance.remote; + n = types.distance.ignored; + + // types.responseErrorCodes enum values + n = types.responseErrorCodes.serverError; + n = types.responseErrorCodes.protocolError; + n = types.responseErrorCodes.badCredentials; + n = types.responseErrorCodes.unavailableException; + n = types.responseErrorCodes.overloaded; + n = types.responseErrorCodes.isBootstrapping; + n = types.responseErrorCodes.truncateError; + n = types.responseErrorCodes.writeTimeout; + n = types.responseErrorCodes.readTimeout; + n = types.responseErrorCodes.readFailure; + n = types.responseErrorCodes.functionFailure; + n = types.responseErrorCodes.writeFailure; + n = types.responseErrorCodes.syntaxError; + n = types.responseErrorCodes.unauthorized; + n = types.responseErrorCodes.invalid; + n = types.responseErrorCodes.configError; + n = types.responseErrorCodes.alreadyExists; + n = types.responseErrorCodes.unprepared; + n = types.responseErrorCodes.clientWriteFailure; + + o = types.unset; + + + // types classes and interfaces + let c35: types.BigDecimal; + let c36: types.Duration; + let c37: types.InetAddress; + let c38: types.Integer; + let c39: types.LocalDate; + let c40: types.LocalTime; + let c41: types.Long; + let c42: types.ResultSet; + let c43: types.ResultStream; + let c44: types.Row; + let c45: types.TimeUuid; + let c46: types.Tuple; + let c47: types.Vector; + let c48: types.Uuid; + + // policies.addressResolution classes and interfaces + let c49: policies.addressResolution.AddressTranslator; + let c50: policies.addressResolution.EC2MultiRegionTranslator; + + // policies.loadBalancing classes and interfaces + let c51: policies.loadBalancing.AllowListPolicy; + let c52: policies.loadBalancing.DCAwareRoundRobinPolicy; + let c53: policies.loadBalancing.DefaultLoadBalancingPolicy; + let c54: policies.loadBalancing.LoadBalancingPolicy; + let c55: policies.loadBalancing.RoundRobinPolicy; + let c56: policies.loadBalancing.TokenAwarePolicy; + let c57: policies.loadBalancing.WhiteListPolicy; + + // policies.reconnection classes and interfaces + let c58: policies.reconnection.ReconnectionPolicy; + let c59: policies.reconnection.ConstantReconnectionPolicy; + let c60: policies.reconnection.ExponentialReconnectionPolicy; + + // policies.retry classes and interfaces + let c61: policies.retry.IdempotenceAwareRetryPolicy; + let c62: policies.retry.FallthroughRetryPolicy; + let c63: policies.retry.RetryPolicy; + + // policies static functions + f = policies.defaultAddressTranslator; + f = policies.defaultLoadBalancingPolicy; + f = policies.defaultRetryPolicy; + f = policies.defaultReconnectionPolicy; + f = policies.defaultSpeculativeExecutionPolicy; + f = policies.defaultTimestampGenerator; + + // mapping classes and interfaces + let c64: mapping.Mapper; + let c65: 
mapping.ModelMapper; + let c66: mapping.ModelBatchMapper; + let c67: mapping.ModelBatchItem; + let c68: mapping.Result; + let c69: mapping.TableMappings; + let c70: mapping.DefaultTableMappings; + let c71: mapping.UnderscoreCqlToCamelCaseMappings; + + // mapping.q static functions + f = mapping.q.in_; + f = mapping.q.gt; + f = mapping.q.gte; + f = mapping.q.lt; + f = mapping.q.lte; + f = mapping.q.notEq; + f = mapping.q.and; + f = mapping.q.incr; + f = mapping.q.decr; + f = mapping.q.append; + f = mapping.q.prepend; + f = mapping.q.remove; + + +} + diff --git a/test/unit/typescript/graph-tests.ts b/test/other/typescript/graph-tests.ts similarity index 96% rename from test/unit/typescript/graph-tests.ts rename to test/other/typescript/graph-tests.ts index d75012684..673fce037 100644 --- a/test/unit/typescript/graph-tests.ts +++ b/test/other/typescript/graph-tests.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Client, datastax } from "../../../index"; +import { Client, datastax } from "cassandra-driver"; import GraphResultSet = datastax.graph.GraphResultSet; diff --git a/test/unit/typescript/mapping-tests.ts b/test/other/typescript/mapping-tests.ts similarity index 92% rename from test/unit/typescript/mapping-tests.ts rename to test/other/typescript/mapping-tests.ts index dc65dca9d..a3442e185 100644 --- a/test/unit/typescript/mapping-tests.ts +++ b/test/other/typescript/mapping-tests.ts @@ -14,11 +14,7 @@ * limitations under the License. */ -import { Client, mapping, types } from "../../../index"; -import Mapper = mapping.Mapper; -import ModelMapper = mapping.ModelMapper; -import Uuid = types.Uuid; -import Result = mapping.Result; +import { Client, Mapper, ModelMapper, Uuid, Result } from "cassandra-driver"; /* * TypeScript definitions compilation tests for types module. diff --git a/test/unit/typescript/metadata-tests.ts b/test/other/typescript/metadata-tests.ts similarity index 95% rename from test/unit/typescript/metadata-tests.ts rename to test/other/typescript/metadata-tests.ts index 92339528b..3a0578180 100644 --- a/test/unit/typescript/metadata-tests.ts +++ b/test/other/typescript/metadata-tests.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { Client, Host, metadata, types } from "../../../index"; +import { Client, Host, metadata, types } from "../../../dist/cassandra-driver-public"; import TableMetadata = metadata.TableMetadata; import QueryTrace = metadata.QueryTrace; diff --git a/test/other/typescript/package-lock.json b/test/other/typescript/package-lock.json new file mode 100644 index 000000000..01af84d6f --- /dev/null +++ b/test/other/typescript/package-lock.json @@ -0,0 +1,52 @@ +{ + "name": "cassandra-driver-public-typescript-tests", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "cassandra-driver-public-typescript-tests", + "dependencies": { + "cassandra-driver": "file:../../../" + } + }, + "../../..": { + "version": "4.8.0", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "adm-zip": "~0.5.10", + "long": "~5.2.3" + }, + "devDependencies": { + "@microsoft/api-extractor": "^7.52.5", + "@rollup/plugin-typescript": "^12.1.2", + "@stylistic/eslint-plugin": "^3.1.0", + "@types/chai": "^5.2.1", + "@types/mocha": "^10.0.10", + "@types/sinon": "^17.0.4", + "@typescript-eslint/eslint-plugin": "^8.26.1", + "@typescript-eslint/parser": "^8.26.1", + "chai": "~4.3.8", + "eslint": "^8.57.1", + "kerberos": "~2.0.3", + "mocha": "~10.2.0", + "mocha-appveyor-reporter": "^0.4.2", + "mocha-jenkins-reporter": "^0.4.8", + "rollup": "^4.37.0", + "rollup-plugin-dts": "^6.2.1", + "sinon": "~15.2.0", + "temp": ">= 0.8.3", + "ts-mocha": "^11.1.0", + "tslib": "^2.8.1", + "typescript": "^5.8.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cassandra-driver": { + "resolved": "../../..", + "link": true + } + } +} diff --git a/test/other/typescript/package.json b/test/other/typescript/package.json new file mode 100644 index 000000000..2c0681cf4 --- /dev/null +++ b/test/other/typescript/package.json @@ -0,0 +1,6 @@ +{ + "name": "cassandra-driver-public-typescript-tests", + "dependencies": { + "cassandra-driver": "file:../../../" + } +} diff --git a/test/unit/typescript/policy-tests.ts b/test/other/typescript/policy-tests.ts similarity index 76% rename from test/unit/typescript/policy-tests.ts rename to test/other/typescript/policy-tests.ts index 1f0dfc192..30cd5df2e 100644 --- a/test/unit/typescript/policy-tests.ts +++ b/test/other/typescript/policy-tests.ts @@ -14,14 +14,9 @@ * limitations under the License. */ -import { policies } from "../../../index"; +import { policies, TokenAwarePolicy, ConstantReconnectionPolicy, ExponentialReconnectionPolicy, RetryPolicy, addressResolution, AddressTranslator } from "../../../dist/cassandra-driver-public"; import LoadBalancingPolicy = policies.loadBalancing.LoadBalancingPolicy; -import TokenAwarePolicy = policies.loadBalancing.TokenAwarePolicy; import ReconnectionPolicy = policies.reconnection.ReconnectionPolicy; -import RetryPolicy = policies.retry.RetryPolicy; -import ConstantReconnectionPolicy = policies.reconnection.ConstantReconnectionPolicy; -import ExponentialReconnectionPolicy = policies.reconnection.ExponentialReconnectionPolicy; -import addressResolution = policies.addressResolution; /* * TypeScript definitions compilation tests for policy module. 
@@ -49,5 +44,5 @@ function myTest(): void { retryPolicy = new RetryPolicy(); - let ar: addressResolution.AddressTranslator = new addressResolution.EC2MultiRegionTranslator(); + let ar: AddressTranslator = new addressResolution.EC2MultiRegionTranslator(); } \ No newline at end of file diff --git a/test/unit/typescript/tsconfig.json b/test/other/typescript/tsconfig.json similarity index 71% rename from test/unit/typescript/tsconfig.json rename to test/other/typescript/tsconfig.json index 199fc2492..0f667cb47 100644 --- a/test/unit/typescript/tsconfig.json +++ b/test/other/typescript/tsconfig.json @@ -4,7 +4,9 @@ "lib": ["es2015"], "target": "es2015", "sourceMap": false, - "strict": true + "strict": true, + "outDir": "out", + "skipLibCheck": true }, "exclude": [ "node_modules" diff --git a/test/unit/typescript/types-test.ts b/test/other/typescript/types-test.ts similarity index 82% rename from test/unit/typescript/types-test.ts rename to test/other/typescript/types-test.ts index 788a06c25..c8ed34b4f 100644 --- a/test/unit/typescript/types-test.ts +++ b/test/other/typescript/types-test.ts @@ -14,13 +14,19 @@ * limitations under the License. */ -import { types, Client } from "../../../index"; -import Uuid = types.Uuid; -import TimeUuid = types.TimeUuid; -import Long = types.Long; -import BigDecimal = types.BigDecimal; -import InetAddress = types.InetAddress; -import Tuple = types.Tuple; +import { types, Client } from "../../../dist/cassandra-driver-public"; +const Uuid = types.Uuid; +type Uuid = types.Uuid; +const TimeUuid = types.TimeUuid; +type TimeUuid = types.TimeUuid; +const Long = types.Long; +type Long = types.Long; +const BigDecimal = types.BigDecimal; +type BigDecimal = types.BigDecimal; +const InetAddress = types.InetAddress; +type InetAddress = types.InetAddress; +const Tuple = types.Tuple; +type Tuple = types.Tuple; import ResultSet = types.ResultSet; import Row = types.Row; diff --git a/test/test-helper.js b/test/test-helper.ts similarity index 98% rename from test/test-helper.js rename to test/test-helper.ts index 2a01576b4..a07c031d8 100644 --- a/test/test-helper.js +++ b/test/test-helper.ts @@ -13,28 +13,29 @@ * See the License for the specific language governing permissions and * limitations under the License. 
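[Editor's note, not part of the patch] The const/type pairs introduced in types-test.ts above replace the old `import Uuid = types.Uuid` shorthand, which only works while `types` is a TypeScript namespace; pairing a const and a type alias under one name keeps both the runtime constructor and the instance type available. A small sketch of the pattern (import specifier shortened to the package name for illustration):

import { types } from "cassandra-driver";

const Uuid = types.Uuid;   // the value: constructor with its static helpers
type Uuid = types.Uuid;    // the type: usable in annotations

const id: Uuid = Uuid.random();
const parsed: Uuid = Uuid.fromString(id.toString());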
*/ - -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); -const util = require('util'); -const path = require('path'); -const policies = require('../lib/policies'); -const types = require('../lib/types'); -// const { types } = require('../lib/types'); -const utils = require('../lib/utils'); -const spawn = require('child_process').spawn; -const childProcessExec = require('child_process').exec; -const http = require('http'); -const temp = require('temp').track(true); -const Client = require('../lib/client'); -const defaultOptions = require('../lib/client-options').defaultOptions; -const { Host, HostMap } = require('../lib/host'); -const OperationState = require('../lib/operation-state'); -const promiseUtils = require('../lib/promise-utils'); +import { assert } from "chai"; +import { spawn } from "child_process"; +import http from "http"; +import path from "path"; +import sinon from "sinon"; +import Temp from "temp"; +import util from "util"; +import Client from "../lib/client"; +import { defaultOptions } from "../lib/client-options"; +import { Host, HostMap } from "../lib/host"; +import OperationState from "../lib/operation-state"; +import policies from "../lib/policies/index"; +import promiseUtils from "../lib/promise-utils"; +import types from "../lib/types/index"; +import utils from "../lib/utils"; +import { exec as childProcessExec } from "child_process"; + + + +const temp = Temp.track(true); const Vector = types.Vector; + util.inherits(RetryMultipleTimes, policies.retry.RetryPolicy); const cassandraVersionByDse = { @@ -351,7 +352,7 @@ const helper = { * @param {Client} client * @returns {Client} */ - shutdownAfterThisTest: function(client) { + shutdownAfterThisTest: function(client: Client) { this.afterThisTest(() => client.shutdown()); return client; @@ -380,7 +381,7 @@ const helper = { /** * @returns {Function} A function with a single callback param, applying the fn with parameters */ - toTask: function (fn, context) { + toTask: function (fn: Function, context) { const params = Array.prototype.slice.call(arguments, 2); return (function (next) { params.push(next); @@ -606,7 +607,7 @@ const helper = { isTracing: function () { return (process.env.TEST_TRACE === 'on'); }, - trace: function (format) { + trace: function (_format) { if (!helper.isTracing()) { return; } @@ -868,7 +869,7 @@ const helper = { }); return op; }, - prepareOnceAsync: function (q, ks) { + prepareOnceAsync: function (q, _ks) { return new Promise((resolve, reject) => { h.prepareCalled++; @@ -2039,9 +2040,10 @@ class OrderedLoadBalancingPolicy extends policies.loadBalancing.RoundRobinPolicy return callback(null, hosts[Symbol.iterator]()); } } +helper.RetryMultipleTimes = RetryMultipleTimes; +helper.OrderedLoadBalancingPolicy = OrderedLoadBalancingPolicy; +export default helper; +export { OrderedLoadBalancingPolicy, RetryMultipleTimes }; -module.exports = helper; -module.exports.RetryMultipleTimes = RetryMultipleTimes; -module.exports.OrderedLoadBalancingPolicy = OrderedLoadBalancingPolicy; diff --git a/test/unit/address-resolution-tests.js b/test/unit/address-resolution-tests.ts similarity index 90% rename from test/unit/address-resolution-tests.js rename to test/unit/address-resolution-tests.ts index b6297bd24..522d6dc37 100644 --- a/test/unit/address-resolution-tests.js +++ b/test/unit/address-resolution-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
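[Editor's note, not part of the patch] The converted test helper above ends by exposing both a default export and named exports, so call sites that used `module.exports` and call sites that import individual classes keep working. A stripped-down sketch of that dual-export shape (the `TestPolicy` name and the helper body are illustrative, not the real helper):

class TestPolicy {
  // stand-in for helpers such as RetryMultipleTimes / OrderedLoadBalancingPolicy
}

const helper = {
  TestPolicy,
  isTracing(): boolean {
    return process.env.TEST_TRACE === "on";
  }
};

// Mirror the old `module.exports = helper; module.exports.X = X;` assignments.
export default helper;
export { TestPolicy };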
*/ -'use strict'; -const assert = require('assert'); -const dns = require('dns'); +import assert from "assert"; +import dns from "dns"; +import * as addressResolution from "../../lib/policies/address-resolution"; + -const addressResolution = require('../../lib/policies/address-resolution'); const EC2MultiRegionTranslator = addressResolution.EC2MultiRegionTranslator; describe('EC2MultiRegionTranslator', function () { diff --git a/test/unit/api-tests.js b/test/unit/api-tests.ts similarity index 96% rename from test/unit/api-tests.js rename to test/unit/api-tests.ts index b62f67638..e73712a95 100644 --- a/test/unit/api-tests.js +++ b/test/unit/api-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert } = require('chai'); -const api = require('../../index'); -const auth = require('../../lib/auth'); -const helper = require('../test-helper'); +import { assert } from "chai"; +import api from "../../index"; +import auth from "../../lib/auth/index"; +import helper from "../test-helper"; + describe('API', function () { it('should expose auth module', function () { diff --git a/test/unit/basic-tests.js b/test/unit/basic-tests.ts similarity index 96% rename from test/unit/basic-tests.js rename to test/unit/basic-tests.ts index e1b181197..8c36688ff 100644 --- a/test/unit/basic-tests.js +++ b/test/unit/basic-tests.ts @@ -14,27 +14,26 @@ * limitations under the License. */ -"use strict"; -const { assert } = require('chai'); -const sinon = require('sinon'); -const util = require('util'); -const events = require('events'); - -const Client = require('../../lib/client'); -const clientOptions = require('../../lib/client-options'); -const auth = require('../../lib/auth'); -const types = require('../../lib/types'); -const { dataTypes } = types; -const loadBalancing = require('../../lib/policies/load-balancing'); -const retry = require('../../lib/policies/retry'); -const speculativeExecution = require('../../lib/policies/speculative-execution'); -const timestampGeneration = require('../../lib/policies/timestamp-generation'); -const Encoder = require('../../lib/encoder'); -const utils = require('../../lib/utils'); -const writers = require('../../lib/writers'); -const OperationState = require('../../lib/operation-state'); -const helper = require('../test-helper'); +import * as auth from '../../lib/auth/index'; +import { assert } from "chai"; +import sinon from "sinon"; +import util from "util"; +import events from "events"; +import Client from "../../lib/client"; +import clientOptions from "../../lib/client-options"; +import types from "../../lib/types/index"; +import loadBalancing from "../../lib/policies/load-balancing"; +import retry from "../../lib/policies/retry"; +import speculativeExecution from "../../lib/policies/speculative-execution"; +import timestampGeneration from "../../lib/policies/timestamp-generation"; +import Encoder from "../../lib/encoder"; +import utils from "../../lib/utils"; +import * as writers from "../../lib/writers"; +import OperationState from "../../lib/operation-state"; +import helper from "../test-helper"; + +const { dataTypes } = types; const contactPoints = ['a']; describe('types', function () { @@ -911,7 +910,7 @@ describe('exports', function () { //test that the exposed API is the one expected //it looks like a dumb test and it is, but it is necessary! 
/* eslint-disable global-require */ - const api = require('../../index.js'); + const api = require('../../index'); assert.strictEqual(api.Client, Client); assert.ok(api.errors); assert.strictEqual(typeof api.errors.DriverError, 'function'); @@ -931,7 +930,7 @@ describe('exports', function () { assert.strictEqual(typeof api.policies.retry.RetryPolicy, 'function'); assert.strictEqual(typeof api.policies.retry.IdempotenceAwareRetryPolicy, 'function'); assert.instanceOf(api.policies.defaultRetryPolicy(), api.policies.retry.RetryPolicy); - assert.strictEqual(api.policies.reconnection, require('../../lib/policies/reconnection')); + assertPackageExposed(api.policies.reconnection, require('../../lib/policies/reconnection')); assert.strictEqual(typeof api.policies.reconnection.ReconnectionPolicy, 'function'); assert.instanceOf(api.policies.defaultReconnectionPolicy(), api.policies.reconnection.ReconnectionPolicy); assert.strictEqual(api.policies.speculativeExecution, speculativeExecution); @@ -942,7 +941,7 @@ describe('exports', function () { assert.strictEqual(typeof timestampGeneration.TimestampGenerator, 'function'); assert.strictEqual(typeof timestampGeneration.MonotonicTimestampGenerator, 'function'); assert.instanceOf(api.policies.defaultTimestampGenerator(), timestampGeneration.MonotonicTimestampGenerator); - assert.strictEqual(api.auth, require('../../lib/auth')); + assertPackageExposed(api.auth, require('../../lib/auth')); // mapping module assert.ok(api.mapping); @@ -960,12 +959,12 @@ describe('exports', function () { //metadata module with classes assert.ok(api.metadata); assert.strictEqual(typeof api.metadata.Metadata, 'function'); - assert.strictEqual(api.metadata.Metadata, require('../../lib/metadata')); + assertPackageExposed(api.metadata, require('../../lib/metadata')); assert.ok(api.Encoder); assert.strictEqual(typeof api.Encoder, 'function'); - assert.strictEqual(api.Encoder, require('../../lib/encoder')); + assertPackageExposed(api.Encoder, require('../../lib/encoder')); assert.ok(api.defaultOptions()); - assert.strictEqual(api.tracker, require('../../lib/tracker')); + assertPackageExposed(api.tracker, require('../../lib/tracker')); assert.strictEqual(typeof api.tracker.RequestTracker, 'function'); assert.strictEqual(typeof api.tracker.RequestLogger, 'function'); @@ -986,4 +985,13 @@ function assertConstructorExposed(obj, constructorRef) { assert.strictEqual(typeof constructorRef, 'function'); // Verify that is exposed with the same name as the class assert.strictEqual(obj[constructorRef.name], constructorRef); +} + +function assertPackageExposed(actual, expected){ + for (const key of Object.keys(expected)){ + if(key === 'default'){ + continue; + } + assert.strictEqual(actual[key], expected[key]); + } } \ No newline at end of file diff --git a/test/unit/big-decimal-tests.js b/test/unit/big-decimal-tests.ts similarity index 97% rename from test/unit/big-decimal-tests.js rename to test/unit/big-decimal-tests.ts index beace400f..2c8ae9fcf 100644 --- a/test/unit/big-decimal-tests.js +++ b/test/unit/big-decimal-tests.ts @@ -13,11 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
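[Editor's note, not part of the patch] The new `assertPackageExposed` helper above replaces identity assertions such as `assert.strictEqual(api.auth, require('../../lib/auth'))`, which stop holding once the compared module is compiled from ES module syntax and its namespace object carries a synthetic `default` property. A hypothetical usage sketch (paths mirror the tests but are illustrative):

import { assert } from "chai";
import api from "../../index";
import * as auth from "../../lib/auth";

// Compare the exports one by one, skipping the synthetic `default` key
// added by the ESM-to-CommonJS transpilation.
for (const key of Object.keys(auth)) {
  if (key !== "default") {
    assert.strictEqual((api as any).auth[key], (auth as any)[key]);
  }
}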
*/ -'use strict'; +import assert from "assert"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; -const assert = require('assert'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); describe('BigDecimal', function () { const BigDecimal = types.BigDecimal; diff --git a/test/unit/client-tests.js b/test/unit/client-tests.ts similarity index 92% rename from test/unit/client-tests.js rename to test/unit/client-tests.ts index 807d16ee8..fe96e96fe 100644 --- a/test/unit/client-tests.js +++ b/test/unit/client-tests.ts @@ -13,28 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert } = require('chai'); -const util = require('util'); -const proxyquire = require('proxyquire'); -const sinon = require('sinon'); - -const Client = require('../../lib/client'); -const policies = require('../../lib/policies'); -const helper = require('../test-helper'); -const errors = require('../../lib/errors'); -const utils = require('../../lib/utils'); -const types = require('../../lib/types'); -const HostMap = require('../../lib/host').HostMap; -const Host = require('../../lib/host').Host; -const Metadata = require('../../lib/metadata'); -const Encoder = require('../../lib/encoder'); -const ProfileManager = require('../../lib/execution-profile').ProfileManager; -const ExecutionProfile = require('../../lib/execution-profile').ExecutionProfile; -const clientOptions = require('../../lib/client-options'); -const PrepareHandler = require('../../lib/prepare-handler'); - +import { assert } from "chai"; +import util from "util"; +import sinon from "sinon"; +import Client from "../../lib/client"; +import policies from "../../lib/policies/index"; +import helper from "../test-helper"; +import errors from "../../lib/errors"; +import utils from "../../lib/utils"; +import types from "../../lib/types/index"; +import Metadata from "../../lib/metadata/index"; +import Encoder from "../../lib/encoder"; +import clientOptions from "../../lib/client-options"; +import * as PrepareHandlerAggregate from "../../lib/prepare-handler"; +import {Host, HostMap} from "../../lib/host"; +import { ExecutionProfile, ProfileManager } from "../../lib/execution-profile"; +import * as RequestHandlerAggregate from "../../lib/request-handler"; +import PrepareHandler from "../../lib/prepare-handler"; + +const stubs : any[] = []; describe('Client', function () { + afterEach(function () { + stubs.forEach(stub => stub.restore()); + stubs.length = 0; + }); describe('constructor', function () { it('should throw an exception when contactPoints are not provided', function () { assert.throws(function () { @@ -163,26 +165,24 @@ describe('Client', function () { it('should connect once and queue if multiple calls in parallel', function (done) { let initCounter = 0; let emitCounter = 0; + const options = utils.extend({ contactPoints: helper.baseOptions.contactPoints, policies: { - loadBalancing: new policies.loadBalancing.RoundRobinPolicy() + loadBalancing: new policies.loadBalancing.RoundRobinPolicy(), }, pooling: { - warmup: false - } + warmup: false, + }, }); const ccMock = getControlConnectionMock(null, options); - ccMock.prototype.init = async () => { + ccMock.init = async () => { initCounter++; // Async await helper.delayAsync(20); }; - - const Client = proxyquire('../../lib/client.js', { - './control-connection': ccMock - }); const client = new Client(options); + sinon.stub(client, 
'controlConnection').value(ccMock); client.on('connected', function () {emitCounter++;}); utils.times(1000, function (n, next) { client.connect(function (err) { @@ -548,11 +548,8 @@ describe('Client', function () { send: () => Promise.resolve() }; - const Client = proxyquire('../../lib/client.js', { - './request-handler': requestHandlerMock - }); - it('should internally call to connect', function (done) { + const stub = sinon.stub(RequestHandlerAggregate, 'default').value(requestHandlerMock); const client = new Client(helper.baseOptions); const connect = sinon.fake(() => Promise.resolve()); sinon.replace(client, '_connect', connect); @@ -560,6 +557,7 @@ describe('Client', function () { client.batch(['q1'], function (err) { assert.ifError(err); assert.isTrue(connect.calledOnce); + stub.restore(); done(); }); }); @@ -593,20 +591,24 @@ describe('Client', function () { }); context('with no callback specified', function () { it('should return a promise', function (done) { + const stub = sinon.stub(RequestHandlerAggregate, 'default').value(requestHandlerMock); const client = new Client(helper.baseOptions); const p = client.batch(['Q'], null); assert.instanceOf(p, Promise); p.catch(function (err) { assert.instanceOf(err, errors.NoHostAvailableError); + stub.restore(); done(); }); }); it('should reject the promise when queries is not an Array', function (done) { + const stub = sinon.stub(RequestHandlerAggregate, 'default').value(requestHandlerMock); const client = new Client(helper.baseOptions); const p = client.batch('Q', null); assert.instanceOf(p, Promise); p.catch(function (err) { assert.instanceOf(err, errors.ArgumentError); + stub.restore(); done(); }); }); @@ -648,11 +650,8 @@ describe('Client', function () { hosts.push(h1.address, h1); hosts.push(h2.address, h2); - const Client = proxyquire('../../lib/client', { - './control-connection': getControlConnectionMock(hosts) - }); - const client = new Client(options); + sinon.stub(client, 'controlConnection').value(getControlConnectionMock(hosts)); client.shutdown(function(){ assert.equal(client.connected, false); done(); @@ -669,10 +668,8 @@ describe('Client', function () { h2.pool.connections = getConnections(); hosts.push(h1.address, h1); hosts.push(h2.address, h2); - const Client = proxyquire('../../lib/client', { - './control-connection': getControlConnectionMock(hosts) - }); const client = new Client(options); + sinon.stub(client, 'controlConnection').value(getControlConnectionMock(hosts)); utils.series([ client.connect.bind(client), function shutDownMultiple(seriesNext) { @@ -693,11 +690,8 @@ describe('Client', function () { h2.pool.connections = getConnections(); hosts.push(h1.address, h1); hosts.push(h2.address, h2); - const Client = proxyquire('../../lib/client', { - './control-connection': getControlConnectionMock(hosts) - }); - const client = new Client(options); + sinon.stub(client, 'controlConnection').value(getControlConnectionMock(hosts)); utils.series([ client.connect.bind(client), function shutDownMultiple(seriesNext) { @@ -749,7 +743,7 @@ describe('Client', function () { client.hosts = { length: 5 }; let calls = 0; client.metadata = { - compareSchemaVersions: (c) => { + compareSchemaVersions: (_c) => { process.nextTick(() => clock.tick(500)); return Promise.resolve(++calls === 3); } @@ -765,7 +759,7 @@ describe('Client', function () { })); client.hosts = { length: 5 }; client.metadata = { - compareSchemaVersions: sinon.fake(c => Promise.resolve(false)) + compareSchemaVersions: sinon.fake(_c => Promise.resolve(false)) }; 
process.nextTick(() => clock.tick(5000)); @@ -779,7 +773,7 @@ describe('Client', function () { client.hosts = {length: 3}; const dummyError = new Error('dummy error'); client.metadata = { - compareSchemaVersions: c => Promise.reject(dummyError) + compareSchemaVersions: _c => Promise.reject(dummyError) }; const err = await helper.assertThrowsAsync(client._waitForSchemaAgreement(null)); @@ -788,18 +782,16 @@ describe('Client', function () { }); }); -function getControlConnectionMock(hosts, options) { - function ControlConnectionMock() { - this.hosts = hosts || new HostMap(); - this.metadata = new Metadata(options || {}); - this.profileManager = newProfileManager(options); - this.host = { setDistance: utils.noop }; - this.shutdown = utils.noop; - } - - ControlConnectionMock.prototype.init = async () => {}; - - return ControlConnectionMock; +function getControlConnectionMock(hosts, options?) { + return { + hosts: hosts || new HostMap(), + metadata: new Metadata(options || {}), + profileManager: newProfileManager(options), + host: { setDistance: utils.noop }, + shutdown: utils.noop, + init: async () => {}, + reset: async () => {} + }; } function getOptions(options) { @@ -811,10 +803,8 @@ function newProfileManager(options) { } function newConnectedInstance(requestHandlerMock, options, prepareHandlerMock) { - const Client = proxyquire('../../lib/client', { - './request-handler': requestHandlerMock || function () {}, - './prepare-handler': prepareHandlerMock || function () {} - }); + stubs.push(sinon.stub(RequestHandlerAggregate, 'default').value(requestHandlerMock || function () {})); + stubs.push(sinon.stub(PrepareHandlerAggregate, 'default').value(prepareHandlerMock || function () {})); const client = new Client(utils.extend({}, helper.baseOptions, options)); client._getEncoder = () => new Encoder(2, {}); diff --git a/test/unit/cloud/certificate-validation-tests.js b/test/unit/cloud/certificate-validation-tests.ts similarity index 96% rename from test/unit/cloud/certificate-validation-tests.js rename to test/unit/cloud/certificate-validation-tests.ts index 46c94bf78..fd4b02ad2 100644 --- a/test/unit/cloud/certificate-validation-tests.js +++ b/test/unit/cloud/certificate-validation-tests.ts @@ -13,11 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import { checkServerIdentity } from "../../../lib/datastax/cloud/index"; -'use strict'; - -const { assert } = require('chai'); -const { checkServerIdentity } = require('../../../lib/datastax/cloud'); describe('checkServerIdentity()', () => { const port = 32598; diff --git a/test/unit/concurrent/execute-concurrent-tests.js b/test/unit/concurrent/execute-concurrent-tests.ts similarity index 98% rename from test/unit/concurrent/execute-concurrent-tests.js rename to test/unit/concurrent/execute-concurrent-tests.ts index add4cb179..1f34bf32b 100644 --- a/test/unit/concurrent/execute-concurrent-tests.js +++ b/test/unit/concurrent/execute-concurrent-tests.ts @@ -14,14 +14,12 @@ * limitations under the License. 
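[Editor's note, not part of the patch] In the client tests above, proxyquire injection is replaced by stubbing the imported module's `default` export with sinon and restoring the stubs in `afterEach`. A reduced sketch of that pattern (module path and mock body are illustrative):

import sinon, { SinonStub } from "sinon";
import * as RequestHandlerModule from "../../lib/request-handler";

const stubs: SinonStub[] = [];

afterEach(() => {
  stubs.forEach(s => s.restore());
  stubs.length = 0;
});

it("sends through a fake request handler", () => {
  // Swap the default export for a mock for the duration of this test;
  // code that imports request-handler picks up the stubbed value.
  stubs.push(sinon.stub(RequestHandlerModule, "default").value({
    send: () => Promise.resolve()
  }));

  // ... exercise the Client here ...
});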
*/ -'use strict'; - -const assert = require('assert'); -const Readable = require('stream').Readable; -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const helper = require('../../test-helper'); -const executeConcurrent = require('../../../lib/concurrent').executeConcurrent; +import assert from "assert"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import helper from "../../test-helper"; +import { Readable } from "stream"; +import { executeConcurrent } from "../../../lib/concurrent"; describe('executeConcurrent(client, query, parameters)', function () { this.timeout(10000); diff --git a/test/unit/connection-tests.js b/test/unit/connection-tests.ts similarity index 90% rename from test/unit/connection-tests.js rename to test/unit/connection-tests.ts index d3100ad3e..f93d1475f 100644 --- a/test/unit/connection-tests.js +++ b/test/unit/connection-tests.ts @@ -13,20 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const EventEmitter = require('events'); -const proxyquire = require('proxyquire'); -const sinon = require('sinon'); -const Connection = require('../../lib/connection'); -const requests = require('../../lib/requests'); -const defaultOptions = require('../../lib/client-options').defaultOptions(); -const utils = require('../../lib/utils'); -const errors = require('../../lib/errors'); -const ExecutionOptions = require('../../lib/execution-options').ExecutionOptions; -const helper = require('../test-helper'); +import assert from "assert"; +import EventEmitter from "events"; +import sinon from "sinon"; +import Connection from "../../lib/connection"; +import requests from "../../lib/requests"; +import utils from "../../lib/utils"; +import errors from "../../lib/errors"; +import helper from "../test-helper"; +import ClientOptions from "../../lib/client-options"; +import { ExecutionOptions } from "../../lib/execution-options"; +import net from "net"; +const defaultOptions = ClientOptions.defaultOptions(); describe('Connection', function () { describe('constructor', function () { it('should parse host endpoint into address and port', function () { @@ -215,9 +215,9 @@ describe('Connection', function () { } } - const ConnectionInjected = proxyquire('../../lib/connection', { 'net': { Socket } }); + const stub = sinon.stub(net, 'Socket').value(Socket); - const c = new ConnectionInjected('127.0.0.1:9042', 9042, utils.extend({}, defaultOptions)); + const c = new Connection('127.0.0.1:9042', 9042, utils.extend({}, defaultOptions)); c.logEmitter = helper.noop; c.sendStream = function (r, o, cb) { cb(null, {}); @@ -234,6 +234,7 @@ describe('Connection', function () { c.close(function (err) { assert.ifError(err); assert.strictEqual(closeEmitted, 1); + stub.restore(); done(); }); }); @@ -249,8 +250,8 @@ describe('Connection', function () { } } - const ConnectionInjected = proxyquire('../../lib/connection', { 'net': { Socket } }); - const c = new ConnectionInjected('127.0.0.1:9042', 9042, utils.extend({}, defaultOptions)); + const stub = sinon.stub(net, 'Socket').value(Socket); + const c = new Connection('127.0.0.1:9042', 9042, utils.extend({}, defaultOptions)); c.logEmitter = helper.noop; c.sendStream = function (r, o, cb) { cb(null, {}); @@ -268,6 +269,7 @@ describe('Connection', function () { c.close(function (err) { assert.ifError(err); assert.strictEqual(closeEmitted, 1); + stub.restore(); done(); 
}); }); diff --git a/test/unit/control-connection-tests.js b/test/unit/control-connection-tests.ts similarity index 80% rename from test/unit/control-connection-tests.js rename to test/unit/control-connection-tests.ts index e4b690169..431340b12 100644 --- a/test/unit/control-connection-tests.js +++ b/test/unit/control-connection-tests.ts @@ -13,24 +13,29 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert } = require('chai'); -const events = require('events'); -const proxyquire = require('proxyquire'); -const util = require('util'); - -const helper = require('../test-helper.js'); -const ControlConnection = require('../../lib/control-connection'); -const Host = require('../../lib/host').Host; -const utils = require('../../lib/utils'); -const Metadata = require('../../lib/metadata'); -const types = require('../../lib/types'); -const errors = require('../../lib/errors'); -const policies = require('../../lib/policies'); -const clientOptions = require('../../lib/client-options'); -const ProfileManager = require('../../lib/execution-profile').ProfileManager; +import { assert } from "chai"; +import sinon, { SinonStub } from "sinon"; +import dns from "dns"; +import events from "events"; +import util from "util"; +import helper from "../test-helper"; +import ControlConnection from "../../lib/control-connection"; +import utils from "../../lib/utils"; +import Metadata from "../../lib/metadata/index"; +import types from "../../lib/types/index"; +import errors from "../../lib/errors"; +import policies from "../../lib/policies/index"; +import clientOptions from "../../lib/client-options"; +import { Host } from "../../lib/host"; +import { ProfileManager } from "../../lib/execution-profile"; + +const stubs : SinonStub[] = []; describe('ControlConnection', function () { + afterEach(function () { + stubs.forEach(s => s.restore()); + stubs.length = 0; + }); describe('constructor', function () { it('should create a new metadata instance', function () { const cc = new ControlConnection(clientOptions.extend({}, helper.baseOptions)); @@ -42,14 +47,14 @@ describe('ControlConnection', function () { const localhost = 'localhost'; - async function testResolution(CcMock, expectedHosts, expectedResolved, hostName) { + async function testResolution(expectedHosts, expectedResolved?, hostName?) 
{ if (!expectedResolved) { expectedResolved = expectedHosts; } const contactPointHostName = (hostName || localhost); const state = {}; - const cc = new CcMock(clientOptions.extend({ contactPoints: [contactPointHostName] }), null, getContext({ + const cc = new ControlConnection(clientOptions.extend({ contactPoints: [contactPointHostName] }), null, getContext({ failBorrow: 10, state })); @@ -78,106 +83,108 @@ describe('ControlConnection', function () { } it('should resolve IPv4 and IPv6 addresses, default host (localhost) and port', () => { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(null, ifLocalhost(name, ['127.0.0.1'])); - }, - resolve6: function (name, cb) { - cb(null, ifLocalhost(name, ['::1'])); - }, - lookup: function () { - throw new Error('dns.lookup() should not be used'); - } - }}); - - return testResolution(ControlConnectionMock, + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['127.0.0.1'])); + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['::1'])); + }) + ); + stubs.push(sinon.stub(dns, 'lookup').callsFake(() => { + throw new Error('dns.lookup() should not be used'); + }) + ); + + return testResolution( [ '127.0.0.1:9042', '::1:9042' ], [ '127.0.0.1:9042', '[::1]:9042' ]); }); it('should resolve IPv4 and IPv6 addresses with non default port', () => { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(null, ifLocalhost(name, ['127.0.0.1'])); - }, - resolve6: function (name, cb) { - cb(null, ifLocalhost(name, ['::1'])); - }, - lookup: function () { - throw new Error('dns.lookup() should not be used'); - } - }}); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['127.0.0.1'])); + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['::1'])); + } + )); + stubs.push(sinon.stub(dns, 'lookup').callsFake(() => { + throw new Error('dns.lookup() should not be used'); + }) + ); - return testResolution(ControlConnectionMock, + return testResolution( [ '127.0.0.1:9999', '::1:9999' ], [ '127.0.0.1:9999', '[::1]:9999' ], 'localhost:9999'); }); it('should resolve all IPv4 and IPv6 addresses provided by dns.resolve()', () => { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(null, ['1', '2']); - }, - resolve6: function (name, cb) { - cb(null, ['10', '20']); - }, - lookup: function () { - throw new Error('dns.lookup() should not be used'); - } - }}); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['1', '2'])); + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(null, ifLocalhost(name, ['10', '20'])); + } + )); + stubs.push(sinon.stub(dns, 'lookup').callsFake(() => { + throw new Error('dns.lookup() should not be used'); + }) + ); - return testResolution(ControlConnectionMock, + return testResolution( [ '1:9042', '2:9042', '10:9042', '20:9042' ], [ '1:9042', '2:9042', '[10]:9042', '[20]:9042' ]); }); it('should ignore IPv4 or IPv6 resolution errors', function () { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(null, ['1', '2']); - }, - resolve6: function (name, cb) { - cb(new Error('Test error')); - }, - lookup: function 
() { - throw new Error('dns.lookup() should not be used'); - } - }}); - - return testResolution(ControlConnectionMock, [ '1:9042', '2:9042']); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(null, ['1', '2']); + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(new Error('Test error')); + } + )); + stubs.push(sinon.stub(dns, 'lookup').callsFake(() => { + throw new Error('dns.lookup() should not be used'); + }) + ); + return testResolution([ '1:9042', '2:9042']); }); it('should use dns.lookup() as failover', () => { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(new Error('Test error')); - }, - resolve6: function (name, cb) { - cb(new Error('Test error')); - }, - lookup: function (name, options, cb) { - cb(null, [{ address: '123', family: 4 }]); - } - }}); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(new Error('Test error')); + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(new Error('Test error')); + } + )); + stubs.push(sinon.stub(dns, 'lookup').callsFake((name, options, cb) => { + cb(null, [{ address: '123', family: 4 }]); + }) + ); - return testResolution(ControlConnectionMock, [ '123:9042' ]); + return testResolution([ '123:9042' ]); }); it('should use dns.lookup() when no address was resolved', () => { - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - cb(null); - }, - resolve6: function (name, cb) { - cb(null, []); - }, - lookup: function (name, options, cb) { - cb(null, [{ address: '1234', family: 4 }]); - } - }}); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + cb(null); + })); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((name, cb) => { + cb(null, []); + })); + stubs.push(sinon.stub(dns, 'lookup').callsFake((name, options, cb) => { + cb(null, [{ address: '1234', family: 4 }]); + })); - return testResolution(ControlConnectionMock, [ '1234:9042' ]); + return testResolution([ '1234:9042' ]); }); it('should continue iterating through the hosts when borrowing a connection fails',async () => { @@ -281,23 +288,23 @@ describe('ControlConnection', function () { const hostname = 'my-host-name'; const resolvedAddresses = ['1','2']; - const ControlConnectionMock = proxyquire('../../lib/control-connection', { dns: { - resolve4: function (name, cb) { - if (dnsWorks) { - cb(null, resolvedAddresses); - } - else { - cb(null, []); - } - }, - resolve6: function (name, cb) { - throw new Error('IPv6 resolution errors should be ignored'); - }, - lookup: function () { - throw new Error('dns.lookup() should not be used'); + stubs.push(sinon.stub(dns, 'resolve4').callsFake((name, cb) => { + if (dnsWorks) { + cb(null, resolvedAddresses); + } + else { + cb(null, []); } - }}); - const cc = new ControlConnectionMock( + } + )); + stubs.push(sinon.stub(dns, 'resolve6').callsFake((_name, _cb) => { + throw new Error('IPv6 resolution errors should be ignored'); + })); + stubs.push(sinon.stub(dns, 'lookup').callsFake(() => { + throw new Error('dns.lookup() should not be used'); + })); + + const cc = new ControlConnection( clientOptions.extend({ contactPoints: [hostname] }), null, getContext()); diff --git a/test/unit/default-load-balancing-policy-tests.js b/test/unit/default-load-balancing-policy-tests.ts similarity index 97% rename from test/unit/default-load-balancing-policy-tests.js rename to 
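[Editor's note, not part of the patch] The control-connection tests above follow the same stub-and-restore approach for DNS: instead of injecting a fake `dns` module through proxyquire, they stub `dns.resolve4`, `dns.resolve6` and `dns.lookup` directly with `callsFake`. A condensed sketch, reusing the shared stubs array and `afterEach` restore shown earlier (addresses are illustrative):

import dns from "dns";
import sinon from "sinon";

// Each stub is pushed onto the shared `stubs` array so afterEach can restore it.
stubs.push(sinon.stub(dns, "resolve4").callsFake((name, cb) => cb(null, ["127.0.0.1"])));
stubs.push(sinon.stub(dns, "resolve6").callsFake((name, cb) => cb(null, [])));
stubs.push(sinon.stub(dns, "lookup").callsFake(() => {
  throw new Error("dns.lookup() should not be used");
}));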
test/unit/default-load-balancing-policy-tests.ts index 315bc7d7e..ab5080a36 100644 --- a/test/unit/default-load-balancing-policy-tests.js +++ b/test/unit/default-load-balancing-policy-tests.ts @@ -13,19 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; - -const { assert } = require('chai'); -const util = require('util'); -const helper = require('../test-helper'); -const policies = require('../../lib/policies'); -const clientOptions = require('../../lib/client-options'); -const { Host, HostMap } = require('../../lib/host'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const { ExecutionOptions } = require('../../lib/execution-options'); -const errors = require('../../lib/errors'); -const Client = require('../../lib/client'); +import { assert } from "chai"; +import util from "util"; +import helper from "../test-helper"; +import policies from "../../lib/policies/index"; +import clientOptions from "../../lib/client-options"; +import { Host, HostMap } from "../../lib/host"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import { ExecutionOptions } from "../../lib/execution-options"; +import errors from "../../lib/errors"; +import Client from "../../lib/client"; + const { loadBalancing } = policies; const { DefaultLoadBalancingPolicy } = loadBalancing; diff --git a/test/unit/dse-gssapi-auth-provider-tests.js b/test/unit/dse-gssapi-auth-provider-tests.ts similarity index 93% rename from test/unit/dse-gssapi-auth-provider-tests.js rename to test/unit/dse-gssapi-auth-provider-tests.ts index 9af2bcdfd..c6c52efcb 100644 --- a/test/unit/dse-gssapi-auth-provider-tests.js +++ b/test/unit/dse-gssapi-auth-provider-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const DseGssapiAuthProvider = require('../../lib/auth/dse-gssapi-auth-provider'); -const helper = require('../test-helper'); -const utils = require('../../lib/utils'); + +import DseGssapiAuthProvider from '../../lib/auth/dse-gssapi-auth-provider'; +import assert from "assert"; +import helper from "../test-helper"; +import utils from "../../lib/utils"; const cDescribe = helper.conditionalDescribe(helper.requireOptional('kerberos'), 'kerberos required to run'); const dseAuthenticatorName = 'com.datastax.bdp.cassandra.auth.DseAuthenticator'; diff --git a/test/unit/dse-plain-text-auth-provider-tests.js b/test/unit/dse-plain-text-auth-provider-tests.ts similarity index 82% rename from test/unit/dse-plain-text-auth-provider-tests.js rename to test/unit/dse-plain-text-auth-provider-tests.ts index 50fa67582..29a89e5fe 100644 --- a/test/unit/dse-plain-text-auth-provider-tests.js +++ b/test/unit/dse-plain-text-auth-provider-tests.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const Authenticator = require('../../lib/auth/provider').Authenticator; -const DsePlainTextAuthProvider = require('../../lib/auth/dse-plain-text-auth-provider'); + +import { Authenticator } from '../../lib/auth/provider'; +import DsePlainTextAuthProvider from '../../lib/auth/dse-plain-text-auth-provider'; +import assert from "assert"; describe('DsePlainTextAuthProvider', function () { describe('#newAuthenticator()', function () { diff --git a/test/unit/duration-type-tests.js b/test/unit/duration-type-tests.ts similarity index 96% rename from test/unit/duration-type-tests.js rename to test/unit/duration-type-tests.ts index 151977a73..92b7d912b 100644 --- a/test/unit/duration-type-tests.js +++ b/test/unit/duration-type-tests.ts @@ -13,11 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const util = require('util'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); +import assert from "assert"; +import util from "util"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; + + const Duration = types.Duration; const Long = types.Long; diff --git a/test/unit/encoder-tests.js b/test/unit/encoder-tests.ts similarity index 98% rename from test/unit/encoder-tests.js rename to test/unit/encoder-tests.ts index 354dcba91..8ae86b52c 100644 --- a/test/unit/encoder-tests.js +++ b/test/unit/encoder-tests.ts @@ -13,20 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert } = require('chai'); -const sinon = require('sinon'); -const util = require('util'); -const utils = require('../../lib/utils'); -const tokenizer = require('../../lib/tokenizer'); -const token = require('../../lib/token'); -const Vector = require('../../lib/types/vector'); -const Encoder = require('../../lib/encoder'); -const { types } = require('../../index.js'); -const ExecutionOptions = require('../../lib/execution-options').ExecutionOptions; -const dataTypes = types.dataTypes; -const helper = require('../test-helper'); +import { assert } from "chai"; +import sinon from "sinon"; +import util from "util"; +import utils from "../../lib/utils"; +import tokenizer from "../../lib/tokenizer"; +import token from "../../lib/token"; +import Vector from "../../lib/types/vector"; +import Encoder from "../../lib/encoder"; +import { types } from "../../index"; +import helper from "../test-helper"; +import { ExecutionOptions } from "../../lib/execution-options"; + +const dataTypes = types.dataTypes; const zeroLengthTypesSupported = new Set([ dataTypes.text, dataTypes.ascii, @@ -1359,16 +1359,6 @@ describe('encoder', function () { assert.strictEqual(typeof encoder.protocolVersion, 'number'); }); }); - - describe('prototype', function () { - it('should only expose encode() and decode() functions', function () { - const keys = Object.keys(Encoder.prototype); - assert.deepStrictEqual(keys, ['decode', 'encode']); - keys.forEach(function (k) { - assert.strictEqual(typeof Encoder.prototype[k], 'function'); - }); - }); - }); }); function getExecOptions(options) { diff --git a/test/unit/encoder-vector-tests.js b/test/unit/encoder-vector-tests.ts similarity index 93% rename from test/unit/encoder-vector-tests.js rename to test/unit/encoder-vector-tests.ts index 4b3cc3569..f68c2a323 100644 --- a/test/unit/encoder-vector-tests.js +++ 
b/test/unit/encoder-vector-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const { assert, util } = require('chai'); -const Encoder = require('../../lib/encoder'); -const { types } = require('../../index'); -const Vector = require('../../lib/types/vector'); -const helper = require('../test-helper'); +import { assert, util } from "chai"; +import Encoder from "../../lib/encoder"; +import { types } from "../../index"; +import Vector from "../../lib/types/vector"; +import helper from "../test-helper"; + describe('Vector tests', function () { const encoder = new Encoder(4, {}); diff --git a/test/unit/error-tests.js b/test/unit/error-tests.ts similarity index 93% rename from test/unit/error-tests.js rename to test/unit/error-tests.ts index de49d96dc..e8c457e2d 100644 --- a/test/unit/error-tests.js +++ b/test/unit/error-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import assert from "assert"; +import path from "path"; +import errors from "../../lib/errors"; +import helper from "../test-helper"; + -const assert = require('assert'); -const path = require('path'); -const errors = require('../../lib/errors'); -const helper = require('../test-helper'); const fileName = path.basename(__filename); describe('DriverError', function () { diff --git a/test/unit/event-debouncer-tests.js b/test/unit/event-debouncer-tests.ts similarity index 97% rename from test/unit/event-debouncer-tests.js rename to test/unit/event-debouncer-tests.ts index d2363e1ab..c76198995 100644 --- a/test/unit/event-debouncer-tests.js +++ b/test/unit/event-debouncer-tests.ts @@ -13,13 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import { assert } from "chai"; +import sinon from "sinon"; +import helper from "../test-helper"; +import EventDebouncer from "../../lib/metadata/event-debouncer"; -const { assert } = require('chai'); -const sinon = require('sinon'); - -const helper = require('../test-helper'); -const EventDebouncer = require('../../lib/metadata/event-debouncer'); describe('EventDebouncer', function () { describe('timeoutElapsed()', function () { diff --git a/test/unit/execution-options-tests.js b/test/unit/execution-options-tests.ts similarity index 94% rename from test/unit/execution-options-tests.js rename to test/unit/execution-options-tests.ts index 16dbdc03e..c5a049665 100644 --- a/test/unit/execution-options-tests.js +++ b/test/unit/execution-options-tests.ts @@ -14,15 +14,13 @@ * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const utils = require('../../lib/utils'); -const types = require('../../lib/types'); -const helper = require('../test-helper'); -const DefaultExecutionOptions = require('../../lib/execution-options').DefaultExecutionOptions; -const ExecutionProfile = require('../../lib/execution-profile').ExecutionProfile; -const defaultOptions = require('../../lib/client-options').defaultOptions; +import assert from "assert"; +import utils from "../../lib/utils"; +import types from "../../lib/types/index"; +import helper from "../test-helper"; +import { DefaultExecutionOptions } from "../../lib/execution-options"; +import { ExecutionProfile } from "../../lib/execution-profile"; +import { defaultOptions } from "../../lib/client-options"; describe('DefaultExecutionOptions', () => { describe('create()', () => { diff --git a/test/unit/execution-profile-tests.js b/test/unit/execution-profile-tests.ts similarity index 90% rename from test/unit/execution-profile-tests.js rename to test/unit/execution-profile-tests.ts index 7b393829d..fb75b5602 100644 --- a/test/unit/execution-profile-tests.js +++ b/test/unit/execution-profile-tests.ts @@ -13,14 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); - -const clientOptions = require('../../lib/client-options'); -const ExecutionProfile = require('../../lib/execution-profile').ExecutionProfile; -const ProfileManager = require('../../lib/execution-profile').ProfileManager; -const types = require('../../lib/types'); +import assert from "assert"; +import clientOptions from "../../lib/client-options"; +import types from "../../lib/types/index"; +import {ExecutionProfile} from "../../lib/execution-profile"; +import {ProfileManager} from "../../lib/execution-profile"; describe('ProfileManager', function () { describe('constructor', function () { diff --git a/test/unit/geometry/line-string-tests.js b/test/unit/geometry/line-string-tests.ts similarity index 95% rename from test/unit/geometry/line-string-tests.js rename to test/unit/geometry/line-string-tests.ts index 6dfaaa18e..6f4043850 100644 --- a/test/unit/geometry/line-string-tests.js +++ b/test/unit/geometry/line-string-tests.ts @@ -13,13 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); -const Point = require('../../../lib/geometry/point'); -const moduleName = '../../../lib/geometry/line-string'; -const LineString = require(moduleName); +import assert from "assert"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Point from "../../../lib/geometry/point"; +import { LineString } from "../../../lib/geometry"; + describe('LineString', function () { describe('constructor', function () { diff --git a/test/unit/geometry/point-tests.js b/test/unit/geometry/point-tests.ts similarity index 95% rename from test/unit/geometry/point-tests.js rename to test/unit/geometry/point-tests.ts index 2c509462d..709e56153 100644 --- a/test/unit/geometry/point-tests.js +++ b/test/unit/geometry/point-tests.ts @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); -const moduleName = '../../../lib/geometry/point'; -const Point = require(moduleName); + +import assert from "assert"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Point from "../../../lib/geometry/point"; describe('Point', function () { describe('constructor', function () { diff --git a/test/unit/geometry/polygon-tests.js b/test/unit/geometry/polygon-tests.ts similarity index 96% rename from test/unit/geometry/polygon-tests.js rename to test/unit/geometry/polygon-tests.ts index 7e62956db..66c413166 100644 --- a/test/unit/geometry/polygon-tests.js +++ b/test/unit/geometry/polygon-tests.ts @@ -13,13 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../../test-helper'); -const utils = require('../../../lib/utils'); -const Point = require('../../../lib/geometry/point'); -const moduleName = '../../../lib/geometry/polygon'; -const Polygon = require(moduleName); + +import assert from "assert"; +import helper from "../../test-helper"; +import utils from "../../../lib/utils"; +import Point from "../../../lib/geometry/point"; +import { Polygon } from "../../../lib/geometry"; describe('Polygon', function () { describe('constructor', function () { diff --git a/test/unit/graph/executor-tests.js b/test/unit/graph/executor-tests.ts similarity index 98% rename from test/unit/graph/executor-tests.js rename to test/unit/graph/executor-tests.ts index 439c13440..cf44be624 100644 --- a/test/unit/graph/executor-tests.js +++ b/test/unit/graph/executor-tests.ts @@ -14,17 +14,15 @@ * limitations under the License. */ -'use strict'; - -const assert = require('assert'); -const Client = require('../../../lib/client'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const policies = require('../../../lib/policies'); -const ExecutionProfile = require('../../../lib/execution-profile').ExecutionProfile; -const GraphExecutor = require('../../../lib/datastax/graph/graph-executor'); -const { GraphExecutionOptions, graphProtocol } = require('../../../lib/datastax/graph/options'); -const helper = require('../../test-helper'); +import assert from "assert"; +import Client from "../../../lib/client"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import policies from "../../../lib/policies/index"; +import GraphExecutor from "../../../lib/datastax/graph/graph-executor"; +import { GraphExecutionOptions, graphProtocol } from "../../../lib/datastax/graph/options"; +import helper from "../../test-helper"; +import { ExecutionProfile } from "../../../lib/execution-profile"; const proxyExecuteKey = 'ProxyExecute'; diff --git a/test/unit/graph/graph-result-tests.js b/test/unit/graph/graph-result-tests.ts similarity index 95% rename from test/unit/graph/graph-result-tests.js rename to test/unit/graph/graph-result-tests.ts index 0ae6f53c2..e69983c44 100644 --- a/test/unit/graph/graph-result-tests.js +++ b/test/unit/graph/graph-result-tests.ts @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import assert from "assert"; +import utils from "../../../lib/utils"; +import ResultSet from "../../../lib/types/result-set"; +import GraphResultSet from "../../../lib/datastax/graph/result-set"; -const assert = require('assert'); -const utils = require('../../../lib/utils'); -const ResultSet = require('../../../lib/types/result-set'); -const GraphResultSet = require('../../../lib/datastax/graph/result-set'); const resultVertex = getResultSet([ { "gremlin": JSON.stringify({ diff --git a/test/unit/graph/graphson-tests.js b/test/unit/graph/graphson-tests.ts similarity index 92% rename from test/unit/graph/graphson-tests.js rename to test/unit/graph/graphson-tests.ts index a7485db82..1e8138f3f 100644 --- a/test/unit/graph/graphson-tests.js +++ b/test/unit/graph/graphson-tests.ts @@ -13,16 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import { assert } from "chai"; +import helper from "../../test-helper"; +import { GraphSON2Reader, GraphSON3Reader, GraphSON3Writer } from "../../../lib/datastax/graph/graph-serializer"; +import getCustomTypeSerializers from "../../../lib/datastax/graph/custom-type-serializers"; +import graphModule from "../../../lib/datastax/graph/index"; +import types from "../../../lib/types/index"; +import utils from "../../../lib/utils"; +import geometry from "../../../lib/geometry/index"; + -const { assert } = require('chai'); -const helper = require('../../test-helper'); -const { GraphSON2Reader, GraphSON3Reader, GraphSON3Writer } = require('../../../lib/datastax/graph/graph-serializer'); -const getCustomTypeSerializers = require('../../../lib/datastax/graph/custom-type-serializers'); -const graphModule = require('../../../lib/datastax/graph'); -const types = require('../../../lib/types'); -const utils = require('../../../lib/utils'); -const geometry = require('../../../lib/geometry'); const { Tuple } = types; const { asInt, asDouble, asTimestamp } = graphModule; @@ -41,7 +41,7 @@ describe('GraphSON2Reader', function () { [ 'dse:Blob', Buffer, buffer.toString('base64'), buffer ], [ 'dse:Point', geometry.Point, new geometry.Point(1, 2.1)], [ 'dse:LineString', geometry.LineString, geometry.LineString.fromString('LINESTRING (1 1, 2 2, 3 3)')], - [ 'dse:Polygon', geometry.Polygon, new geometry.Polygon.fromString('POLYGON ((3 1, 4 4, 2 4, 1 2, 3 1))')] + [ 'dse:Polygon', geometry.Polygon, geometry.Polygon.fromString('POLYGON ((3 1, 4 4, 2 4, 1 2, 3 1))')] ].forEach(function (item) { it('should read ' + item[0], function () { const obj = { diff --git a/test/unit/host-tests.js b/test/unit/host-tests.ts similarity index 97% rename from test/unit/host-tests.js rename to test/unit/host-tests.ts index ccf90e540..8b3c2b314 100644 --- a/test/unit/host-tests.js +++ b/test/unit/host-tests.ts @@ -13,25 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. 
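[Editor's note, not part of the patch] One behavioural fix rides along in the GraphSON test above: `Polygon.fromString` is a static factory that already returns a `Polygon` instance, so the stray `new` in front of it is dropped (the old `new geometry.Polygon.fromString(...)` form no longer type-checks under TypeScript). For illustration:

import { Polygon } from "../../../lib/geometry";

// fromString() parses Well-Known Text and returns a Polygon, so no `new` is needed.
const polygon: Polygon = Polygon.fromString("POLYGON ((3 1, 4 4, 2 4, 1 2, 3 1))");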
*/ -'use strict'; +import { assert } from "chai"; +import sinon from "sinon"; +import util from "util"; +import events from "events"; +import hostModule from "../../lib/host"; +import HostConnectionPool from "../../lib/host-connection-pool"; +import Metadata from "../../lib/metadata/index"; +import types from "../../lib/types/index"; +import clientOptions from "../../lib/client-options"; +import utils from "../../lib/utils"; +import policies from "../../lib/policies/index"; +import helper from "../test-helper"; -const { assert } = require('chai'); -const sinon = require('sinon'); -const util = require('util'); -const events = require('events'); -const hostModule = require('../../lib/host'); const Host = hostModule.Host; -const HostConnectionPool = require('../../lib/host-connection-pool'); -const Metadata = require('../../lib/metadata'); const HostMap = hostModule.HostMap; -const types = require('../../lib/types'); -const clientOptions = require('../../lib/client-options'); const defaultOptions = clientOptions.defaultOptions(); defaultOptions.pooling.coreConnectionsPerHost = clientOptions.coreConnectionsPerHostV3; -const utils = require('../../lib/utils.js'); -const policies = require('../../lib/policies'); -const helper = require('../test-helper'); const reconnection = policies.reconnection; describe('HostConnectionPool', function () { diff --git a/test/unit/inet-address-tests.js b/test/unit/inet-address-tests.ts similarity index 97% rename from test/unit/inet-address-tests.js rename to test/unit/inet-address-tests.ts index 509f99237..64bc9ef5a 100644 --- a/test/unit/inet-address-tests.js +++ b/test/unit/inet-address-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../test-helper'); -const utils = require('../../lib/utils'); -const InetAddress = require('../../lib/types').InetAddress; + +import assert from "assert"; +import helper from "../test-helper"; +import utils from "../../lib/utils"; +import { InetAddress } from "../../lib/types"; describe('InetAddress', function () { describe('constructor', function () { diff --git a/test/unit/insights-client-tests.js b/test/unit/insights-client-tests.ts similarity index 96% rename from test/unit/insights-client-tests.js rename to test/unit/insights-client-tests.ts index 6ea176af7..1570eb941 100644 --- a/test/unit/insights-client-tests.js +++ b/test/unit/insights-client-tests.ts @@ -14,20 +14,18 @@ * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const os = require('os'); -const Client = require('../../lib/client'); -const ClientState = require('../../lib/metadata/client-state'); -const InsightsClient = require('../../lib/insights-client'); -const ExecutionProfile = require('../../lib/execution-profile').ExecutionProfile; -const utils = require('../../lib/utils'); -const types = require('../../lib/types'); -const policies = require('../../lib/policies'); -const coreConnectionsPerHostV3 = require('../../lib/client-options').coreConnectionsPerHostV3; -const packageInfo = require('../../package.json'); -const helper = require('../test-helper'); +import assert from "assert"; +import os from "os"; +import Client from "../../lib/client"; +import ClientState from "../../lib/metadata/client-state"; +import InsightsClient from "../../lib/insights-client"; +import utils from "../../lib/utils"; +import types from "../../lib/types/index"; +import policies from "../../lib/policies/index"; +import packageInfo from "../../package.json"; +import helper from "../test-helper"; +import { ExecutionProfile } from "../../lib/execution-profile"; +import { coreConnectionsPerHostV3 } from "../../lib/client-options"; const kerberosModule = helper.requireOptional('kerberos'); const kerberosDescribe = kerberosModule ? describe : xdescribe; @@ -381,7 +379,7 @@ describe('InsightsClient', function () { const statusEventDelay = 100; const client = getClient({ - rpcCallback: m => messages++ + rpcCallback: _m => messages++ }); const insights = new InsightsClient(client, { @@ -453,7 +451,7 @@ function getClient(options) { const client = new Client(helper.getOptions(clientOptions)); // Provide a fake control connection - client.controlConnection.query = (request, w) => { + client.controlConnection.query = (request, _w) => { rpcCallback(request.params[0]); const err = errorGetter(); diff --git a/test/unit/license-tests.js b/test/unit/license-tests.ts similarity index 95% rename from test/unit/license-tests.js rename to test/unit/license-tests.ts index 0a6c794b8..ca4f17d9b 100644 --- a/test/unit/license-tests.js +++ b/test/unit/license-tests.ts @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require("assert"); -const path = require("path"); -const fs = require("fs"); +import assert from "assert"; +import path from "path"; +import fs from "fs"; + const licenseHeaderRegex = new RegExp( `/\\* @@ -45,7 +45,7 @@ describe('All source files', function() { // Files to capture and validate header on. const candidateRE = /.*\.(js|ts)$/; // List of directories to ignore, this may not be comprehensive depending on your local workspace. 
- const dirsToIgnoreRE = /(node_modules)|(\.git)|(\.idea)|(coverage)|(out)|(examples)/; + const dirsToIgnoreRE = /(node_modules)|(\.git)|(\.idea)|(coverage)|(out)|(examples)|(dist)/; const filesToIgnoreRE = /([\\/]test[\\/]unit[\\/]typescript[\\/].*\.js)|(integer\.js)|(generated\.(?:js|ts))|(lib[\\/]datastax[\\/]graph[\\/](?:graph-serializer|type-serializers)\.js)/; function validateLicenses(dir) { diff --git a/test/unit/load-balancing-tests.js b/test/unit/load-balancing-tests.ts similarity index 97% rename from test/unit/load-balancing-tests.js rename to test/unit/load-balancing-tests.ts index 524649287..7e1b87fb2 100644 --- a/test/unit/load-balancing-tests.js +++ b/test/unit/load-balancing-tests.ts @@ -13,19 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); +import assert from "assert"; +import helper from "../test-helper"; +import errors from "../../lib/errors"; +import Client from "../../lib/client"; +import clientOptions from "../../lib/client-options"; +import { Host, HostMap } from "../../lib/host"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import { ExecutionOptions } from "../../lib/execution-options"; +import { AllowListPolicy, DCAwareRoundRobinPolicy, LoadBalancingPolicy, RoundRobinPolicy, TokenAwarePolicy } from "../../lib/policies/load-balancing"; -const helper = require('../test-helper.js'); -const errors = require('../../lib/errors'); -const Client = require('../../lib/client.js'); -const clientOptions = require('../../lib/client-options'); -const { Host, HostMap } = require('../../lib/host'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const { ExecutionOptions } = require('../../lib/execution-options'); -const { AllowListPolicy, LoadBalancingPolicy, TokenAwarePolicy, RoundRobinPolicy, DCAwareRoundRobinPolicy } = - require('../../lib/policies/load-balancing'); describe('RoundRobinPolicy', function () { it('should yield an error when the hosts are not set', function(done) { diff --git a/test/unit/mapping/cache-tests.js b/test/unit/mapping/cache-tests.ts similarity index 97% rename from test/unit/mapping/cache-tests.js rename to test/unit/mapping/cache-tests.ts index 2d62b0652..1bac26837 100644 --- a/test/unit/mapping/cache-tests.js +++ b/test/unit/mapping/cache-tests.ts @@ -14,11 +14,9 @@ * limitations under the License. */ -'use strict'; - -const assert = require('assert'); -const Cache = require('../../../lib/mapping/cache'); -const q = require('../../../lib/mapping/q').q; +import assert from "assert"; +import Cache from "../../../lib/mapping/cache"; +import {q} from "../../../lib/mapping/q"; describe('Cache', function() { this.timeout(5000); diff --git a/test/unit/mapping/mapper-tests.js b/test/unit/mapping/mapper-tests.ts similarity index 96% rename from test/unit/mapping/mapper-tests.js rename to test/unit/mapping/mapper-tests.ts index 9bbf50351..0c3c46440 100644 --- a/test/unit/mapping/mapper-tests.js +++ b/test/unit/mapping/mapper-tests.ts @@ -13,15 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import { assert } from "chai"; +import sinon from "sinon"; +import Mapper from "../../../lib/mapping/mapper"; +import ModelMapper from "../../../lib/mapping/model-mapper"; +import helper from "../../test-helper"; +import mapperTestHelper from "./mapper-unit-test-helper"; -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); -const Mapper = require('../../../lib/mapping/mapper'); -const ModelMapper = require('../../../lib/mapping/model-mapper'); -const helper = require('../../test-helper'); -const mapperTestHelper = require('./mapper-unit-test-helper'); describe('Mapper', () => { describe('constructor', () => { diff --git a/test/unit/mapping/mapper-unit-test-helper.js b/test/unit/mapping/mapper-unit-test-helper.ts similarity index 96% rename from test/unit/mapping/mapper-unit-test-helper.js rename to test/unit/mapping/mapper-unit-test-helper.ts index 9167d20e8..32c234629 100644 --- a/test/unit/mapping/mapper-unit-test-helper.js +++ b/test/unit/mapping/mapper-unit-test-helper.ts @@ -14,16 +14,14 @@ * limitations under the License. */ -'use strict'; +import assert from "assert"; +import types from "../../../lib/types/index"; +import ModelMapper from "../../../lib/mapping/model-mapper"; +import Mapper from "../../../lib/mapping/mapper"; -const assert = require('assert'); -const types = require('../../../lib/types'); -const ModelMapper = require('../../../lib/mapping/model-mapper'); const ResultSet = types.ResultSet; const dataTypes = types.dataTypes; -const Mapper = require('../../../lib/mapping/mapper'); - -const mapperHelper = module.exports = { +const mapperHelper = { /** * Gets a fake client instance that returns metadata for a single table * @param {Array|Function} columns @@ -172,3 +170,4 @@ const mapperHelper = module.exports = { } }; +export default mapperHelper; \ No newline at end of file diff --git a/test/unit/mapping/mapping-handler-tests.js b/test/unit/mapping/mapping-handler-tests.ts similarity index 92% rename from test/unit/mapping/mapping-handler-tests.js rename to test/unit/mapping/mapping-handler-tests.ts index 64b39850e..6260df53e 100644 --- a/test/unit/mapping/mapping-handler-tests.js +++ b/test/unit/mapping/mapping-handler-tests.ts @@ -14,14 +14,12 @@ * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const mapperTestHelper = require('./mapper-unit-test-helper'); -const MappingHandler = require('../../../lib/mapping/mapping-handler'); -const ModelMappingInfo = require('../../../lib/mapping/model-mapping-info'); -const DefaultTableMappings = require('../../../lib/mapping/table-mappings').DefaultTableMappings; -const q = require('../../../lib/mapping/q').q; +import assert from "assert"; +import mapperTestHelper from "./mapper-unit-test-helper"; +import MappingHandler from "../../../lib/mapping/mapping-handler"; +import ModelMappingInfo from "../../../lib/mapping/model-mapping-info"; +import { DefaultTableMappings } from "../../../lib/mapping/table-mappings"; +import { q } from "../../../lib/mapping/q"; describe('MappingHandler', () => { describe('#getSelectExecutor()', () => { diff --git a/test/unit/mapping/model-mapper-mutation-tests.js b/test/unit/mapping/model-mapper-mutation-tests.ts similarity index 98% rename from test/unit/mapping/model-mapper-mutation-tests.js rename to test/unit/mapping/model-mapper-mutation-tests.ts index d28846c51..108b928ca 100644 --- a/test/unit/mapping/model-mapper-mutation-tests.js +++ b/test/unit/mapping/model-mapper-mutation-tests.ts @@ -14,14 +14,11 @@ * limitations under the License. */ -'use strict'; - -const assert = require('assert'); -const q = require('../../../lib/mapping/q').q; -const types = require('../../../lib/types'); -const helper = require('../../test-helper'); -const mapperTestHelper = require('./mapper-unit-test-helper'); -const dataTypes = types.dataTypes; +import assert from "assert"; +import { q } from "../../../lib/mapping/q"; +import { dataTypes } from "../../../lib/types/index"; +import helper from "../../test-helper"; +import mapperTestHelper from "./mapper-unit-test-helper"; describe('ModelMapper', () => { describe('#insert()', () => { diff --git a/test/unit/mapping/model-mapper-select-tests.js b/test/unit/mapping/model-mapper-select-tests.ts similarity index 98% rename from test/unit/mapping/model-mapper-select-tests.js rename to test/unit/mapping/model-mapper-select-tests.ts index a40f23e83..59eab03a5 100644 --- a/test/unit/mapping/model-mapper-select-tests.js +++ b/test/unit/mapping/model-mapper-select-tests.ts @@ -14,13 +14,11 @@ * limitations under the License. */ -'use strict'; - -const assert = require('assert'); -const q = require('../../../lib/mapping/q').q; -const dataTypes = require('../../../lib/types').dataTypes; -const helper = require('../../test-helper'); -const mapperTestHelper = require('./mapper-unit-test-helper'); +import assert from "assert"; +import helper from "../../test-helper"; +import mapperTestHelper from "./mapper-unit-test-helper"; +import { q } from "../../../lib/mapping/q"; +import { dataTypes } from "../../../lib/types"; const emptyResponse = { meta: { columns: [] }, rows: [] }; diff --git a/test/unit/mapping/model-mapping-info-tests.js b/test/unit/mapping/model-mapping-info-tests.ts similarity index 95% rename from test/unit/mapping/model-mapping-info-tests.js rename to test/unit/mapping/model-mapping-info-tests.ts index bffbc9a8d..223f32d59 100644 --- a/test/unit/mapping/model-mapping-info-tests.js +++ b/test/unit/mapping/model-mapping-info-tests.ts @@ -13,12 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import { assert } from "chai"; +import ModelMappingInfo from "../../../lib/mapping/model-mapping-info"; -'use strict'; - -const { assert } = require('chai'); - -const ModelMappingInfo = require('../../../lib/mapping/model-mapping-info'); describe('ModelMappingInfo', function () { describe('parse()', function () { diff --git a/test/unit/mapping/result-mapper-tests.js b/test/unit/mapping/result-mapper-tests.ts similarity index 94% rename from test/unit/mapping/result-mapper-tests.js rename to test/unit/mapping/result-mapper-tests.ts index b06758601..e60a4ba9c 100644 --- a/test/unit/mapping/result-mapper-tests.js +++ b/test/unit/mapping/result-mapper-tests.ts @@ -13,13 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { assert } from "chai"; +import sinon from "sinon"; +import ResultMapper from "../../../lib/mapping/result-mapper"; -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); - -const ResultMapper = require('../../../lib/mapping/result-mapper'); describe('ResultMapper', function () { describe('getSelectAdapter()', function () { diff --git a/test/unit/mapping/result-tests.js b/test/unit/mapping/result-tests.ts similarity index 95% rename from test/unit/mapping/result-tests.js rename to test/unit/mapping/result-tests.ts index 70d47906b..a5aab7de9 100644 --- a/test/unit/mapping/result-tests.js +++ b/test/unit/mapping/result-tests.ts @@ -13,12 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import assert from "assert"; +import Result from "../../../lib/mapping/result"; +import util from "util"; -'use strict'; - -const assert = require('assert'); -const Result = require('../../../lib/mapping/result'); -const util = require('util'); const expected = [ { id: 1, name: 'name1', adapted: true }, { id: 2, name: 'name2', adapted: true }]; diff --git a/test/unit/mapping/table-mappings-tests.js b/test/unit/mapping/table-mappings-tests.ts similarity index 92% rename from test/unit/mapping/table-mappings-tests.js rename to test/unit/mapping/table-mappings-tests.ts index 369374002..f01202b3d 100644 --- a/test/unit/mapping/table-mappings-tests.js +++ b/test/unit/mapping/table-mappings-tests.ts @@ -13,11 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import assert from "assert"; +import tableMappingsModule from "../../../lib/mapping/table-mappings"; -'use strict'; -const assert = require('assert'); -const tableMappingsModule = require('../../../lib/mapping/table-mappings'); const UnderscoreCqlToCamelCaseMappings = tableMappingsModule.UnderscoreCqlToCamelCaseMappings; describe('UnderscoreCqlToCamelCaseMappings', () => { diff --git a/test/unit/mapping/tree-tests.js b/test/unit/mapping/tree-tests.ts similarity index 97% rename from test/unit/mapping/tree-tests.js rename to test/unit/mapping/tree-tests.ts index 2eb1282c8..4cab9a3ba 100644 --- a/test/unit/mapping/tree-tests.js +++ b/test/unit/mapping/tree-tests.ts @@ -13,11 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import assert from "assert"; +import Tree from "../../../lib/mapping/tree"; -'use strict'; - -const assert = require('assert'); -const Tree = require('../../../lib/mapping/tree'); describe('Tree', function () { this.timeout(20000); diff --git a/test/unit/metadata-tests.js b/test/unit/metadata-tests.ts similarity index 99% rename from test/unit/metadata-tests.js rename to test/unit/metadata-tests.ts index f15aa59e3..6c95aae57 100644 --- a/test/unit/metadata-tests.js +++ b/test/unit/metadata-tests.ts @@ -13,29 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -'use strict'; - -const { assert } = require('chai'); -const sinon = require('sinon'); -const events = require('events'); - -const helper = require('../test-helper.js'); -const clientOptions = require('../../lib/client-options.js'); -const Host = require('../../lib/host.js').Host; -const HostMap = require('../../lib/host').HostMap; -const Metadata = require('../../lib/metadata'); -const TableMetadata = require('../../lib/metadata/table-metadata'); -const Murmur3Token = require('../../lib/token').Murmur3Token; -const TokenRange = require('../../lib/token').TokenRange; -const tokenizer = require('../../lib/tokenizer'); -const types = require('../../lib/types'); -const MutableLong = require('../../lib/types/mutable-long'); +import { assert } from "chai"; +import sinon from "sinon"; +import events from "events"; +import helper from "../test-helper"; +import clientOptions from "../../lib/client-options"; +import Metadata from "../../lib/metadata/index"; +import TableMetadata from "../../lib/metadata/table-metadata"; +import tokenizer from "../../lib/tokenizer"; +import types from "../../lib/types/index"; +import MutableLong from "../../lib/types/mutable-long"; +import utils from "../../lib/utils"; +import errors from "../../lib/errors"; +import Encoder from "../../lib/encoder"; + + +import { Host, HostMap } from "../../lib/host"; +import { Murmur3Token, TokenRange } from "../../lib/token"; +import SchemaParser from "../../lib/metadata/schema-parser"; const dataTypes = types.dataTypes; -const utils = require('../../lib/utils'); -const errors = require('../../lib/errors'); -const Encoder = require('../../lib/encoder'); -const isDoneForToken = require('../../lib/metadata/schema-parser').isDoneForToken; +const isDoneForToken = SchemaParser.isDoneForToken; describe('Metadata', function () { this.timeout(5000); @@ -901,7 +898,7 @@ describe('Metadata', function () { }; let calls = 0; - const cc = getControlConnectionForResponse(r => { + const cc = getControlConnectionForResponse(_r => { //try with empty result and null duration let rows = []; if (++calls > 1) { @@ -1793,7 +1790,7 @@ describe('Metadata', function () { {"keyspace_name":"ks_udf","function_name":"plus","signature":["bigint","bigint"],"argument_names":["s","v"],"argument_types":["org.apache.cassandra.db.marshal.LongType","org.apache.cassandra.db.marshal.LongType"],"body":"return s+v;","called_on_null_input":false,"language":"java","return_type":"org.apache.cassandra.db.marshal.LongType"} ]; let called = 0; - const cc = getControlConnectionForResponse(q => { + const cc = getControlConnectionForResponse(_q => { if (called++ < 5) { return {rows: []}; } @@ -1828,7 +1825,7 @@ describe('Metadata', function () { const rows = [ 
{"keyspace_name":"ks_udf","function_name":"plus","signature":["bigint","bigint"],"argument_names":["s","v"],"argument_types":["org.apache.cassandra.db.marshal.LongType","org.apache.cassandra.db.marshal.LongType"],"body":"return s+v;","called_on_null_input":false,"language":"java","return_type":"org.apache.cassandra.db.marshal.LongType"} ]; - const cc = getControlConnectionForResponse(q => { + const cc = getControlConnectionForResponse(_q => { if (called++ < 5) { return new Error('Dummy error'); } @@ -2168,7 +2165,7 @@ describe('Metadata', function () { {"keyspace_name":"ks_udf","aggregate_name":"sum","signature":["bigint"],"argument_types":["org.apache.cassandra.db.marshal.LongType"],"final_func":null,"initcond":utils.allocBufferFromArray([0,0,0,0,0,0,0,0]),"return_type":"org.apache.cassandra.db.marshal.LongType","state_func":"plus","state_type":"org.apache.cassandra.db.marshal.LongType"} ]; - const cc = getControlConnectionForResponse(q => { + const cc = getControlConnectionForResponse(_q => { if (called++ < 5) { return { rows: [] }; } @@ -2204,7 +2201,7 @@ describe('Metadata', function () { const rows = [ {"keyspace_name":"ks_udf","aggregate_name":"sum","signature":["bigint"],"argument_types":["org.apache.cassandra.db.marshal.LongType"],"final_func":null,"initcond":utils.allocBufferFromArray([0,0,0,0,0,0,0,0]),"return_type":"org.apache.cassandra.db.marshal.LongType","state_func":"plus","state_type":"org.apache.cassandra.db.marshal.LongType"} ]; - const cc = getControlConnectionForResponse(q => { + const cc = getControlConnectionForResponse(_q => { if (called++ < 5) { return new Error('Dummy'); } diff --git a/test/unit/mutable-long-tests.js b/test/unit/mutable-long-tests.ts similarity index 97% rename from test/unit/mutable-long-tests.js rename to test/unit/mutable-long-tests.ts index 8bbb32fc9..736f8b76c 100644 --- a/test/unit/mutable-long-tests.js +++ b/test/unit/mutable-long-tests.ts @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -"use strict"; -const assert = require('assert'); -const format = require('util').format; -const Long = require('long'); -const MutableLong = require('../../lib/types/mutable-long'); +import assert from "assert"; +import Long from "long"; +import MutableLong from "../../lib/types/mutable-long"; +import { format } from "util"; describe('MutableLong', function () { describe('fromNumber() and #toNumber()', function () { diff --git a/test/unit/parser-tests.js b/test/unit/parser-tests.ts similarity index 99% rename from test/unit/parser-tests.js rename to test/unit/parser-tests.ts index b0d21dd86..d30f1e722 100644 --- a/test/unit/parser-tests.js +++ b/test/unit/parser-tests.ts @@ -13,16 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const events = require('events'); +import assert from "assert"; +import events from "events"; +import Encoder from "../../lib/encoder"; +import streams from "../../lib/streams"; +import errors from "../../lib/errors"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import helper from "../test-helper"; -const Encoder = require('../../lib/encoder'); -const streams = require('../../lib/streams'); -const errors = require('../../lib/errors'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const helper = require('../test-helper'); /** * Tests for the transform streams that are involved in the reading of a response diff --git a/test/unit/prepare-handler-tests.js b/test/unit/prepare-handler-tests.ts similarity index 94% rename from test/unit/prepare-handler-tests.js rename to test/unit/prepare-handler-tests.ts index 1a2afb1a3..8e172b906 100644 --- a/test/unit/prepare-handler-tests.js +++ b/test/unit/prepare-handler-tests.ts @@ -13,15 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; - -const { assert } = require('chai'); -const events = require('events'); -const helper = require('../test-helper'); -const PrepareHandler = require('../../lib/prepare-handler'); -const defaultOptions = require('../../lib/client-options').defaultOptions; -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); + +import { assert } from "chai"; +import events from "events"; +import helper from "../test-helper"; +import PrepareHandler from "../../lib/prepare-handler"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import { defaultOptions } from "../../lib/client-options"; describe('PrepareHandler', function () { diff --git a/test/unit/protocol-stream-tests.js b/test/unit/protocol-stream-tests.ts similarity index 96% rename from test/unit/protocol-stream-tests.js rename to test/unit/protocol-stream-tests.ts index 254a25ed1..b4f681407 100644 --- a/test/unit/protocol-stream-tests.js +++ b/test/unit/protocol-stream-tests.ts @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const Protocol = require('../../lib/streams').Protocol; -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); +import assert from "assert"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import {Protocol} from "../../lib/streams"; describe('Protocol', function () { it('should emit a single frame with 0-length body', function (done) { diff --git a/test/unit/protocol-version-tests.js b/test/unit/protocol-version-tests.ts similarity index 94% rename from test/unit/protocol-version-tests.js rename to test/unit/protocol-version-tests.ts index 780cd99a4..8953ac064 100644 --- a/test/unit/protocol-version-tests.js +++ b/test/unit/protocol-version-tests.ts @@ -13,12 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const protocolVersion = require('../../lib/types/').protocolVersion; -const Host = require('../../lib/host').Host; -const options = require('../../lib/client-options').defaultOptions(); +import assert from "assert"; +import {protocolVersion} from "../../lib/types"; +import {Host} from "../../lib/host"; +import clientOptions from "../../lib/client-options"; + +const options = clientOptions.defaultOptions(); describe('protocolVersion', function () { describe('#getHighestCommon()', function () { diff --git a/test/unit/reconnection-test.js b/test/unit/reconnection-test.ts similarity index 95% rename from test/unit/reconnection-test.js rename to test/unit/reconnection-test.ts index 4eeadd9d1..69b5760bb 100644 --- a/test/unit/reconnection-test.js +++ b/test/unit/reconnection-test.ts @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -//project modules -const utils = require('../../lib/utils'); -const helper = require('../test-helper'); -const reconnection = require('../../lib/policies/reconnection'); +import assert from "assert"; +import utils from "../../lib/utils"; +import helper from "../test-helper"; +import reconnection from "../../lib/policies/reconnection"; + +//project modules describe('ConstantReconnectionPolicy', function () { it('should yield the same wait time', function (done) { const delay = 2000; diff --git a/test/unit/request-handler-tests.js b/test/unit/request-handler-tests.ts similarity index 96% rename from test/unit/request-handler-tests.js rename to test/unit/request-handler-tests.ts index 62875e652..55f7bf272 100644 --- a/test/unit/request-handler-tests.js +++ b/test/unit/request-handler-tests.ts @@ -13,28 +13,27 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const assert = require('assert'); -const util = require('util'); - -const RequestHandler = require('../../lib/request-handler'); -const requests = require('../../lib/requests'); -const helper = require('../test-helper'); -const errors = require('../../lib/errors'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const retry = require('../../lib/policies/retry'); -const speculativeExecution = require('../../lib/policies/speculative-execution'); -const execProfileModule = require('../../lib/execution-profile'); + +import assert from "assert"; +import util from "util"; +import RequestHandler from "../../lib/request-handler"; +import requests from "../../lib/requests"; +import helper from "../test-helper"; +import errors from "../../lib/errors"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import retry from "../../lib/policies/retry"; +import speculativeExecution from "../../lib/policies/speculative-execution"; +import execProfileModule from "../../lib/execution-profile"; +import OperationState from "../../lib/operation-state"; +import * as execOptionsModule from "../../lib/execution-options"; +import ClientMetrics from "../../lib/metrics/client-metrics"; +import { defaultOptions } from "../../lib/client-options"; + const ProfileManager = execProfileModule.ProfileManager; const ExecutionProfile = execProfileModule.ExecutionProfile; -const OperationState = require('../../lib/operation-state'); -const defaultOptions = require('../../lib/client-options').defaultOptions; -const execOptionsModule = require('../../lib/execution-options'); const DefaultExecutionOptions = execOptionsModule.DefaultExecutionOptions; const ExecutionOptions = execOptionsModule.ExecutionOptions; -const ClientMetrics = require('../../lib/metrics/client-metrics'); - describe('RequestHandler', function () { const queryRequest = new requests.QueryRequest('QUERY1'); describe('#send()', function () { diff --git a/test/unit/requests-test.js b/test/unit/requests-test.ts similarity index 97% rename from test/unit/requests-test.js rename to test/unit/requests-test.ts index c8facec6b..b87ae6bf5 100644 --- a/test/unit/requests-test.js +++ b/test/unit/requests-test.ts @@ -13,15 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; - -const assert = require('assert'); -const requests = require('../../lib/requests'); -const Encoder = require('../../lib/encoder'); -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const ExecutionOptions = require('../../lib/execution-options').ExecutionOptions; -const packageInfo = require('../../package.json'); + +import assert from "assert"; +import requests from "../../lib/requests"; +import Encoder from "../../lib/encoder"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import packageInfo from "../../package.json"; +import { ExecutionOptions } from "../../lib/execution-options"; + const QueryRequest = requests.QueryRequest; const ExecuteRequest = requests.ExecuteRequest; const BatchRequest = requests.BatchRequest; diff --git a/test/unit/result-set-tests.js b/test/unit/result-set-tests.ts similarity index 97% rename from test/unit/result-set-tests.js rename to test/unit/result-set-tests.ts index 26bb8a784..26c436f08 100644 --- a/test/unit/result-set-tests.js +++ b/test/unit/result-set-tests.ts @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import { assert } from "chai"; +import sinon from "sinon"; +import utils from "../../lib/utils"; +import types from "../../lib/types/index"; +import helper from "../test-helper"; + -const { assert } = require('chai'); -const sinon = require('sinon'); -const utils = require('../../lib/utils'); -const types = require('../../lib/types'); -const helper = require('../test-helper'); const { ResultSet } = types; describe('ResultSet', function () { diff --git a/test/unit/retry-policy-tests.js b/test/unit/retry-policy-tests.ts similarity index 97% rename from test/unit/retry-policy-tests.js rename to test/unit/retry-policy-tests.ts index 19296cf63..14b081071 100644 --- a/test/unit/retry-policy-tests.js +++ b/test/unit/retry-policy-tests.ts @@ -13,12 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const types = require('../../lib/types'); -const policies = require('../../lib/policies'); -const helper = require('../test-helper'); -const ExecutionOptions = require('../../lib/execution-options').ExecutionOptions; + +import assert from "assert"; +import types from "../../lib/types/index"; +import policies from "../../lib/policies/index"; +import helper from "../test-helper"; +import {ExecutionOptions} from "../../lib/execution-options"; + const RetryPolicy = policies.retry.RetryPolicy; const IdempotenceAwareRetryPolicy = policies.retry.IdempotenceAwareRetryPolicy; const FallthroughRetryPolicy = policies.retry.FallthroughRetryPolicy; diff --git a/test/unit/search/date-range-tests.js b/test/unit/search/date-range-tests.ts similarity index 96% rename from test/unit/search/date-range-tests.js rename to test/unit/search/date-range-tests.ts index 2fc875675..d0831b74c 100644 --- a/test/unit/search/date-range-tests.js +++ b/test/unit/search/date-range-tests.ts @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import assert from "assert"; +import util from "util"; +import helper from "../../test-helper"; +import * as dateRangeModule from "../../../lib/datastax/search/date-range"; + -const assert = require('assert'); -const util = require('util'); -const helper = require('../../test-helper'); -const dateRangeModule = require('../../../lib/datastax/search/date-range'); const DateRange = dateRangeModule.DateRange; const DateRangeBound = dateRangeModule.DateRangeBound; const unbounded = dateRangeModule.unbounded; diff --git a/test/unit/speculative-execution-tests.js b/test/unit/speculative-execution-tests.ts similarity index 89% rename from test/unit/speculative-execution-tests.js rename to test/unit/speculative-execution-tests.ts index 6fe4871dc..9e283cba1 100644 --- a/test/unit/speculative-execution-tests.js +++ b/test/unit/speculative-execution-tests.ts @@ -13,11 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import helper from "../test-helper"; +import specExecModule from "../../lib/policies/speculative-execution"; -'use strict'; - -const helper = require('../test-helper'); -const specExecModule = require('../../lib/policies/speculative-execution'); describe('NoSpeculativeExecutionPolicy', () => { describe('#getOptions()', () => { diff --git a/test/unit/stream-id-stack-tests.js b/test/unit/stream-id-stack-tests.ts similarity index 97% rename from test/unit/stream-id-stack-tests.js rename to test/unit/stream-id-stack-tests.ts index c70436a66..81fe24499 100644 --- a/test/unit/stream-id-stack-tests.js +++ b/test/unit/stream-id-stack-tests.ts @@ -13,12 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import sinon from "sinon"; +import assert from "assert"; +import StreamIdStack from "../../lib/stream-id-stack"; -"use strict"; -const sinon = require('sinon'); -const assert = require('assert'); - -const StreamIdStack = require('../../lib/stream-id-stack'); describe('StreamIdStack', function () { let clock; diff --git a/test/unit/timestamp-tests.js b/test/unit/timestamp-tests.ts similarity index 92% rename from test/unit/timestamp-tests.js rename to test/unit/timestamp-tests.ts index a1f89224b..2fde97d77 100644 --- a/test/unit/timestamp-tests.js +++ b/test/unit/timestamp-tests.ts @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const MonotonicTimestampGenerator = require('../../lib/policies/timestamp-generation').MonotonicTimestampGenerator; -const Long = require('../../lib/types').Long; -const helper = require('../test-helper'); +import assert from "assert"; +import helper from "../test-helper"; +import { MonotonicTimestampGenerator } from "../../lib/policies/timestamp-generation"; +import {Long} from "../../lib/types"; describe('MonotonicTimestampGenerator', function () { describe('#next()', function () { diff --git a/test/unit/token-tests.js b/test/unit/token-tests.ts similarity index 99% rename from test/unit/token-tests.js rename to test/unit/token-tests.ts index 06297ab35..7d578c40d 100644 --- a/test/unit/token-tests.js +++ b/test/unit/token-tests.ts @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; +import tokenizer from "../../lib/tokenizer"; +import token from "../../lib/token"; +import assert from "assert"; + -const tokenizer = require('../../lib/tokenizer'); -const token = require('../../lib/token'); -const assert = require('assert'); class TokenTester { constructor(tokenizer) { diff --git a/test/unit/tokenizer-tests.js b/test/unit/tokenizer-tests.ts similarity index 94% rename from test/unit/tokenizer-tests.js rename to test/unit/tokenizer-tests.ts index c710d8764..f730e019c 100644 --- a/test/unit/tokenizer-tests.js +++ b/test/unit/tokenizer-tests.ts @@ -13,19 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); +import assert from "assert"; +import tokenizer from "../../lib/tokenizer"; +import token from "../../lib/token"; +import types from "../../lib/types/index"; +import utils from "../../lib/utils"; +import MutableLong from "../../lib/types/mutable-long"; +import helper from "../test-helper"; + -const tokenizer = require('../../lib/tokenizer'); -const token = require('../../lib/token'); const Murmur3Tokenizer = tokenizer.Murmur3Tokenizer; const RandomTokenizer = tokenizer.RandomTokenizer; const ByteOrderedTokenizer = tokenizer.ByteOrderedTokenizer; -const types = require('../../lib/types'); -const utils = require('../../lib/utils'); -const MutableLong = require('../../lib/types/mutable-long'); -const helper = require('../test-helper'); - describe('Murmur3Tokenizer', function () { describe('#rotl64()', function () { it('should return expected results', function () { diff --git a/test/unit/tracker-tests.js b/test/unit/tracker-tests.ts similarity index 97% rename from test/unit/tracker-tests.js rename to test/unit/tracker-tests.ts index 11d35a154..ca54bb918 100644 --- a/test/unit/tracker-tests.js +++ b/test/unit/tracker-tests.ts @@ -14,13 +14,11 @@ * limitations under the License. */ -'use strict'; - -const assert = require('assert'); -const helper = require('../test-helper'); -const types = require('../../lib/types'); -const RequestLogger = require('../../lib/tracker').RequestLogger; -const ExecutionOptions = require('../../lib/execution-options').ExecutionOptions; +import assert from "assert"; +import helper from "../test-helper"; +import types from "../../lib/types/index"; +import { RequestLogger } from "../../lib/tracker"; +import { ExecutionOptions } from "../../lib/execution-options"; describe('RequestLogger', () => { describe('#onSuccess()', () => { diff --git a/test/unit/utils-tests.js b/test/unit/utils-tests.ts similarity index 98% rename from test/unit/utils-tests.js rename to test/unit/utils-tests.ts index ba2741601..2322f7a71 100644 --- a/test/unit/utils-tests.js +++ b/test/unit/utils-tests.ts @@ -13,11 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -'use strict'; -const { assert } = require('chai'); -const sinon = require('sinon'); -const utils = require('../../lib/utils'); -const helper = require('../test-helper'); +import { assert } from "chai"; +import sinon from "sinon"; +import utils from "../../lib/utils"; +import helper from "../test-helper"; + + const AddressResolver = utils.AddressResolver; describe('utils', function () { diff --git a/test/unit/uuid-tests.js b/test/unit/uuid-tests.ts similarity index 98% rename from test/unit/uuid-tests.js rename to test/unit/uuid-tests.ts index c53367fbf..15bbdc49e 100644 --- a/test/unit/uuid-tests.js +++ b/test/unit/uuid-tests.ts @@ -13,13 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; -const assert = require('assert'); -const helper = require('../test-helper'); -const utils = require('../../lib/utils'); -const Uuid = require('../../lib/types').Uuid; -const TimeUuid = require('../../lib/types').TimeUuid; +import assert from "assert"; +import helper from "../test-helper"; +import utils from "../../lib/utils"; +import { TimeUuid, Uuid } from "../../lib/types"; describe('Uuid', function () { describe('constructor', function () { diff --git a/test/unit/version-number-tests.js b/test/unit/version-number-tests.ts similarity index 97% rename from test/unit/version-number-tests.js rename to test/unit/version-number-tests.ts index e93d7d50c..9e90cba9e 100644 --- a/test/unit/version-number-tests.js +++ b/test/unit/version-number-tests.ts @@ -13,10 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -'use strict'; +import assert from "assert"; +import VersionNumber from "../../lib/types/version-number"; -const assert = require('assert'); -const VersionNumber = require('../../lib/types/version-number'); describe('VersionNumber', () => { describe('#parse()', () => { diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 000000000..b80ad4e97 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "module": "NodeNext", + "lib": ["es2015"], + "target": "es2015", + "sourceMap": false, + "strict": false, + "noImplicitAny": false, + "outDir": "out", + "declaration": true, + "allowJs": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declarationMap": false, + "moduleDetection": "force", + "resolveJsonModule": true, + "moduleResolution": "nodenext", + "checkJs": true, + }, + "include": [ + "lib/**/*.ts", + // "test/**/*.ts", + "index.ts" + ], + "exclude": [ + "node_modules", + "out", + "dist" + ] + } \ No newline at end of file
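
A note on how the renamed .ts tests consume the existing CommonJS sources under this tsconfig: with esModuleInterop enabled (and allowJs/checkJs pulling the .js sources into the same compilation), a default import resolves to the module's module.exports object, which is why lines such as `import utils from "../../lib/utils"` replace `const utils = require('../../lib/utils')` throughout the diff. The sketch below is illustrative only — the file names and the exported helper are invented for the example and are not part of the driver.

// math-utils.js — a CommonJS module, type-checked because allowJs/checkJs are on
'use strict';
module.exports = {
  clamp: function (value, min, max) {
    return Math.min(Math.max(value, min), max);
  }
};

// clamp-tests.ts — a migrated test file in the style used above;
// with esModuleInterop the default import maps to module.exports
import assert from "assert";
import mathUtils from "./math-utils";

describe('clamp()', function () {
  it('should keep values inside the range', function () {
    assert.strictEqual(mathUtils.clamp(15, 0, 10), 10);
    assert.strictEqual(mathUtils.clamp(-3, 0, 10), 0);
  });
});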