Node JS version

Aravind142857
2023-06-09 20:08:47 -05:00
parent 8983f0dd80
commit a8b8883b11
894 changed files with 152408 additions and 73 deletions


@@ -0,0 +1,338 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const https = require('https');
const fs = require('fs');
const util = require('util');
const AdmZip = require('adm-zip');
const { URL } = require('url');
const errors = require('../../errors');
const utils = require('../../utils');
const { DsePlainTextAuthProvider, NoAuthProvider } = require('../../auth');
// Use the callback-based method fs.readFile() instead of fs.promises as we have to support Node.js 8+
const readFile = util.promisify(fs.readFile);
/**
* When the user sets the cloud options, it uses the secure bundle or endpoint to access the metadata service and
* set the connection options.
* @param {ClientOptions} options
* @returns {Promise<void>}
*/
async function init(options) {
if (!options.cloud) {
return;
}
const cloudOptions = new CloudOptions(options);
await parseZipFile(cloudOptions);
await getMetadataServiceInfoAsync(cloudOptions);
if (!cloudOptions.clientOptions.sslOptions.checkServerIdentity) {
// With SNI enabled, hostname (uuid) and CN will not match
// Use a custom validation function to validate against the proxy address.
// Note: this function is only called if the certificate passed all other checks, like CA validation.
cloudOptions.clientOptions.sslOptions.checkServerIdentity = (_, cert) =>
checkServerIdentity(cert, cloudOptions.clientOptions.sni.address);
}
}
class CloudOptions {
constructor(clientOptions) {
this.clientOptions = clientOptions;
if (clientOptions.cloud.secureConnectBundle) {
this.secureConnectBundle = clientOptions.cloud.secureConnectBundle;
this.serviceUrl = null;
} else {
this.serviceUrl = clientOptions.cloud.endpoint;
}
// Include a log emitter to enable logging within the cloud connection logic
this.logEmitter = clientOptions.logEmitter;
this.contactPoints = null;
this.localDataCenter = null;
}
/**
* Sets the sslOptions in the client options from a given map of zip entries.
* @param {Map<String, Buffer>} zipEntries
*/
setSslOptions(zipEntries) {
this.clientOptions.sslOptions = Object.assign({
ca: [zipEntries.get('ca.crt') ],
cert: zipEntries.get('cert'),
key: zipEntries.get('key'),
rejectUnauthorized: true
}, this.clientOptions.sslOptions);
}
/**
* Sets the authentication provider using the bundle credentials, unless the user already provided one.
* @param username
* @param password
*/
setAuthProvider(username, password) {
if (!username || !password) {
return;
}
if (this.clientOptions.authProvider && !(this.clientOptions.authProvider instanceof NoAuthProvider)) {
// There is an auth provider set by the user
return;
}
this.clientOptions.authProvider = new DsePlainTextAuthProvider(username, password);
}
}
/**
* @param {CloudOptions} cloudOptions
* @returns {Promise<void>}
*/
async function parseZipFile(cloudOptions) {
if (cloudOptions.serviceUrl) {
// Service URL was already provided
return;
}
if (!cloudOptions.secureConnectBundle) {
throw new TypeError('secureConnectBundle must be provided');
}
const data = await readFile(cloudOptions.secureConnectBundle);
const zip = new AdmZip(data);
const zipEntries = new Map(zip.getEntries().map(e => [e.entryName, e.getData()]));
if (!zipEntries.get('config.json')) {
throw new TypeError('Config file must be contained in secure bundle');
}
const config = JSON.parse(zipEntries.get('config.json').toString('utf8'));
if (!config['host'] || !config['port']) {
throw new TypeError('Config file must include host and port information');
}
cloudOptions.serviceUrl = `${config['host']}:${config['port']}/metadata`;
cloudOptions.setSslOptions(zipEntries);
cloudOptions.setAuthProvider(config.username, config.password);
}
/**
* Gets the information retrieved from the metadata service.
* Invokes the callback with {proxyAddress, localDataCenter, contactPoints} as result
* @param {CloudOptions} cloudOptions
* @param {Function} callback
*/
function getMetadataServiceInfo(cloudOptions, callback) {
const regex = /^(.+?):(\d+)(.*)$/;
const matches = regex.exec(cloudOptions.serviceUrl);
callback = utils.callbackOnce(callback);
if (!matches || matches.length !== 4) {
throw new TypeError('url should be composed of host, port number and path, without scheme');
}
const requestOptions = Object.assign({
hostname: matches[1],
port: matches[2],
path: matches[3] || undefined,
timeout: cloudOptions.clientOptions.socketOptions.connectTimeout
}, cloudOptions.clientOptions.sslOptions);
const req = https.get(requestOptions, res => {
let data = '';
utils.log('verbose', `Connected to metadata service with SSL/TLS protocol ${res.socket.getProtocol()}`, {}, cloudOptions);
res
.on('data', chunk => data += chunk.toString())
.on('end', () => {
if (res.statusCode !== 200) {
return callback(getServiceRequestError(new Error(`Obtained http status ${res.statusCode}`), requestOptions));
}
let message;
try {
message = JSON.parse(data);
if (!message || !message['contact_info']) {
throw new TypeError('contact_info should be defined in response');
}
} catch (err) {
return callback(getServiceRequestError(err, requestOptions, true));
}
const contactInfo = message['contact_info'];
// Set the connect options
cloudOptions.clientOptions.contactPoints = contactInfo['contact_points'];
cloudOptions.clientOptions.localDataCenter = contactInfo['local_dc'];
cloudOptions.clientOptions.sni = { address: contactInfo['sni_proxy_address'] };
callback();
});
});
req.on('error', err => callback(getServiceRequestError(err, requestOptions)));
// We need to both set the timeout in the requestOptions and invoke ClientRequest#setTimeout()
// to handle all possible scenarios (tested with only one of the two and it didn't fully work).
// Also set the timeout handler: aborting will emit 'error' and close the request.
req.setTimeout(cloudOptions.clientOptions.socketOptions.connectTimeout, () => req.abort());
}
const getMetadataServiceInfoAsync = util.promisify(getMetadataServiceInfo);
/**
* Returns an Error that wraps the inner error obtained while fetching metadata information.
* @private
*/
function getServiceRequestError(err, requestOptions, isParsingError) {
const message = isParsingError
? 'There was an error while parsing the metadata service information'
: 'There was an error fetching the metadata information';
const url = `${requestOptions.hostname}:${requestOptions.port}${(requestOptions.path) ? requestOptions.path : '/'}`;
return new errors.NoHostAvailableError({ [url] : err }, message);
}
/**
* @param {{subject: {CN: string}, subjectaltname: string?}} cert A certificate object as defined by
* TLS module https://nodejs.org/docs/latest-v12.x/api/tls.html#tls_certificate_object
* @param {string} sniAddress
* @returns {Error|undefined} Similar to tls.checkServerIdentity() returns an Error object, populating it with reason,
* host, and cert on failure. Otherwise, it returns undefined.
* @internal
* @ignore
*/
function checkServerIdentity(cert, sniAddress) {
// Based on logic defined by the Node.js Core module
// https://github.com/nodejs/node/blob/ff48009fefcecedfee2c6ff1719e5be3f6969049/lib/tls.js#L212-L290
// The SNI address is composed of hostname and port
const hostName = sniAddress.split(':')[0];
const altNames = cert.subjectaltname;
const cn = cert.subject.CN;
if (hostName === cn) {
// quick check based on common name
return undefined;
}
const parsedAltNames = [];
if (altNames) {
for (const name of altNames.split(', ')) {
if (name.startsWith('DNS:')) {
parsedAltNames.push(name.slice(4));
} else if (name.startsWith('URI:')) {
parsedAltNames.push(new URL(name.slice(4)).hostname);
}
}
}
const hostParts = hostName.split('.');
const wildcard = (pattern) => checkParts(hostParts, pattern);
let valid;
if (parsedAltNames.length > 0) {
valid = parsedAltNames.some(wildcard);
} else {
// Use the common name
valid = wildcard(cn);
}
if (!valid) {
const error = new Error(`Host: ${hostName} is not cert's CN/altnames: ${cn} / ${altNames}`);
error.reason = error.message;
error.host = hostName;
error.cert = cert;
return error;
}
}
/**
* Simplified version of Node.js tls core lib check() function
* https://github.com/nodejs/node/blob/ff48009fefcecedfee2c6ff1719e5be3f6969049/lib/tls.js#L148-L209
* @private
* @returns {boolean}
*/
function checkParts(hostParts, pattern) {
// Empty strings, null, undefined, etc. never match.
if (!pattern) {
return false;
}
const patternParts = pattern.split('.');
if (hostParts.length !== patternParts.length) {
return false;
}
// Check host parts from right to left first.
for (let i = hostParts.length - 1; i > 0; i -= 1) {
if (hostParts[i] !== patternParts[i]) {
return false;
}
}
const hostSubdomain = hostParts[0];
const patternSubdomain = patternParts[0];
const patternSubdomainParts = patternSubdomain.split('*');
// Short-circuit when the subdomain does not contain a wildcard.
// RFC 6125 does not allow wildcard substitution for components
// containing IDNA A-labels (Punycode) so match those verbatim.
if (patternSubdomainParts.length === 1 || patternSubdomain.includes('xn--')) {
return hostSubdomain === patternSubdomain;
}
// More than one wildcard is always wrong.
if (patternSubdomainParts.length > 2) {
return false;
}
// *.tld wildcards are not allowed.
if (patternParts.length <= 2) {
return false;
}
const [prefix, suffix] = patternSubdomainParts;
if (prefix.length + suffix.length > hostSubdomain.length) {
return false;
}
if (!hostSubdomain.startsWith(prefix)) {
return false;
}
if (!hostSubdomain.endsWith(suffix)) {
return false;
}
return true;
}
module.exports = {
checkServerIdentity,
init
};
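// Illustrative usage sketch (not part of the module's API): how the exported checkServerIdentity()
// applies the wildcard matching implemented by checkParts() above. The certificate object, names
// and port below are made-up sample values shaped like the JSDoc describes.
function demoCheckServerIdentity() {
  const cert = {
    subject: { CN: '*.db.sample-proxy.net' },
    subjectaltname: 'DNS:*.db.sample-proxy.net'
  };
  // The wildcard covers only the left-most label, so this host matches (returns undefined).
  const ok = checkServerIdentity(cert, 'a1b2c3d4.db.sample-proxy.net:9042');
  // Wildcard substitution never spans multiple labels, so this one fails (returns an Error).
  const failed = checkServerIdentity(cert, 'extra.a1b2c3d4.db.sample-proxy.net:9042');
  return { ok, failed };
}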


@@ -0,0 +1,99 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers');
const types = require('../../types');
const Encoder = require('../../encoder');
const { dataTypes } = types;
function getTypeDefinitionByValue(value) {
if (value instanceof types.Tuple) {
return {
'cqlType': 'tuple',
'definition': value.elements.map(getTypeDefinitionByValue)
};
}
if (value instanceof Map) {
// Try to guess the types of the key and value based on the first element
const result = { 'cqlType': 'map' };
if (value.size > 0) {
const first = value.entries().next().value;
result['definition'] = first.map(getTypeDefinitionByValue);
}
return result;
}
if (value instanceof UdtGraphWrapper) {
return getUdtTypeDefinitionByValue(value);
}
let type;
if (value instanceof GraphTypeWrapper) {
type = value.typeInfo;
} else {
type = Encoder.guessDataType(value);
}
if (!type) {
return null;
}
return getDefinitionByType(type);
}
function getDefinitionByType(type) {
if (type.code === dataTypes.udt) {
return getUdtTypeDefinition(type.info);
}
if (type.code === dataTypes.tuple || type.code === dataTypes.map) {
return {
'cqlType': types.getDataTypeNameByCode(type),
'definition': type.info.map(getDefinitionByType)
};
}
if (type.code === dataTypes.list || type.code === dataTypes.set) {
return {
'cqlType': type.code === dataTypes.list ? 'list' : 'set',
'definition': [ getDefinitionByType(type.info) ]
};
}
return { 'cqlType': types.getDataTypeNameByCode(type) };
}
function getUdtTypeDefinition(udtInfo) {
return {
'cqlType': 'udt',
'keyspace': udtInfo.keyspace,
'name': udtInfo.name,
'definition': udtInfo.fields.map(field =>
// fieldName should be the first property serialized
Object.assign({ 'fieldName': field.name }, getDefinitionByType(field.type))
),
};
}
function getUdtTypeDefinitionByValue(wrappedValue) {
return getUdtTypeDefinition(wrappedValue.udtInfo);
}
module.exports = { getTypeDefinitionByValue, getUdtTypeDefinitionByValue };
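// Illustrative sketch (hypothetical helper, sample values): the shape of the definitions produced
// by getTypeDefinitionByValue() for a Tuple and a Map, following the logic above.
function demoTypeDefinitions() {
  // A Tuple produces a 'tuple' definition with one entry per element, each guessed from the value.
  const tupleDef = getTypeDefinitionByValue(new types.Tuple(1, 'a'));
  // A Map produces a 'map' definition whose key/value entries are guessed from its first entry.
  const mapDef = getTypeDefinitionByValue(new Map([['k', 10]]));
  return { tupleDef, mapDef };
}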


@@ -0,0 +1,362 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const types = require('../../types');
const utils = require('../../utils');
const { getTypeDefinitionByValue, getUdtTypeDefinitionByValue } = require('./complex-type-helper');
const { Point, Polygon, LineString } = require('../../geometry');
const { Edge } = require('./structure');
const { GraphTypeWrapper, UdtGraphWrapper } = require('./wrappers');
const { Tuple, dataTypes } = types;
const typeKey = '@type';
const valueKey = '@value';
class EdgeDeserializer {
constructor() {
this.key = 'g:Edge';
}
deserialize(obj) {
const value = obj[valueKey];
return new Edge(this.reader.read(value['id']), this.reader.read(value['outV']), value['outVLabel'], value['label'], this.reader.read(value['inV']), value['inVLabel'], this.reader.read(value['properties']));
}
}
/**
* Uses toString() instance method and fromString() static method to serialize and deserialize the value.
* @abstract
* @private
*/
class StringBasedTypeSerializer {
/**
* Creates a new instance of the deserializer.
* @param {String} key
* @param {Function} targetType
*/
constructor(key, targetType) {
if (!key) {
throw new Error('Deserializer must provide a type key');
}
if (!targetType) {
throw new Error('Deserializer must provide a target type');
}
this.key = key;
this.targetType = targetType;
}
deserialize(obj) {
let value = obj[valueKey];
if (typeof value !== 'string') {
value = value.toString();
}
return this.targetType.fromString(value);
}
serialize(value) {
return {
[typeKey]: this.key,
[valueKey]: value.toString()
};
}
canBeUsedFor(value) {
return value instanceof this.targetType;
}
}
class UuidSerializer extends StringBasedTypeSerializer {
constructor() {
super('g:UUID', types.Uuid);
}
}
class LongSerializer extends StringBasedTypeSerializer {
constructor() {
super('g:Int64', types.Long);
}
}
class BigDecimalSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:BigDecimal', types.BigDecimal);
}
}
class BigIntegerSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:BigInteger', types.Integer);
}
}
class InetAddressSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:InetAddress', types.InetAddress);
}
}
class LocalDateSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:LocalDate', types.LocalDate);
}
}
class LocalTimeSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:LocalTime', types.LocalTime);
}
}
class InstantSerializer extends StringBasedTypeSerializer {
constructor() {
super('gx:Instant', Date);
}
serialize(item) {
return {
[typeKey]: this.key,
[valueKey]: item.toISOString()
};
}
deserialize(obj) {
return new Date(obj[valueKey]);
}
}
class BlobSerializer extends StringBasedTypeSerializer {
constructor() {
super('dse:Blob', Buffer);
}
deserialize(obj) {
return utils.allocBufferFromString(obj[valueKey], 'base64');
}
serialize(item) {
return {
[typeKey]: this.key,
[valueKey]: item.toString('base64')
};
}
}
class PointSerializer extends StringBasedTypeSerializer {
constructor() {
super('dse:Point', Point);
}
}
class LineStringSerializer extends StringBasedTypeSerializer {
constructor() {
super('dse:LineString', LineString);
}
}
class PolygonSerializer extends StringBasedTypeSerializer {
constructor() {
super('dse:Polygon', Polygon);
}
}
class TupleSerializer {
constructor() {
this.key = 'dse:Tuple';
}
deserialize(obj) {
// Skip definitions and go to the value
const value = obj[valueKey]['value'];
if (!Array.isArray(value)) {
throw new Error('Expected Array, obtained: ' + value);
}
const result = [];
for (const element of value) {
result.push(this.reader.read(element));
}
return Tuple.fromArray(result);
}
/** @param {Tuple} tuple */
serialize(tuple) {
const result = {
'cqlType': 'tuple',
'definition': tuple.elements.map(getTypeDefinitionByValue),
'value': tuple.elements.map(e => this.writer.adaptObject(e))
};
return {
[typeKey]: this.key,
[valueKey]: result
};
}
canBeUsedFor(value) {
return value instanceof Tuple;
}
}
class DurationSerializer {
constructor() {
this.key = 'dse:Duration';
}
deserialize(obj) {
// Skip definitions and go to the value
const value = obj[valueKey];
return new types.Duration(
this.reader.read(value['months']), this.reader.read(value['days']), this.reader.read(value['nanos']));
}
/** @param {Duration} value */
serialize(value) {
return {
[typeKey]: this.key,
[valueKey]: {
'months': value['months'],
'days': value['days'],
'nanos': value['nanoseconds'],
}
};
}
canBeUsedFor(value) {
return value instanceof types.Duration;
}
}
class UdtSerializer {
constructor() {
this.key = 'dse:UDT';
}
deserialize(obj) {
// Skip definitions and go to the value
const valueRoot = obj[valueKey];
const result = {};
const value = valueRoot['value'];
valueRoot['definition'].forEach((definition, index) => {
result[definition.fieldName] = this.reader.read(value[index]);
});
return result;
}
serialize(udtWrapper) {
const serializedValue = getUdtTypeDefinitionByValue(udtWrapper);
// New properties can be added to the existing object without need to clone
// as getTypeDefinition() returns a new object each time
serializedValue['value'] = Object.entries(udtWrapper.value).map(([_, v]) => this.writer.adaptObject(v));
return {
[typeKey]: this.key,
[valueKey]: serializedValue
};
}
canBeUsedFor(value) {
return value instanceof UdtGraphWrapper;
}
}
class InternalSerializer {
constructor(name, transformFn) {
this._name = name;
this._transformFn = transformFn || (x => x);
}
serialize(item) {
return {
[typeKey]: this._name,
[valueKey]: this._transformFn(item)
};
}
}
// Associative array of graph type name by CQL type code, used by the type wrapper
const graphSONSerializerByCqlType = {
[dataTypes.int]: new InternalSerializer('g:Int32'),
[dataTypes.bigint]: new InternalSerializer('g:Int64'),
[dataTypes.double]: new InternalSerializer('g:Double'),
[dataTypes.float]: new InternalSerializer('g:Float'),
[dataTypes.timestamp]: new InternalSerializer('g:Timestamp', x => x.getTime())
};
class GraphTypeWrapperSerializer {
constructor() {
// Use a fixed name that doesn't conflict with TinkerPop and DS Graph
this.key = 'client:wrapper';
}
serialize(wrappedValue) {
const s = graphSONSerializerByCqlType[wrappedValue.typeInfo.code];
if (!s) {
throw new Error(`No serializer found for wrapped value ${wrappedValue}`);
}
return s.serialize(wrappedValue.value);
}
canBeUsedFor(value) {
return value instanceof GraphTypeWrapper;
}
}
const serializersArray = [
EdgeDeserializer,
UuidSerializer,
LongSerializer,
BigDecimalSerializer,
BigIntegerSerializer,
InetAddressSerializer,
LocalDateSerializer,
LocalTimeSerializer,
InstantSerializer,
BlobSerializer,
PointSerializer,
LineStringSerializer,
PolygonSerializer,
TupleSerializer,
UdtSerializer,
GraphTypeWrapperSerializer,
DurationSerializer
];
function getCustomSerializers() {
const customSerializers = {};
serializersArray.forEach(sConstructor => {
const instance = new sConstructor();
if (!instance.key) {
throw new TypeError(`Key for ${sConstructor} instance not set`);
}
customSerializers[instance.key] = instance;
});
return customSerializers;
}
module.exports = getCustomSerializers;
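// Illustrative sketch (hypothetical helper): getCustomSerializers() returns an associative array
// keyed by each serializer's GraphSON type name, which is the shape the GraphSON2/3 readers and
// writers in this commit expect for their `serializers` option.
function demoCustomSerializers() {
  const serializers = getCustomSerializers();
  // For example, serializers['g:UUID'] is the UuidSerializer instance and
  // serializers['dse:Tuple'] is the TupleSerializer instance.
  return Object.keys(serializers);
}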


@@ -0,0 +1,280 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const utils = require('../../utils');
const policies = require('../../policies');
const GraphResultSet = require('./result-set');
const { GraphSON2Reader, GraphSON2Writer, GraphSON3Reader, GraphSON3Writer } = require('./graph-serializer');
const getCustomTypeSerializers = require('./custom-type-serializers');
const { GraphExecutionOptions, graphProtocol } = require('./options');
const graphLanguageGroovyString = 'gremlin-groovy';
const graphEngineCore = 'Core';
const graphSON2Reader = new GraphSON2Reader({ serializers: getCustomTypeSerializers() });
const graphSON2Writer = new GraphSON2Writer({ serializers: getCustomTypeSerializers() });
const graphSON3Reader = new GraphSON3Reader({ serializers: getCustomTypeSerializers() });
const graphSON3Writer = new GraphSON3Writer({ serializers: getCustomTypeSerializers() });
const rowParsers = new Map([
[ graphProtocol.graphson2, getRowParser(graphSON2Reader) ],
[ graphProtocol.graphson3, getRowParser(graphSON3Reader) ]
]);
const defaultWriters = new Map([
[ graphProtocol.graphson1, x => JSON.stringify(x) ],
[ graphProtocol.graphson2, getDefaultWriter(graphSON2Writer) ],
[ graphProtocol.graphson3, getDefaultWriter(graphSON3Writer) ]
]);
/**
* Internal class that contains the logic for executing a graph traversal.
* @ignore
*/
class GraphExecutor {
/**
* Creates a new instance of GraphExecutor.
* @param {Client} client
* @param {ClientOptions} rawOptions
* @param {Function} handler
*/
constructor(client, rawOptions, handler) {
this._client = client;
this._handler = handler;
// Retrieve the retry policy for the default profile to determine if it was specified
this._defaultProfileRetryPolicy = client.profileManager.getDefaultConfiguredRetryPolicy();
// Use graphBaseOptions as a way to gather all defaults that affect graph executions
this._graphBaseOptions = utils.extend({
executeAs: client.options.queryOptions.executeAs,
language: graphLanguageGroovyString,
source: 'g',
readTimeout: 0,
// As the default retry policy might retry non-idempotent queries,
// use for graph queries a default retry policy that does not retry
retry: new policies.retry.FallthroughRetryPolicy()
}, rawOptions.graphOptions, client.profileManager.getDefault().graphOptions);
if (this._graphBaseOptions.readTimeout === null) {
this._graphBaseOptions.readTimeout = client.options.socketOptions.readTimeout;
}
}
/**
* Executes the graph traversal.
* @param {String|Object} query
* @param {Object} parameters
* @param {GraphQueryOptions} options
*/
async send(query, parameters, options) {
if (Array.isArray(parameters)) {
throw new TypeError('Parameters must be an Object instance used as an associative array');
}
if (!query) {
throw new TypeError('Query must be defined');
}
const execOptions = new GraphExecutionOptions(
options, this._client, this._graphBaseOptions, this._defaultProfileRetryPolicy);
if (execOptions.getGraphSource() === 'a') {
const host = await this._getAnalyticsMaster();
execOptions.setPreferredHost(host);
}
// A query object that allows plugging in any executable thing
const isQueryObject = typeof query === 'object' && query.graphLanguage && query.value && query.queryWriterFactory;
if (isQueryObject) {
// Use the provided graph language to override the current
execOptions.setGraphLanguage(query.graphLanguage);
}
this._setGraphProtocol(execOptions);
execOptions.setGraphPayload();
parameters = GraphExecutor._buildGraphParameters(parameters, execOptions.getGraphSubProtocol());
if (typeof query !== 'string') {
// It's a traversal that needs to be converted
// Transforming the provided query into a traversal requires the protocol to be set first.
// Query writer factory can be defined in the options or in the query object
let queryWriter = execOptions.getQueryWriter();
if (isQueryObject) {
queryWriter = query.queryWriterFactory(execOptions.getGraphSubProtocol());
} else if (!queryWriter) {
queryWriter = GraphExecutor._writerFactory(execOptions.getGraphSubProtocol());
}
query = queryWriter(!isQueryObject ? query : query.value);
}
return await this._executeGraphQuery(query, parameters, execOptions);
}
/**
* Sends the graph traversal.
* @param {string} query
* @param {object} parameters
* @param {GraphExecutionOptions} execOptions
* @returns {Promise<GraphResultSet>}
* @private
*/
async _executeGraphQuery(query, parameters, execOptions) {
const result = await this._handler.call(this._client, query, parameters, execOptions);
// Instances of rowParser transform Row instances into Traverser instances.
// A Traverser instance is an object with the following form: { object: any, bulk: number }
const rowParser = execOptions.getRowParser() || GraphExecutor._rowParserFactory(execOptions.getGraphSubProtocol());
return new GraphResultSet(result, rowParser);
}
/**
* Uses the RPC call to obtain the analytics master host.
* @returns {Promise<Host|null>}
* @private
*/
async _getAnalyticsMaster() {
try {
const result = await this._client.execute('CALL DseClientTool.getAnalyticsGraphServer()', utils.emptyArray);
if (result.rows.length === 0) {
this._client.log('verbose',
'Empty response querying graph analytics server, query will not be routed optimally');
return null;
}
const resultField = result.rows[0]['result'];
if (!resultField || !resultField['location']) {
this._client.log('verbose',
'Unexpected response querying graph analytics server, query will not be routed optimally',
result.rows[0]);
return null;
}
const hostName = resultField['location'].substr(0, resultField['location'].lastIndexOf(':'));
const addressTranslator = this._client.options.policies.addressResolution;
return await new Promise(resolve => {
addressTranslator.translate(hostName, this._client.options.protocolOptions.port, (endpoint) =>
resolve(this._client.hosts.get(endpoint)));
});
} catch (err) {
this._client.log('verbose', 'Error querying graph analytics server, query will not be routed optimally', err);
return null;
}
}
/**
* Resolves what protocol should be used for decoding graph results for the given execution.
*
* <p>Resolution is done in the following manner if graphResults is not set:</p>
*
* <ul>
* <li>If the graph name is set and the associated keyspace's graph engine is set to "Core", use {@link
* graphProtocol#graphson3}.
* <li>Else, if the graph language is not 'gremlin-groovy', use {@link graphProtocol#graphson2}
* <li>Otherwise, use {@link graphProtocol#graphson1}
* </ul>
* @param {GraphExecutionOptions} execOptions
*/
_setGraphProtocol(execOptions) {
let protocol = execOptions.getGraphSubProtocol();
if (protocol) {
return;
}
if (execOptions.getGraphName()) {
const keyspace = this._client.metadata.keyspaces[execOptions.getGraphName()];
if (keyspace && keyspace.graphEngine === graphEngineCore) {
protocol = graphProtocol.graphson3;
}
}
if (!protocol) {
// Decide the minimal version supported by the graph language
if (execOptions.getGraphLanguage() === graphLanguageGroovyString) {
protocol = graphProtocol.graphson1;
} else {
protocol = graphProtocol.graphson2;
}
}
execOptions.setGraphSubProtocol(protocol);
}
/**
* Only GraphSON1 parameters are supported.
* @param {Array|function|null} parameters
* @param {string} protocol
* @returns {string[]|null}
* @private
*/
static _buildGraphParameters(parameters, protocol) {
if (!parameters || typeof parameters !== 'object') {
return null;
}
const queryWriter = GraphExecutor._writerFactory(protocol);
return [
(protocol !== graphProtocol.graphson1 && protocol !== graphProtocol.graphson2)
? queryWriter(new Map(Object.entries(parameters)))
: queryWriter(parameters)
];
}
static _rowParserFactory(protocol) {
const handler = rowParsers.get(protocol);
if (!handler) {
// Default to no row parser
return null;
}
return handler;
}
static _writerFactory(protocol) {
const handler = defaultWriters.get(protocol);
if (!handler) {
throw new Error(`No writer defined for protocol ${protocol}`);
}
return handler;
}
}
function getRowParser(reader) {
return row => {
const item = reader.read(JSON.parse(row['gremlin']));
return { object: item['result'], bulk: item['bulk'] || 1 };
};
}
function getDefaultWriter(writer) {
return value => writer.write(value);
}
module.exports = GraphExecutor;
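// Illustrative sketch (hypothetical helper) of the protocol resolution performed by
// _setGraphProtocol() above, written as a standalone function for readability. In the real
// method the graph engine is only inspected when a graph name is set and keyspace metadata exists.
function resolveGraphProtocolExample(keyspaceGraphEngine, graphLanguage) {
  if (keyspaceGraphEngine === graphEngineCore) {
    // A "Core" graph engine always uses GraphSON3.
    return graphProtocol.graphson3;
  }
  // Otherwise pick the minimal protocol supported by the graph language.
  return graphLanguage === graphLanguageGroovyString ? graphProtocol.graphson1 : graphProtocol.graphson2;
}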


@@ -0,0 +1,260 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @module datastax/graph/tinkerpop/graphSerializers
* @ignore
*/
/**
* @author Jorge Bay Gondra
*/
'use strict';
const typeSerializers = require('./type-serializers');
/**
* GraphSON2 writer.
*/
class GraphSON2Writer {
/**
* @param {Object} [options]
* @param {Object} [options.serializers] An object used as an associative array with GraphSON 2 type name as keys and
* serializer instances as values, e.g.: { 'g:Int64': longSerializer }.
* @constructor
*/
constructor(options) {
this._options = options || {};
// Create instance of the default serializers
this._serializers = this.getDefaultSerializers().map(serializerConstructor => {
const s = new serializerConstructor();
s.writer = this;
return s;
});
const customSerializers = this._options.serializers || {};
Object.keys(customSerializers).forEach(key => {
const s = customSerializers[key];
if (!s.serialize) {
return;
}
s.writer = this;
// Insert custom serializers first
this._serializers.unshift(s);
});
}
/**
* Gets the default serializers to be used.
* @returns {Array}
*/
getDefaultSerializers() {
return graphSON2Serializers;
}
adaptObject(value) {
let s;
for (let i = 0; i < this._serializers.length; i++) {
const currentSerializer = this._serializers[i];
if (currentSerializer.canBeUsedFor && currentSerializer.canBeUsedFor(value)) {
s = currentSerializer;
break;
}
}
if (s) {
return s.serialize(value);
}
if (Array.isArray(value)) {
// We need to handle arrays when there is no serializer
// for older versions of GraphSON
return value.map(item => this.adaptObject(item));
}
// Default (strings / objects / ...)
return value;
}
/**
* Returns the GraphSON representation of the provided object instance.
* @param {Object} obj
* @returns {String}
*/
write(obj) {
return JSON.stringify(this.adaptObject(obj));
}
}
/**
* GraphSON3 writer.
*/
class GraphSON3Writer extends GraphSON2Writer {
getDefaultSerializers() {
return graphSON3Serializers;
}
}
/**
* GraphSON2 reader.
*/
class GraphSON2Reader {
/**
* GraphSON Reader
* @param {Object} [options]
* @param {Object} [options.serializers] An object used as an associative array with GraphSON 2 type name as keys and
* deserializer instances as values, e.g.: { 'g:Int64': longSerializer }.
* @constructor
*/
constructor(options) {
this._options = options || {};
this._deserializers = {};
const defaultDeserializers = this.getDefaultDeserializers();
Object.keys(defaultDeserializers).forEach(typeName => {
const serializerConstructor = defaultDeserializers[typeName];
const s = new serializerConstructor();
s.reader = this;
this._deserializers[typeName] = s;
});
if (this._options.serializers) {
const customSerializers = this._options.serializers || {};
Object.keys(customSerializers).forEach(key => {
const s = customSerializers[key];
if (!s.deserialize) {
return;
}
s.reader = this;
this._deserializers[key] = s;
});
}
}
/**
* Gets the default deserializers as an associative array.
* @returns {Object}
*/
getDefaultDeserializers() {
return graphSON2Deserializers;
}
read(obj) {
if (obj === undefined) {
return undefined;
}
if (obj === null) {
return null;
}
if (Array.isArray(obj)) {
return obj.map(item => this.read(item));
}
const type = obj[typeSerializers.typeKey];
if (type) {
const d = this._deserializers[type];
if (d) {
// Use type serializer
return d.deserialize(obj);
}
return obj[typeSerializers.valueKey];
}
if (obj && typeof obj === 'object' && obj.constructor === Object) {
return this._deserializeObject(obj);
}
// Default (for boolean, number and other scalars)
return obj;
}
_deserializeObject(obj) {
const keys = Object.keys(obj);
const result = {};
for (let i = 0; i < keys.length; i++) {
result[keys[i]] = this.read(obj[keys[i]]);
}
return result;
}
}
/**
* GraphSON3 reader.
*/
class GraphSON3Reader extends GraphSON2Reader {
getDefaultDeserializers() {
return graphSON3Deserializers;
}
}
const graphSON2Deserializers = {
'g:Traverser': typeSerializers.TraverserSerializer,
'g:TraversalStrategy': typeSerializers.TraversalStrategySerializer,
'g:Int32': typeSerializers.NumberSerializer,
'g:Int64': typeSerializers.NumberSerializer,
'g:Float': typeSerializers.NumberSerializer,
'g:Double': typeSerializers.NumberSerializer,
'g:Date': typeSerializers.DateSerializer,
'g:Direction': typeSerializers.DirectionSerializer,
'g:Vertex': typeSerializers.VertexSerializer,
'g:Edge': typeSerializers.EdgeSerializer,
'g:VertexProperty': typeSerializers.VertexPropertySerializer,
'g:Property': typeSerializers.PropertySerializer,
'g:Path': typeSerializers.Path3Serializer,
'g:TextP': typeSerializers.TextPSerializer,
'g:T': typeSerializers.TSerializer,
'g:BulkSet': typeSerializers.BulkSetSerializer
};
const graphSON3Deserializers = Object.assign({}, graphSON2Deserializers, {
'g:List': typeSerializers.ListSerializer,
'g:Set': typeSerializers.SetSerializer,
'g:Map': typeSerializers.MapSerializer
});
const graphSON2Serializers = [
typeSerializers.NumberSerializer,
typeSerializers.DateSerializer,
typeSerializers.BytecodeSerializer,
typeSerializers.TraverserSerializer,
typeSerializers.TraversalStrategySerializer,
typeSerializers.PSerializer,
typeSerializers.TextPSerializer,
typeSerializers.LambdaSerializer,
typeSerializers.EnumSerializer,
typeSerializers.VertexSerializer,
typeSerializers.EdgeSerializer,
typeSerializers.LongSerializer
];
const graphSON3Serializers = graphSON2Serializers.concat([
typeSerializers.ListSerializer,
typeSerializers.SetSerializer,
typeSerializers.MapSerializer
]);
module.exports = {
GraphSON3Writer,
GraphSON3Reader,
GraphSON2Writer,
GraphSON2Reader,
GraphSONWriter: GraphSON3Writer,
GraphSONReader: GraphSON3Reader
};
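// Illustrative round-trip sketch (hypothetical helper): writing a value with GraphSON3Writer and
// reading it back with GraphSON3Reader. The `customSerializers` argument is assumed to be the map
// produced by the custom type serializers in this commit (e.g. keyed by 'g:UUID', 'dse:Tuple').
function demoGraphSonRoundTrip(customSerializers, value) {
  const writer = new GraphSON3Writer({ serializers: customSerializers });
  const reader = new GraphSON3Reader({ serializers: customSerializers });
  // write() produces a GraphSON string, e.g. '{"@type":"g:UUID","@value":"..."}' for a Uuid value.
  const json = writer.write(value);
  // read() walks the parsed JSON and applies the matching deserializer by its '@type' key.
  return reader.read(JSON.parse(json));
}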


@@ -0,0 +1,92 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { types } from '../../types';
export namespace graph {
interface Edge extends Element {
outV?: Vertex;
outVLabel?: string;
inV?: Vertex;
inVLabel?: string;
properties?: object;
}
interface Element {
id: any;
label: string;
}
class GraphResultSet implements Iterator<any> {
constructor(rs: types.ResultSet);
first(): any;
toArray(): any[];
values(): Iterator<any>;
next(value?: any): IteratorResult<any>;
}
interface Path {
labels: any[];
objects: any[];
}
interface Property {
value: any
key: any
}
interface Vertex extends Element {
properties?: { [key: string]: any[] }
}
interface VertexProperty extends Element {
value: any
key: string
properties?: any
}
function asDouble(value: number): object;
function asFloat(value: number): object;
function asInt(value: number): object;
function asTimestamp(value: Date): object;
function asUdt(value: object): object;
interface EnumValue {
toString(): string
}
namespace t {
const id: EnumValue;
const key: EnumValue;
const label: EnumValue;
const value: EnumValue;
}
namespace direction {
// `in` is a reserved word
const in_: EnumValue;
const out: EnumValue;
const both: EnumValue;
}
}


@@ -0,0 +1,82 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/**
* Graph module.
* @module datastax/graph
*/
const GraphResultSet = require('./result-set');
const getCustomTypeSerializers = require('./custom-type-serializers');
const { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper} = require('./wrappers');
const { Edge, Element, Path, Property, Vertex, VertexProperty } = require('./structure');
class EnumValue {
constructor(typeName, elementName) {
this.typeName = typeName;
this.elementName = elementName;
}
toString() {
return this.elementName;
}
}
/**
* Represents a collection of tokens for more concise Traversal definitions.
*/
const t = {
id: new EnumValue('T', 'id'),
key: new EnumValue('T', 'key'),
label: new EnumValue('T', 'label'),
value: new EnumValue('T', 'value'),
};
/**
* Represents the edge direction.
*/
const direction = {
'both': new EnumValue('Direction', 'BOTH'),
'in': new EnumValue('Direction', 'IN'),
'out': new EnumValue('Direction', 'OUT')
};
// `in` is a reserved keyword depending on the context
// TinkerPop JavaScript GLV only exposes `in` but it can lead to issues for TypeScript users and others.
// Expose an extra property to represent `Direction.IN`.
direction.in_ = direction.in;
module.exports = {
Edge,
Element,
Path,
Property,
Vertex,
VertexProperty,
asInt,
asDouble,
asFloat,
asTimestamp,
asUdt,
direction,
getCustomTypeSerializers,
GraphResultSet,
GraphTypeWrapper,
t,
UdtGraphWrapper
};
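// Illustrative sketch (hypothetical helper, sample values) of the exported helpers: the as*()
// wrappers tag a JavaScript value with an explicit CQL type for graph parameters (presumably a
// GraphTypeWrapper, see ./wrappers), and direction.in_ is the TypeScript-friendly alias for
// direction['in'] described in the comment above.
function demoGraphModuleExports() {
  const wrappedInt = asInt(42);               // value explicitly typed as CQL int
  const wrappedTs = asTimestamp(new Date());  // value explicitly typed as CQL timestamp
  const incoming = direction.in_;             // same EnumValue instance as direction['in']
  return { wrappedInt, wrappedTs, incoming: incoming.toString() }; // toString() -> 'IN'
}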


@@ -0,0 +1,334 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const util = require('util');
const types = require('../../types');
const utils = require('../../utils');
const { DefaultExecutionOptions, proxyExecuteKey } = require('../../execution-options');
const Long = types.Long;
let consistencyNames;
const graphProtocol = Object.freeze({
graphson1: 'graphson-1.0',
graphson2: 'graphson-2.0',
graphson3: 'graphson-3.0'
});
const payloadKeys = Object.freeze({
language: 'graph-language',
source: 'graph-source',
name: 'graph-name',
results: 'graph-results',
writeConsistency: 'graph-write-consistency',
readConsistency: 'graph-read-consistency',
timeout: 'request-timeout'
});
/**
* Graph options that extends {@link QueryOptions}.
* <p>
* Consider using [execution profiles]{@link ExecutionProfile} if you plan to reuse options across different
* query executions.
* </p>
* @typedef {QueryOptions} module:datastax/graph~GraphQueryOptions
* @property {String} [graphLanguage] The graph language to use in graph queries.
* @property {String} [graphResults] The protocol to use for serializing and deserializing graph results.
* <p>
* Note that this value should rarely be set by users and will otherwise be unset. When unset the server resolves
* the protocol based on the <code>graphLanguage</code> specified.
* </p>
* @property {String} [graphName] The graph name to be used in the query. You can use <code>null</code> to clear the
* value from the <code>DseClientOptions</code> and execute a query without a default graph.
* @property {Number} [graphReadConsistency] Specifies the
* [consistency level]{@link module:types~consistencies}
* to be used for the graph read queries in this execution.
* <p>
* When defined, it overrides the consistency level only for the READ part of the graph query.
* </p>
* @property {String} [graphSource] The graph traversal source name to use in graph queries.
* @property {Number} [graphWriteConsistency] Specifies the [consistency level]{@link module:types~consistencies} to
* be used for the graph write queries in this execution.
* <p>
* When defined, it overrides the consistency level only for the WRITE part of the graph query.
* </p>
* @property {RetryPolicy} [retry] Sets the retry policy to be used for the graph query execution.
* <p>
* When not specified in the {@link GraphQueryOptions} or in the {@link ExecutionProfile}, it will use by default
* a retry policy that does not retry graph executions.
* </p>
*/
/**
* Gets the default options with the custom payload for a given profile.
* @param {ProfileManager} profileManager
* @param baseOptions
* @param {RetryPolicy|null} defaultRetryPolicy
* @param {ExecutionProfile} profile
* @returns {DseClientOptions}
* @private
*/
function getDefaultGraphOptions(profileManager, baseOptions, defaultRetryPolicy, profile) {
return profileManager.getOrCreateGraphOptions(profile, function createDefaultOptions() {
const profileOptions = profile.graphOptions || utils.emptyObject;
const defaultProfile = profileManager.getDefault();
const options = {
customPayload: {
[payloadKeys.language]: utils.allocBufferFromString(profileOptions.language || baseOptions.language),
[payloadKeys.source]: utils.allocBufferFromString(profileOptions.source || baseOptions.source)
},
graphLanguage: profileOptions.language || baseOptions.language,
graphResults: profileOptions.results || baseOptions.results,
graphSource: profileOptions.source || baseOptions.source,
graphName: utils.ifUndefined(profileOptions.name, baseOptions.name)
};
if (profile !== defaultProfile) {
options.retry = profile.retry || baseOptions.retry;
} else {
// Based on an implementation detail of the execution profiles, the retry policy for the default profile is
// always loaded (required), but that doesn't mean that it was specified by the user.
// If it wasn't specified by the user, use the default retry policy for graph statements.
options.retry = defaultRetryPolicy || baseOptions.retry;
}
if (baseOptions.executeAs) {
options.customPayload[proxyExecuteKey] = utils.allocBufferFromString(baseOptions.executeAs);
}
if (options.graphName) {
options.customPayload[payloadKeys.name] = utils.allocBufferFromString(options.graphName);
}
const graphResults = utils.ifUndefined(profileOptions.results, baseOptions.graphResults);
if (graphResults !== undefined) {
options.customPayload[payloadKeys.results] = utils.allocBufferFromString(graphResults);
}
const readConsistency = utils.ifUndefined(profileOptions.readConsistency, baseOptions.readConsistency);
if (readConsistency !== undefined) {
options.customPayload[payloadKeys.readConsistency] =
utils.allocBufferFromString(getConsistencyName(readConsistency));
}
const writeConsistency = utils.ifUndefined(profileOptions.writeConsistency, baseOptions.writeConsistency);
if (writeConsistency !== undefined) {
options.customPayload[payloadKeys.writeConsistency] =
utils.allocBufferFromString(getConsistencyName(writeConsistency));
}
options.readTimeout = utils.ifUndefined3(profile.readTimeout, defaultProfile.readTimeout, baseOptions.readTimeout);
if (options.readTimeout > 0) {
// Write the graph read timeout payload
options.customPayload[payloadKeys.timeout] = longBuffer(options.readTimeout);
}
return options;
});
}
/**
* Sets the payload key. If the value is not provided, it uses the value from the default profile options.
* @param {Object} payload
* @param {QueryOptions} profileOptions
* @param {String} key
* @param {String|Number|null} value
* @param {Function} [converter]
* @private
*/
function setPayloadKey(payload, profileOptions, key, value, converter) {
converter = converter || utils.allocBufferFromString;
if (value === null) {
// Use null to avoid setting the payload for a key
return;
}
if (value !== undefined) {
payload[key] = converter(value);
return;
}
if (profileOptions.customPayload[key]) {
payload[key] = profileOptions.customPayload[key];
}
}
function longBuffer(value) {
value = Long.fromNumber(value);
return Long.toBuffer(value);
}
/**
* Gets the name in upper case of the consistency level.
* @param {Number} consistency
* @private
*/
function getConsistencyName(consistency) {
// eslint-disable-next-line
if (consistency == undefined) {
//null or undefined => undefined
return undefined;
}
loadConsistencyNames();
const name = consistencyNames[consistency];
if (!name) {
throw new Error(util.format(
'Consistency %s not found, use values defined as properties in types.consistencies object', consistency
));
}
return name;
}
function loadConsistencyNames() {
if (consistencyNames) {
return;
}
consistencyNames = {};
const propertyNames = Object.keys(types.consistencies);
for (let i = 0; i < propertyNames.length; i++) {
const name = propertyNames[i];
consistencyNames[types.consistencies[name]] = name.toUpperCase();
}
// Using Java constants naming conventions
consistencyNames[types.consistencies.localQuorum] = 'LOCAL_QUORUM';
consistencyNames[types.consistencies.eachQuorum] = 'EACH_QUORUM';
consistencyNames[types.consistencies.localSerial] = 'LOCAL_SERIAL';
consistencyNames[types.consistencies.localOne] = 'LOCAL_ONE';
}
/**
* Represents a wrapper around the options related to a graph execution.
* @internal
* @ignore
*/
class GraphExecutionOptions extends DefaultExecutionOptions {
/**
* Creates a new instance of GraphExecutionOptions.
* @param {GraphQueryOptions} queryOptions The user provided query options.
* @param {Client} client the client instance.
* @param graphBaseOptions The default graph base options.
* @param {RetryPolicy} defaultProfileRetryPolicy
*/
constructor(queryOptions, client, graphBaseOptions, defaultProfileRetryPolicy) {
queryOptions = queryOptions || utils.emptyObject;
super(queryOptions, client, null);
this._defaultGraphOptions = getDefaultGraphOptions(
client.profileManager, graphBaseOptions, defaultProfileRetryPolicy, this.getProfile());
this._preferredHost = null;
this._graphSubProtocol = queryOptions.graphResults || this._defaultGraphOptions.graphResults;
this._graphLanguage = queryOptions.graphLanguage || this._defaultGraphOptions.graphLanguage;
}
setPreferredHost(host) {
this._preferredHost = host;
}
getPreferredHost() {
return this._preferredHost;
}
getGraphSource() {
return this.getRawQueryOptions().graphSource || this._defaultGraphOptions.graphSource;
}
getGraphLanguage() {
return this._graphLanguage;
}
setGraphLanguage(value) {
this._graphLanguage = value;
}
getGraphName() {
return utils.ifUndefined(this.getRawQueryOptions().graphName, this._defaultGraphOptions.graphName);
}
getGraphSubProtocol() {
return this._graphSubProtocol;
}
setGraphSubProtocol(protocol) {
this._graphSubProtocol = protocol;
}
/** Graph executions have a specific default read timeout */
getReadTimeout() {
return this.getRawQueryOptions().readTimeout || this._defaultGraphOptions.readTimeout;
}
/** Graph executions have a specific default retry policy */
getRetryPolicy() {
return this.getRawQueryOptions().retry || this._defaultGraphOptions.retry;
}
getRowParser() {
const factory = this.getRawQueryOptions().rowParserFactory;
if (!factory) {
return null;
}
return factory(this.getGraphSubProtocol());
}
getQueryWriter() {
const factory = this.getRawQueryOptions().queryWriterFactory;
if (!factory) {
return null;
}
return factory(this.getGraphSubProtocol());
}
setGraphPayload() {
const options = this.getRawQueryOptions();
const defaultOptions = this._defaultGraphOptions;
// Clone the existing custom payload (if any)
const payload = Object.assign({}, this.getCustomPayload());
// Override the payload for DSE Graph exclusive options
setPayloadKey(payload, defaultOptions, payloadKeys.language,
this.getGraphLanguage() !== this._defaultGraphOptions.graphLanguage ? this.getGraphLanguage() : undefined);
setPayloadKey(payload, defaultOptions, payloadKeys.source, options.graphSource);
setPayloadKey(payload, defaultOptions, payloadKeys.name, options.graphName);
setPayloadKey(payload, defaultOptions, payloadKeys.readConsistency,
getConsistencyName(options.graphReadConsistency));
setPayloadKey(payload, defaultOptions, payloadKeys.writeConsistency,
getConsistencyName(options.graphWriteConsistency));
// Use the read timeout defined by the user or the one default to graph executions
setPayloadKey(payload, defaultOptions, payloadKeys.timeout,
this.getReadTimeout() > 0 ? this.getReadTimeout() : null, longBuffer);
// Graph result is always set
payload[payloadKeys.results] = defaultOptions.graphResults === this.getGraphSubProtocol()
? defaultOptions.customPayload[payloadKeys.results] : utils.allocBufferFromString(this.getGraphSubProtocol());
this.setCustomPayload(payload);
}
}
module.exports = {
GraphExecutionOptions,
graphProtocol,
payloadKeys
};
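// Illustrative sketch (hypothetical helper): how a consistency level ends up in the custom
// payload. getConsistencyName() maps the numeric level to its Java-style name and setPayloadKey()
// stores it as a Buffer under the corresponding payload key.
function demoConsistencyPayload() {
  const payload = {};
  setPayloadKey(payload, { customPayload: {} }, payloadKeys.readConsistency,
    getConsistencyName(types.consistencies.localQuorum));
  // payload['graph-read-consistency'] now holds a Buffer containing 'LOCAL_QUORUM'.
  return payload;
}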


@@ -0,0 +1,156 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const utils = require('../../utils');
/**
* Creates a new instance of <code>GraphResultSet</code>.
* @class
* @classdesc
* Represents the result set of a [graph query execution]{@link Client#executeGraph} containing vertices, edges or
* scalar values depending on the query.
* <p>
* It allows iteration of the items using <code>for..of</code> statements under ES2015 and exposes
* <code>forEach()</code>, <code>first()</code> and <code>toArray()</code> to access the underlying items.
* </p>
* @example
* for (let vertex of result) { ... }
* @example
* const arr = result.toArray();
* @example
* const vertex = result.first();
* @param {ResultSet} result
* @param {Function} [rowParser]
* @alias module:datastax/graph~GraphResultSet
* @constructor
*/
function GraphResultSet(result, rowParser) {
/**
* Information on the execution of a successful query:
* @member {Object}
* @property {Number} achievedConsistency The consistency level that has been actually achieved by the query.
* @property {String} queriedHost The Cassandra host that coordinated this query.
* @property {Object} triedHosts Gets the associative array of hosts that were queried before getting a valid response,
* the last host being the one that replied correctly.
* @property {Uuid} traceId Identifier of the trace session.
* @property {Array.<string>} warnings Warning messages generated by the server when executing the query.
*/
this.info = result.info;
const rows = result.rows;
rowParser = rowParser || parsePlainJsonRow;
/**
* This property has been deprecated because it may return a lower value than the actual length of the results.
* Use <code>toArray()</code> instead.
* <p>Gets the length of the result.</p>
* @deprecated Use <code>toArray()</code> instead. This property will be removed in the following major version.
* @member {Number}
*/
this.length = result.rowLength;
/**
* A string token representing the current page state of the query. It can be used in subsequent executions to
* continue paging and retrieve the remainder of the result for the query.
* @member {String}
*/
this.pageState = result.pageState;
/**
* Returns the first element of the result or null if the result is empty.
* @returns {Object}
*/
this.first = function first() {
const iterator = this.values();
const item = iterator.next();
if (item.done) {
return null;
}
return item.value;
};
/**
* Executes a provided function once per result element.
* @param {Function} callback Function to execute for each element, taking two arguments: currentValue and index.
* @param {Object} [thisArg] Value to use as <code>this</code> when executing callback.
*/
this.forEach = function forEach(callback, thisArg) {
if (!rows.length) {
return;
}
const iterator = this.values();
let item = iterator.next();
let index = 0;
while (!item.done) {
callback.call(thisArg || this, item.value, index++);
item = iterator.next();
}
};
/**
* Returns an Array of graph result elements (vertex, edge, scalar).
* @returns {Array}
*/
this.toArray = function toArray() {
if (!rows.length) {
return utils.emptyArray;
}
return utils.iteratorToArray(this.values());
};
/**
* Returns a new Iterator object that contains the values for each index in the result.
* @returns {Iterator}
*/
this.values = function* values() {
for (const traverser of this.getTraversers()) {
const bulk = traverser.bulk || 1;
for (let j = 0; j < bulk; j++) {
yield traverser.object;
}
}
};
/**
* Gets the traversers contained in the result set.
* @returns {Iterator}
*/
this.getTraversers = function* () {
for (const row of rows) {
yield rowParser(row);
}
};
}
if (typeof Symbol !== 'undefined' && typeof Symbol.iterator === 'symbol') {
// Make iterable
GraphResultSet.prototype[Symbol.iterator] = function getIterator() {
return this.values();
};
}
/**
* @param {Row} row
* @private
*/
function parsePlainJsonRow(row) {
const parsed = JSON.parse(row['gremlin']);
return { object: parsed.result, bulk: parsed.bulk || 1 };
}
module.exports = GraphResultSet;
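// Illustrative usage sketch (hypothetical helper): consuming a GraphResultSet instance `rs`
// obtained from a graph execution, using the for..of iteration, first() and toArray() documented above.
function demoGraphResultSet(rs) {
  const seen = [];
  for (const item of rs) {
    // Each yielded item is a vertex, edge or scalar, repeated according to the traverser bulk.
    seen.push(item);
  }
  const firstItem = rs.first();   // null when the result is empty
  const allItems = rs.toArray();  // materializes the whole result as an Array
  return { firstItem, allItems, seen };
}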


@@ -0,0 +1,167 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const util = require('util');
/**
* @classdesc
* Represents a graph Element.
* @param id
* @param label
* @abstract
* @memberOf module:datastax/graph
* @constructor
*/
function Element(id, label) {
/**
* Gets the element id.
*/
this.id = id;
/**
* Gets the element label.
* @type {String}
*/
this.label = label;
}
/**
* @classdesc
* Represents a graph Vertex.
* @param id
* @param {String} label
* @param {Object<string, Array>} properties
* @extends {Element}
* @memberOf module:datastax/graph
* @constructor
*/
function Vertex(id, label, properties) {
Element.call(this, id, label);
/**
* Gets the vertex properties.
* @type {Object<string, Array>}
*/
this.properties = properties;
}
util.inherits(Vertex, Element);
/**
* @classdesc
* Represents a graph Edge.
* @param id
* @param outV
* @param {String} outVLabel
* @param {String} label
* @param inV
* @param {String} inVLabel
* @param {Object<string, Property>} properties
* @extends {Element}
* @memberOf module:datastax/graph
* @constructor
*/
function Edge(id, outV, outVLabel, label, inV, inVLabel, properties) {
Element.call(this, id, label);
/**
* Gets the id of the outgoing vertex of the edge.
*/
this.outV = outV;
/**
* Gets the label of the outgoing vertex.
*/
this.outVLabel = outVLabel;
/**
* Gets the id of the incoming vertex of the edge.
*/
this.inV = inV;
/**
* Gets the label of the incoming vertex.
*/
this.inVLabel = inVLabel;
/**
* Gets the properties of the edge as an associative array.
* @type {Object}
*/
this.properties = {};
(function adaptProperties(self) {
if (properties) {
const keys = Object.keys(properties);
for (let i = 0; i < keys.length; i++) {
const k = keys[i];
self.properties[k] = properties[k].value;
}
}
})(this);
}
util.inherits(Edge, Element);
/**
* @classdesc
* Represents a graph vertex property.
* @param id
* @param {String} label
* @param value
* @param {Object} properties
* @extends {Element}
* @memberOf module:datastax/graph
* @constructor
*/
function VertexProperty(id, label, value, properties) {
Element.call(this, id, label);
this.value = value;
this.key = this.label;
this.properties = properties;
}
util.inherits(VertexProperty, Element);
/**
* @classdesc
* Represents a property.
* @param key
* @param value
* @memberOf module:datastax/graph
* @constructor
*/
function Property(key, value) {
this.key = key;
this.value = value;
}
/**
* @classdesc
* Represents a walk through a graph as defined by a traversal.
* @param {Array} labels
* @param {Array} objects
* @memberOf module:datastax/graph
* @constructor
*/
function Path(labels, objects) {
this.labels = labels;
this.objects = objects;
}
module.exports = {
Edge,
Element,
Path,
Property,
Vertex,
VertexProperty
};
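A quick sketch of the shapes these constructors produce (ids, labels and property values below are made up; reaching the classes through the driver's datastax.graph namespace is an assumption):

// Illustrative only - shows how the element classes above fit together
const { Vertex, Edge, Property, Path } = require('cassandra-driver').datastax.graph;

const alice = new Vertex(1, 'person', { name: [ { id: 10, value: 'alice' } ] });
const bob = new Vertex(2, 'person', { name: [ { id: 11, value: 'bob' } ] });

// Edge copies each Property's value into edge.properties (see adaptProperties above)
const knows = new Edge(3, alice.id, alice.label, 'knows', bob.id, bob.label,
  { since: new Property('since', 2012) });

console.log(knows.properties.since);                                         // 2012
console.log(new Path([ [ 'a' ], [ 'b' ] ], [ alice, bob ]).objects.length);  // 2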

View File

@@ -0,0 +1,501 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @module datastax/graph/tinkerpop/typeSerializers
* @ignore
*/
/**
* @author Jorge Bay Gondra
*/
'use strict';
// Replace dependencies to minimize code changes from Apache TinkerPop
const t = {
P: UnsupportedType, TextP: UnsupportedType, Traversal: UnsupportedType, Traverser: UnsupportedType,
EnumValue: UnsupportedType
};
const ts = { TraversalStrategy: UnsupportedType };
const Bytecode = UnsupportedType;
const g = require('./index');
const utils = { Long: UnsupportedType };
t.t = g.t;
t.direction = g.direction;
function UnsupportedType() { }
const valueKey = '@value';
const typeKey = '@type';
/**
* @abstract
*/
class TypeSerializer {
serialize() {
throw new Error('serialize() method not implemented for ' + this.constructor.name);
}
deserialize() {
throw new Error('deserialize() method not implemented for ' + this.constructor.name);
}
canBeUsedFor() {
throw new Error('canBeUsedFor() method not implemented for ' + this.constructor.name);
}
}
class NumberSerializer extends TypeSerializer {
serialize(item) {
if (isNaN(item)) {
return {
[typeKey]: 'g:Double',
[valueKey]: 'NaN'
};
} else if (item === Number.POSITIVE_INFINITY) {
return {
[typeKey]: 'g:Double',
[valueKey]: 'Infinity'
};
} else if (item === Number.NEGATIVE_INFINITY) {
return {
[typeKey]: 'g:Double',
[valueKey]: '-Infinity'
};
} else {
return item;
}
}
deserialize(obj) {
var val = obj[valueKey];
if (val === 'NaN') {
return NaN;
} else if (val === 'Infinity') {
return Number.POSITIVE_INFINITY;
} else if (val === '-Infinity') {
return Number.NEGATIVE_INFINITY;
} else {
return parseFloat(val);
}
}
canBeUsedFor(value) {
return (typeof value === 'number');
}
}
class DateSerializer extends TypeSerializer {
serialize(item) {
return {
[typeKey]: 'g:Date',
[valueKey]: item.getTime()
};
}
deserialize(obj) {
return new Date(obj[valueKey]);
}
canBeUsedFor(value) {
return (value instanceof Date);
}
}
class LongSerializer extends TypeSerializer {
serialize(item) {
return {
[typeKey]: 'g:Int64',
[valueKey]: item.value
};
}
canBeUsedFor(value) {
return (value instanceof utils.Long);
}
}
class BytecodeSerializer extends TypeSerializer {
serialize(item) {
let bytecode = item;
if (item instanceof t.Traversal) {
bytecode = item.getBytecode();
}
const result = {};
result[typeKey] = 'g:Bytecode';
const resultValue = result[valueKey] = {};
const sources = this._serializeInstructions(bytecode.sourceInstructions);
if (sources) {
resultValue['source'] = sources;
}
const steps = this._serializeInstructions(bytecode.stepInstructions);
if (steps) {
resultValue['step'] = steps;
}
return result;
}
_serializeInstructions(instructions) {
if (instructions.length === 0) {
return null;
}
const result = new Array(instructions.length);
result[0] = instructions[0];
for (let i = 0; i < instructions.length; i++) {
result[i] = instructions[i].map(item => this.writer.adaptObject(item));
}
return result;
}
canBeUsedFor(value) {
return (value instanceof Bytecode) || (value instanceof t.Traversal);
}
}
class PSerializer extends TypeSerializer {
/** @param {P} item */
serialize(item) {
const result = {};
result[typeKey] = 'g:P';
const resultValue = result[valueKey] = {
'predicate': item.operator
};
if (item.other === undefined || item.other === null) {
resultValue['value'] = this.writer.adaptObject(item.value);
}
else {
resultValue['value'] = [ this.writer.adaptObject(item.value), this.writer.adaptObject(item.other) ];
}
return result;
}
canBeUsedFor(value) {
return (value instanceof t.P);
}
}
class TextPSerializer extends TypeSerializer {
/** @param {TextP} item */
serialize(item) {
const result = {};
result[typeKey] = 'g:TextP';
const resultValue = result[valueKey] = {
'predicate': item.operator
};
if (item.other === undefined || item.other === null) {
resultValue['value'] = this.writer.adaptObject(item.value);
}
else {
resultValue['value'] = [ this.writer.adaptObject(item.value), this.writer.adaptObject(item.other) ];
}
return result;
}
canBeUsedFor(value) {
return (value instanceof t.TextP);
}
}
class LambdaSerializer extends TypeSerializer {
/** @param {Function} item */
serialize(item) {
return {
[typeKey]: 'g:Lambda',
[valueKey]: {
'arguments': item.length,
'language': 'gremlin-javascript',
'script': item.toString()
}
};
}
canBeUsedFor(value) {
return (typeof value === 'function');
}
}
class EnumSerializer extends TypeSerializer {
/** @param {EnumValue} item */
serialize(item) {
return {
[typeKey]: 'g:' + item.typeName,
[valueKey]: item.elementName
};
}
canBeUsedFor(value) {
return value && value.typeName && value instanceof t.EnumValue;
}
}
class TraverserSerializer extends TypeSerializer {
/** @param {Traverser} item */
serialize(item) {
return {
[typeKey]: 'g:Traverser',
[valueKey]: {
'value': this.writer.adaptObject(item.object),
'bulk': this.writer.adaptObject(item.bulk)
}
};
}
deserialize(obj) {
const value = obj[valueKey];
return new t.Traverser(this.reader.read(value['value']), this.reader.read(value['bulk']));
}
canBeUsedFor(value) {
return (value instanceof t.Traverser);
}
}
class TraversalStrategySerializer extends TypeSerializer {
/** @param {TraversalStrategy} item */
serialize(item) {
return {
[typeKey]: 'g:' + item.constructor.name,
[valueKey]: item.configuration
};
}
canBeUsedFor(value) {
return (value instanceof ts.TraversalStrategy);
}
}
class VertexSerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
return new g.Vertex(this.reader.read(value['id']), value['label'], this.reader.read(value['properties']));
}
/** @param {Vertex} item */
serialize(item) {
return {
[typeKey]: 'g:Vertex',
[valueKey]: {
'id': this.writer.adaptObject(item.id),
'label': item.label
}
};
}
canBeUsedFor(value) {
return (value instanceof g.Vertex);
}
}
class VertexPropertySerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
return new g.VertexProperty(
this.reader.read(value['id']),
value['label'],
this.reader.read(value['value']),
this.reader.read(value['properties'])
);
}
}
class PropertySerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
return new g.Property(
value['key'],
this.reader.read(value['value']));
}
}
class EdgeSerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
return new g.Edge(
this.reader.read(value['id']),
new g.Vertex(this.reader.read(value['outV']), this.reader.read(value['outVLabel'])),
value['label'],
new g.Vertex(this.reader.read(value['inV']), this.reader.read(value['inVLabel'])),
this.reader.read(value['properties'])
);
}
/** @param {Edge} item */
serialize(item) {
return {
[typeKey]: 'g:Edge',
[valueKey]: {
'id': this.writer.adaptObject(item.id),
'label': item.label,
'outV': this.writer.adaptObject(item.outV.id),
'outVLabel': item.outV.label,
'inV': this.writer.adaptObject(item.inV.id),
'inVLabel': item.inV.label
}
};
}
canBeUsedFor(value) {
return (value instanceof g.Edge);
}
}
class PathSerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
const objects = value['objects'].map(o => this.reader.read(o));
return new g.Path(this.reader.read(value['labels']), objects);
}
}
class Path3Serializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
return new g.Path(this.reader.read(value['labels']), this.reader.read(value['objects']));
}
}
class TSerializer extends TypeSerializer {
deserialize(obj) {
return t.t[obj[valueKey]];
}
}
class DirectionSerializer extends TypeSerializer {
deserialize(obj) {
return t.direction[obj[valueKey].toLowerCase()];
}
}
class ArraySerializer extends TypeSerializer {
constructor(typeKey) {
super();
this.typeKey = typeKey;
}
deserialize(obj) {
const value = obj[valueKey];
if (!Array.isArray(value)) {
throw new Error('Expected Array, obtained: ' + value);
}
return value.map(x => this.reader.read(x));
}
/** @param {Array} item */
serialize(item) {
return {
[typeKey]: this.typeKey,
[valueKey]: item.map(x => this.writer.adaptObject(x))
};
}
canBeUsedFor(value) {
return Array.isArray(value);
}
}
class BulkSetSerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
if (!Array.isArray(value)) {
throw new Error('Expected Array, obtained: ' + value);
}
// coerce the BulkSet to List. if the bulk exceeds the int space then we can't coerce to List anyway,
// so this query will be trouble. we'd need a legit BulkSet implementation here in js. this current
// implementation is here to replicate the previous functionality that existed on the server side in
// previous versions.
let result = [];
for (let ix = 0, iy = value.length; ix < iy; ix += 2) {
const pair = value.slice(ix, ix + 2);
result = result.concat(Array(this.reader.read(pair[1])).fill(this.reader.read(pair[0])));
}
return result;
}
}
class MapSerializer extends TypeSerializer {
deserialize(obj) {
const value = obj[valueKey];
if (!Array.isArray(value)) {
throw new Error('Expected Array, obtained: ' + value);
}
const result = new Map();
for (let i = 0; i < value.length; i += 2) {
result.set(this.reader.read(value[i]), this.reader.read(value[i + 1]));
}
return result;
}
/** @param {Map} map */
serialize(map) {
const arr = [];
map.forEach((v, k) => {
arr.push(this.writer.adaptObject(k));
arr.push(this.writer.adaptObject(v));
});
return {
[typeKey]: 'g:Map',
[valueKey]: arr
};
}
canBeUsedFor(value) {
return value instanceof Map;
}
}
class ListSerializer extends ArraySerializer {
constructor() {
super('g:List');
}
}
class SetSerializer extends ArraySerializer {
constructor() {
super('g:Set');
}
}
module.exports = {
BulkSetSerializer,
BytecodeSerializer,
DateSerializer,
DirectionSerializer,
EdgeSerializer,
EnumSerializer,
LambdaSerializer,
ListSerializer,
LongSerializer,
MapSerializer,
NumberSerializer,
Path3Serializer,
PathSerializer,
PropertySerializer,
PSerializer,
TextPSerializer,
SetSerializer,
TSerializer,
TraverserSerializer,
TraversalStrategySerializer,
typeKey,
valueKey,
VertexPropertySerializer,
VertexSerializer
};
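To make the GraphSON envelope concrete, a small sketch using the two serializers above that work standalone (no reader/writer attached); the require path is an assumption and the expected outputs follow from the code above:

// Illustrative only - exercises DateSerializer and NumberSerializer
const { DateSerializer, NumberSerializer, typeKey, valueKey } = require('./typeSerializers');

const dates = new DateSerializer();
console.log(dates.serialize(new Date(Date.UTC(2020, 0, 1))));
// -> { '@type': 'g:Date', '@value': 1577836800000 }
console.log(dates.deserialize({ [typeKey]: 'g:Date', [valueKey]: 0 }).toISOString());
// -> 1970-01-01T00:00:00.000Z

const numbers = new NumberSerializer();
console.log(numbers.serialize(Number.POSITIVE_INFINITY)); // { '@type': 'g:Double', '@value': 'Infinity' }
console.log(numbers.deserialize({ [typeKey]: 'g:Double', [valueKey]: '1.5' })); // 1.5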

View File

@@ -0,0 +1,84 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const types = require('../../types');
const { dataTypes } = types;
/**
* Internal representation of a value with additional type information.
* @internal
* @ignore
*/
class GraphTypeWrapper {
constructor(value, typeInfo) {
this.value = value;
this.typeInfo = typeof typeInfo === 'number' ? { code: typeInfo } : typeInfo;
}
}
/**
* Internal representation of user-defined type with the metadata.
* @internal
* @ignore
*/
class UdtGraphWrapper {
constructor(value, udtInfo) {
this.value = value;
if (!udtInfo || !udtInfo.name || !udtInfo.keyspace || !udtInfo.fields) {
throw new TypeError(`udtInfo must be an object with name, keyspace and fields properties defined`);
}
this.udtInfo = udtInfo;
}
}
/**
* Wraps a number or null value to hint the client driver that the data type of the value is an int
* @memberOf module:datastax/graph
*/
function asInt(value) { return new GraphTypeWrapper(value, dataTypes.int); }
/**
* Wraps a number or null value to hint the client driver that the data type of the value is a double
* @memberOf module:datastax/graph
*/
function asDouble(value) { return new GraphTypeWrapper(value, dataTypes.double); }
/**
* Wraps a number or null value to hint the client driver that the data type of the value is a float
* @memberOf module:datastax/graph
*/
function asFloat(value) { return new GraphTypeWrapper(value, dataTypes.float); }
/**
* Wraps a Date or null value to hint the client driver that the data type of the value is a timestamp
* @memberOf module:datastax/graph
*/
function asTimestamp(value) { return new GraphTypeWrapper(value, dataTypes.timestamp); }
/**
* Wraps an Object or null value to hint the client driver that the data type of the value is a user-defined type.
* @memberOf module:datastax/graph
* @param {object} value The object representing the UDT.
* @param {{name: string, keyspace: string, fields: Array}} udtInfo The UDT metadata as defined by the driver.
*/
function asUdt(value, udtInfo) { return new UdtGraphWrapper(value, udtInfo); }
module.exports = { asInt, asDouble, asFloat, asTimestamp, asUdt, UdtGraphWrapper, GraphTypeWrapper };
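These wrappers are intended to be passed as graph query parameters so the driver can choose the exact CQL type instead of guessing from the JavaScript value. A hedged sketch (graph name, traversal and UDT metadata are illustrative; the functions are assumed to be reachable through the driver's datastax.graph namespace):

// Illustrative only - parameter names and schema are made up
const cassandra = require('cassandra-driver');
const { asInt, asDouble, asTimestamp, asUdt } = cassandra.datastax.graph;

const client = new cassandra.Client({
  contactPoints: ['host1'],
  localDataCenter: 'dc1',
  graphOptions: { name: 'my_graph' }
});

async function addReading(udtInfo) {
  // Without the wrappers, a plain JavaScript number is ambiguous (int? double?)
  await client.executeGraph(
    'g.addV("reading").property("count", count).property("value", value)' +
    '.property("taken_at", takenAt).property("location", location)',
    {
      count: asInt(3),
      value: asDouble(1.5),
      takenAt: asTimestamp(new Date()),
      location: asUdt({ lat: 0, lon: 0 }, udtInfo) // udtInfo: { name, keyspace, fields } from schema metadata
    });
}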

24
node_modules/cassandra-driver/lib/datastax/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,24 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as graphModule from './graph';
import * as searchModule from './search';
export namespace datastax {
export import graph = graphModule.graph;
export import search = searchModule.search;
}

28
node_modules/cassandra-driver/lib/datastax/index.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/**
* DataStax module.
* <p>
* Contains modules and classes to represent functionality that is specific to DataStax products.
* </p>
* @module datastax
*/
exports.graph = require('./graph');
exports.search = require('./search');
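In practice both submodules end up reachable through the driver's top-level export; a tiny sketch (assuming the package re-exports this namespace as cassandra.datastax):

// Illustrative only
const cassandra = require('cassandra-driver');
const { graph, search } = cassandra.datastax;

console.log(typeof graph.asInt);      // 'function'
console.log(typeof search.DateRange); // 'function'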

View File

@@ -0,0 +1,537 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const utils = require('../../utils');
const Long = require('long');
/**
* Regex to parse dates in the following format YYYY-MM-DDThh:mm:ss.sssZ
* Looks cumbersome but it's straightforward:
* - "(\d{1,6})": year mandatory 1 to 6 digits
* - (?:-(\d{1,2}))?(?:-(\d{1,2}))? two non-capturing groups representing the month and day (1 to 2 digits captured).
* - (?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2}))?)?Z? A non-capturing group for the time portion
* @private
*/
const dateRegex =
/^[-+]?(\d{1,6})(?:-(\d{1,2}))?(?:-(\d{1,2}))?(?:T(\d{1,2}?)?(?::(\d{1,2}))?(?::(\d{1,2})(?:\.(\d{1,3}))?)?)?Z?$/;
const multipleBoundariesRegex = /^\[(.+?) TO (.+)]$/;
const unbounded = Object.freeze(new DateRangeBound(null, -1));
const dateRangeType = {
// single value as in "2001-01-01"
singleValue: 0,
// closed range as in "[2001-01-01 TO 2001-01-31]"
closedRange: 1,
// open range high as in "[2001-01-01 TO *]"
openRangeHigh: 2,
// open range low as in "[* TO 2001-01-01]"
openRangeLow: 3,
// both ranges open as in "[* TO *]"
openBoth: 4,
// single open range as in "[*]"
openSingle: 5
};
/**
* Defines the possible values of date range precision.
* @type {Object}
* @property {Number} year
* @property {Number} month
* @property {Number} day
* @property {Number} hour
* @property {Number} minute
* @property {Number} second
* @property {Number} millisecond
* @memberof module:datastax/search
*/
const dateRangePrecision = {
year: 0,
month: 1,
day: 2,
hour: 3,
minute: 4,
second: 5,
millisecond: 6
};
/**
* Creates a new instance of <code>DateRange</code> using a lower bound and an upper bound.
* <p>Consider using <code>DateRange.fromString()</code> to create instances more easily.</p>
* @classdesc
* Represents a range of dates, corresponding to the Apache Solr type
* <a href="https://cwiki.apache.org/confluence/display/solr/Working+with+Dates"><code>DateRangeField</code></a>.
* <p>
* A date range can have one or two bounds, namely lower bound and upper bound, to represent an interval of time.
* Date range bounds are both inclusive. For example:
* </p>
* <ul>
* <li><code>2015 TO 2016-10</code> represents from the first day of 2015 to the last day of October 2016</li>
* <li><code>2015</code> represents during the course of the year 2015.</li>
* <li><code>2017 TO *</code> represents any date greater or equals to the first day of the year 2017.</li>
* </ul>
* <p>
* Note that this JavaScript representation of <code>DateRangeField</code> does not support Dates outside of the range
* supported by ECMAScript Date: -100,000,000 to 100,000,000 days measured relative to midnight at the
* beginning of 01 January, 1970 UTC, with <code>-271821-04-20T00:00:00.000Z</code> being the minimum lower boundary
* and <code>275760-09-13T00:00:00.000Z</code> the maximum upper boundary.
* </p>
* @param {DateRangeBound} lowerBound A value representing the range lower bound, composed by a
* <code>Date</code> and a precision. Use <code>DateRangeBound.unbounded</code> for an open lower bound.
* @param {DateRangeBound} [upperBound] A value representing the range upper bound, composed by a
* <code>Date</code> and a precision. Use <code>DateRangeBound.unbounded</code> for an open upper bound. When it's not
* defined, the <code>DateRange</code> instance is considered as a single value range.
* @constructor
* @memberOf module:datastax/search
*/
function DateRange(lowerBound, upperBound) {
if (!lowerBound) {
throw new TypeError('The lower boundary must be defined');
}
/**
* Gets the lower bound of this range (inclusive).
* @type {DateRangeBound}
*/
this.lowerBound = lowerBound;
/**
* Gets the upper bound of this range (inclusive).
* @type {DateRangeBound|null}
*/
this.upperBound = upperBound || null;
// Define the type
if (this.upperBound === null) {
if (this.lowerBound !== unbounded) {
this._type = dateRangeType.singleValue;
}
else {
this._type = dateRangeType.openSingle;
}
}
else {
if (this.lowerBound !== unbounded) {
this._type = this.upperBound !== unbounded ? dateRangeType.closedRange : dateRangeType.openRangeHigh;
}
else {
this._type = this.upperBound !== unbounded ? dateRangeType.openRangeLow : dateRangeType.openBoth;
}
}
}
/**
* Returns true if the value of this DateRange instance and other are the same.
* @param {DateRange} other
* @returns {Boolean}
*/
DateRange.prototype.equals = function (other) {
if (!(other instanceof DateRange)) {
return false;
}
return (other.lowerBound.equals(this.lowerBound) &&
(other.upperBound ? other.upperBound.equals(this.upperBound) : !this.upperBound));
};
/**
* Returns the string representation of the instance.
* @return {String}
*/
DateRange.prototype.toString = function () {
if (this.upperBound === null) {
return this.lowerBound.toString();
}
return '[' + this.lowerBound.toString() + ' TO ' + this.upperBound.toString() + ']';
};
DateRange.prototype.toBuffer = function () {
// Serializes the value containing:
// <type>[<time0><precision0><time1><precision1>]
if (this._type === dateRangeType.openBoth || this._type === dateRangeType.openSingle) {
return utils.allocBufferFromArray([ this._type ]);
}
let buffer;
let offset = 0;
if (this._type !== dateRangeType.closedRange) {
// byte + long + byte
const boundary = this._type !== dateRangeType.openRangeLow ? this.lowerBound : this.upperBound;
buffer = utils.allocBufferUnsafe(10);
buffer.writeUInt8(this._type, offset++);
offset = writeDate(boundary.date, buffer, offset);
buffer.writeUInt8(boundary.precision, offset);
return buffer;
}
// byte + long + byte + long + byte
buffer = utils.allocBufferUnsafe(19);
buffer.writeUInt8(this._type, offset++);
offset = writeDate(this.lowerBound.date, buffer, offset);
buffer.writeUInt8(this.lowerBound.precision, offset++);
offset = writeDate(this.upperBound.date, buffer, offset);
buffer.writeUInt8(this.upperBound.precision, offset);
return buffer;
};
/**
* Returns the <code>DateRange</code> representation of a given string.
* <p>String representations of dates are always expressed in Coordinated Universal Time (UTC)</p>
* @param {String} dateRangeString
*/
DateRange.fromString = function (dateRangeString) {
const matches = multipleBoundariesRegex.exec(dateRangeString);
if (!matches) {
return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(dateRangeString)));
}
return new DateRange(DateRangeBound.toLowerBound(DateRangeBound.fromString(matches[1])), DateRangeBound.toUpperBound(DateRangeBound.fromString(matches[2])));
};
/**
* Deserializes the buffer into a <code>DateRange</code>
* @param {Buffer} buffer
* @return {DateRange}
*/
DateRange.fromBuffer = function (buffer) {
if (buffer.length === 0) {
throw new TypeError('DateRange serialized value must have at least 1 byte');
}
const type = buffer.readUInt8(0);
if (type === dateRangeType.openBoth) {
return new DateRange(unbounded, unbounded);
}
if (type === dateRangeType.openSingle) {
return new DateRange(unbounded);
}
let offset = 1;
let date1;
let lowerBound;
let upperBound = null;
if (type !== dateRangeType.closedRange) {
date1 = readDate(buffer, offset);
offset += 8;
lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset));
if (type === dateRangeType.openRangeLow) {
// lower boundary is open, the first serialized boundary is the upperBound
upperBound = lowerBound;
lowerBound = unbounded;
}
else {
upperBound = type === dateRangeType.openRangeHigh ? unbounded : null;
}
return new DateRange(lowerBound, upperBound);
}
date1 = readDate(buffer, offset);
offset += 8;
lowerBound = new DateRangeBound(date1, buffer.readUInt8(offset++));
const date2 = readDate(buffer, offset);
offset += 8;
upperBound = new DateRangeBound(date2, buffer.readUInt8(offset));
return new DateRange(lowerBound, upperBound);
};
/**
* Writes a Date, long millis since epoch, to a buffer starting from offset.
* @param {Date} date
* @param {Buffer} buffer
* @param {Number} offset
* @return {Number} The new offset.
* @private
*/
function writeDate(date, buffer, offset) {
const long = Long.fromNumber(date.getTime());
buffer.writeUInt32BE(long.getHighBitsUnsigned(), offset);
buffer.writeUInt32BE(long.getLowBitsUnsigned(), offset + 4);
return offset + 8;
}
/**
* Reads a Date, long millis since epoch, from a buffer starting from offset.
* @param {Buffer} buffer
* @param {Number} offset
* @return {Date}
* @private
*/
function readDate(buffer, offset) {
const long = new Long(buffer.readInt32BE(offset+4), buffer.readInt32BE(offset));
return new Date(long.toNumber());
}
/**
* @classdesc
* Represents a date range boundary, composed by a <code>Date</code> and a precision.
* @param {Date} date The timestamp portion, representing a single moment in time. Consider using
* <code>Date.UTC()</code> method to build the <code>Date</code> instance.
* @param {Number} precision The precision portion. Valid values for <code>DateRangeBound</code> precision are
* defined in the [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member.
* @constructor
* @memberOf module:datastax/search
*/
function DateRangeBound(date, precision) {
/**
* The timestamp portion of the boundary.
* @type {Date}
*/
this.date = date;
/**
* The precision portion of the boundary. Valid values are defined in the
* [dateRangePrecision]{@link module:datastax/search~dateRangePrecision} member.
* @type {Number}
*/
this.precision = precision;
}
/**
* Returns the string representation of the instance.
* @return {String}
*/
DateRangeBound.prototype.toString = function () {
if (this.precision === -1) {
return '*';
}
let precision = 0;
const isoString = this.date.toISOString();
let i;
let char;
// The years take at least the first 4 characters
for (i = 4; i < isoString.length && precision <= this.precision; i++) {
char = isoString.charAt(i);
if (precision === dateRangePrecision.day && char === 'T') {
precision = dateRangePrecision.hour;
continue;
}
if (precision >= dateRangePrecision.hour && char === ':' || char === '.') {
precision++;
continue;
}
if (precision < dateRangePrecision.day && char === '-') {
precision++;
}
}
let start = 0;
const firstChar = isoString.charAt(0);
let sign = '';
let toRemoveIndex = 4;
if (firstChar === '+' || firstChar === '-') {
sign = firstChar;
if (firstChar === '-') {
// since we are retaining the -, don't remove as many zeros.
toRemoveIndex = 3;
}
// Remove additional zeros
for (start = 1; start < toRemoveIndex; start++) {
if (isoString.charAt(start) !== '0') {
break;
}
}
}
if (this.precision !== dateRangePrecision.millisecond) {
// i holds the position of the first char that marks the end of a precision (i.e. '-', 'T', ...),
// we should not include it in the result, except when it's the 'Z' char for the complete representation
i--;
}
return sign + isoString.substring(start, i);
};
/**
* Returns true if the value of this DateRange instance and other are the same.
* @param {DateRangeBound} other
* @return {boolean}
*/
DateRangeBound.prototype.equals = function (other) {
if (!(other instanceof DateRangeBound)) {
return false;
}
if (other.precision !== this.precision) {
return false;
}
return datesEqual(other.date, this.date);
};
function datesEqual(d1, d2) {
const t1 = d1 ? d1.getTime() : null;
const t2 = d2 ? d2.getTime() : null;
return t1 === t2;
}
DateRangeBound.prototype.isUnbounded = function () {
return (this.precision === -1);
};
/**
* Parses a date string and returns a DateRangeBound.
* @param {String} boundaryString
* @return {DateRangeBound}
*/
DateRangeBound.fromString = function(boundaryString) {
if (!boundaryString) {
return null;
}
if (boundaryString === '*') {
return unbounded;
}
const matches = dateRegex.exec(boundaryString);
if (!matches) {
throw TypeError('String provided is not a valid date ' + boundaryString);
}
if (matches[7] !== undefined && matches[5] === undefined) {
// Due to a limitation in the regex, it's possible to match dates like 2015T03:02.001, without the seconds
// portion but with the milliseconds specified.
throw new TypeError('String representation of the date contains the milliseconds portion but not the seconds: ' +
boundaryString);
}
const builder = new BoundaryBuilder(boundaryString.charAt(0) === '-');
for (let i = 1; i < matches.length; i++) {
builder.set(i-1, matches[i], boundaryString);
}
return builder.build();
};
/**
* The unbounded {@link DateRangeBound} instance. Unbounded bounds are syntactically represented by a <code>*</code>
* (star) sign.
* @type {DateRangeBound}
*/
DateRangeBound.unbounded = unbounded;
/**
* Converts a {DateRangeBound} into a lower-bounded bound by rounding down its date
* based on its precision.
*
* @param {DateRangeBound} bound The bound to round down.
* @returns {DateRangeBound} with the date rounded down to the given precision.
*/
DateRangeBound.toLowerBound = function (bound) {
if(bound === unbounded) {
return bound;
}
const rounded = new Date(bound.date.getTime());
// in this case we want to fallthrough
/* eslint-disable no-fallthrough */
switch (bound.precision) {
case dateRangePrecision.year:
rounded.setUTCMonth(0);
case dateRangePrecision.month:
rounded.setUTCDate(1);
case dateRangePrecision.day:
rounded.setUTCHours(0);
case dateRangePrecision.hour:
rounded.setUTCMinutes(0);
case dateRangePrecision.minute:
rounded.setUTCSeconds(0);
case dateRangePrecision.second:
rounded.setUTCMilliseconds(0);
}
/* eslint-enable no-fallthrough */
return new DateRangeBound(rounded, bound.precision);
};
/**
* Converts a {DateRangeBound} into an upper-bounded bound by rounding up its date
* based on its precision.
*
* @param {DateRangeBound} bound The bound to round up.
* @returns {DateRangeBound} with the date rounded up to the given precision.
*/
DateRangeBound.toUpperBound = function (bound) {
if (bound === unbounded) {
return bound;
}
const rounded = new Date(bound.date.getTime());
// in this case we want to fallthrough
/* eslint-disable no-fallthrough */
switch (bound.precision) {
case dateRangePrecision.year:
rounded.setUTCMonth(11);
case dateRangePrecision.month:
// Advance to the beginning of next month and set day of month to 0
// which sets the date to the last day of the previous month.
// This gives us the effect of YYYY-MM-LastDayOfThatMonth
rounded.setUTCMonth(rounded.getUTCMonth() + 1, 0);
case dateRangePrecision.day:
rounded.setUTCHours(23);
case dateRangePrecision.hour:
rounded.setUTCMinutes(59);
case dateRangePrecision.minute:
rounded.setUTCSeconds(59);
case dateRangePrecision.second:
rounded.setUTCMilliseconds(999);
}
/* eslint-enable no-fallthrough */
return new DateRangeBound(rounded, bound.precision);
};
/** @private */
function BoundaryBuilder(isNegative) {
this._sign = isNegative ? -1 : 1;
this._index = 0;
this._values = new Int32Array(7);
}
BoundaryBuilder.prototype.set = function (index, value, stringDate) {
if (value === undefined) {
return;
}
if (index > 6) {
throw new TypeError('Index out of bounds: ' + index);
}
if (index > this._index) {
this._index = index;
}
const numValue = +value;
switch (index) {
case dateRangePrecision.month:
if (numValue < 1 || numValue > 12) {
throw new TypeError('Month portion is not valid for date: ' + stringDate);
}
break;
case dateRangePrecision.day:
if (numValue < 1 || numValue > 31) {
throw new TypeError('Day portion is not valid for date: ' + stringDate);
}
break;
case dateRangePrecision.hour:
if (numValue > 23) {
throw new TypeError('Hour portion is not valid for date: ' + stringDate);
}
break;
case dateRangePrecision.minute:
case dateRangePrecision.second:
if (numValue > 59) {
throw new TypeError('Minute/second portion is not valid for date: ' + stringDate);
}
break;
case dateRangePrecision.millisecond:
if (numValue > 999) {
throw new TypeError('Millisecond portion is not valid for date: ' + stringDate);
}
break;
}
this._values[index] = numValue;
};
/** @return {DateRangeBound} */
BoundaryBuilder.prototype.build = function () {
const date = new Date(0);
let month = this._values[1];
if (month) {
// ES Date months are represented from 0 to 11
month--;
}
date.setUTCFullYear(this._sign * this._values[0], month, this._values[2] || 1);
date.setUTCHours(this._values[3], this._values[4], this._values[5], this._values[6]);
return new DateRangeBound(date, this._index);
};
exports.unbounded = unbounded;
exports.dateRangePrecision = dateRangePrecision;
exports.DateRange = DateRange;
exports.DateRangeBound = DateRangeBound;
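A sketch of how these pieces round-trip, with expected values derived from the parsing, rounding and serialization logic above (the relative require path matches the one used by the search index module):

// Illustrative only
const { DateRange, DateRangeBound } = require('./date-range');

// Bounds are rounded down/up according to their precision
const range = DateRange.fromString('[2015 TO 2016-10]');
console.log(range.lowerBound.date.toISOString()); // 2015-01-01T00:00:00.000Z (year precision)
console.log(range.upperBound.date.toISOString()); // 2016-10-31T23:59:59.999Z (month precision)
console.log(range.toString());                    // [2015 TO 2016-10]

// Open bounds are written as '*'
console.log(DateRange.fromString('[2017 TO *]').upperBound === DateRangeBound.unbounded); // true

// Binary form: <type> byte, then 8-byte millis + 1-byte precision per bound
const buffer = range.toBuffer();
console.log(buffer.length);                               // 19 for a closed range
console.log(DateRange.fromBuffer(buffer).equals(range));  // true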

View File

@@ -0,0 +1,58 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export namespace search {
enum dateRangePrecision {
year = 0,
month,
day,
hour,
minute,
second,
millisecond
}
class DateRange {
lowerBound: DateRangeBound;
upperBound: DateRangeBound;
constructor(lowerBound: DateRangeBound, upperBound?: DateRangeBound);
equals(other: DateRange): boolean;
toString(): string;
static fromString(value: string): DateRange;
static fromBuffer(value: Buffer): DateRange;
}
class DateRangeBound {
date: Date;
precision: number;
equals(other: DateRangeBound): boolean;
toString(): string;
static fromString(value: string): DateRangeBound;
static toLowerBound(bound: DateRangeBound): DateRangeBound;
static toUpperBound(bound: DateRangeBound): DateRangeBound;
}
}

View File

@@ -0,0 +1,30 @@
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const dateRangeModule = require('./date-range');
/**
* Search module.
* <p>
* Contains the classes to represent the set of types for search data that come with DSE 5.1+
* </p>
* @module datastax/search
*/
exports.DateRange = dateRangeModule.DateRange;
exports.DateRangeBound = dateRangeModule.DateRangeBound;
exports.dateRangePrecision = dateRangeModule.dateRangePrecision;
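From application code, these types are presumably consumed through the driver's datastax.search namespace, for example when binding a DateRangeField column (table and column names below are made up):

// Illustrative only
const cassandra = require('cassandra-driver');
const { DateRange } = cassandra.datastax.search;

async function insertEvent(client) {
  const period = DateRange.fromString('[2015 TO 2016-10]');
  await client.execute('INSERT INTO ks.events (id, period) VALUES (?, ?)',
    [ cassandra.types.Uuid.random(), period ], { prepare: true });
}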