First commit
This commit is contained in:
124
node_modules/mongodb/src/operations/add_user.ts
generated
vendored
Normal file
124
node_modules/mongodb/src/operations/add_user.ts
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
import type { Document } from '../bson';
|
||||
import type { Db } from '../db';
|
||||
import { MongoInvalidArgumentError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, emitWarningOnce, getTopology } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/**
 * Pairs a role name with the database it applies to, for use in `AddUserOptions.roles`.
 * @public
 * @deprecated Use the createUser command directly instead.
 */
export interface RoleSpecification {
  /**
   * A role grants privileges to perform sets of actions on defined resources.
   * A given role applies to the database on which it is defined and can grant access down to a collection level of granularity.
   */
  role: string;
  /** The database this user's role should affect. */
  db: string;
}
|
||||
|
||||
/**
 * Options accepted by `Db.addUser` / `AddUserOperation`.
 * @public
 * @deprecated Use the createUser command directly instead.
 */
export interface AddUserOptions extends CommandOperationOptions {
  /** Roles associated with the created user */
  roles?: string | string[] | RoleSpecification | RoleSpecification[];
  /** Custom data associated with the user (only Mongodb 2.6 or higher) */
  customData?: Document;
}
|
||||
|
||||
/**
 * Creates a user on the target database by issuing the `createUser` command.
 * Handles role defaulting, legacy client-side password digesting, and the
 * removed `digestPassword` option.
 * @internal
 */
export class AddUserOperation extends CommandOperation<Document> {
  override options: AddUserOptions;
  // Database the user will be created on.
  db: Db;
  username: string;
  // Plain-text password; may be replaced by an md5 digest for old servers in execute().
  password?: string;

  constructor(db: Db, username: string, password: string | undefined, options?: AddUserOptions) {
    super(db, options);

    this.db = db;
    this.username = username;
    this.password = password;
    this.options = options ?? {};
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<Document>
  ): void {
    const db = this.db;
    const username = this.username;
    const password = this.password;
    const options = this.options;

    // Error out if digestPassword set
    // v5 removed the digestPassword option from AddUserOptions but we still want to throw
    // an error when digestPassword is provided.
    if ('digestPassword' in options && options.digestPassword != null) {
      return callback(
        new MongoInvalidArgumentError(
          'Option "digestPassword" not supported via addUser, use db.command(...) instead'
        )
      );
    }

    let roles;
    if (!options.roles || (Array.isArray(options.roles) && options.roles.length === 0)) {
      // No roles (or an empty list) supplied: warn and pick a default based on the db.
      emitWarningOnce(
        'Creating a user without roles is deprecated. Defaults to "root" if db is "admin" or "dbOwner" otherwise'
      );
      if (db.databaseName.toLowerCase() === 'admin') {
        roles = ['root'];
      } else {
        roles = ['dbOwner'];
      }
    } else {
      // Normalize a single role (string or RoleSpecification) into an array.
      roles = Array.isArray(options.roles) ? options.roles : [options.roles];
    }

    let topology;
    try {
      topology = getTopology(db);
    } catch (error) {
      return callback(error);
    }

    // Servers at wire version >= 7 digest the password server-side; older
    // servers expect the client to send an md5 digest and digestPassword=false.
    const digestPassword = topology.lastHello().maxWireVersion >= 7;

    let userPassword = password;

    if (!digestPassword) {
      // Use node md5 generator
      const md5 = crypto.createHash('md5');
      // Generate keys used for authentication
      md5.update(`${username}:mongo:${password}`);
      userPassword = md5.digest('hex');
    }

    // Build the command to execute
    const command: Document = {
      createUser: username,
      customData: options.customData || {},
      roles: roles,
      digestPassword
    };

    // Only include `pwd` when a password was actually provided.
    if (typeof password === 'string') {
      command.pwd = userPassword;
    }

    super.executeCommand(server, session, command, callback);
  }
}

defineAspects(AddUserOperation, [Aspect.WRITE_OPERATION]);
|
144
node_modules/mongodb/src/operations/aggregate.ts
generated
vendored
Normal file
144
node_modules/mongodb/src/operations/aggregate.ts
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
import type { Document } from '../bson';
|
||||
import { MongoInvalidArgumentError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, maxWireVersion, MongoDBNamespace } from '../utils';
|
||||
import { CollationOptions, CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects, Hint } from './operation';
|
||||
|
||||
/** @internal */
// Sentinel `aggregate` value used for database-level (collection-less) aggregations.
export const DB_AGGREGATE_COLLECTION = 1 as const;
// Minimum wire version at which readConcern is supported on $out aggregations;
// below this, execute() drops the readConcern for write-stage pipelines.
const MIN_WIRE_VERSION_$OUT_READ_CONCERN_SUPPORT = 8 as const;
|
||||
|
||||
/** @public */
export interface AggregateOptions extends CommandOperationOptions {
  /** allowDiskUse lets the server know if it can use disk to store temporary results for the aggregation (requires mongodb 2.6 \>). */
  allowDiskUse?: boolean;
  /** The number of documents to return per batch. See [aggregation documentation](https://www.mongodb.com/docs/manual/reference/command/aggregate). */
  batchSize?: number;
  /** Allow driver to bypass schema validation in MongoDB 3.2 or higher. */
  bypassDocumentValidation?: boolean;
  /** Return the query as cursor, on 2.6 \> it returns as a real cursor on pre 2.6 it returns as an emulated cursor. */
  cursor?: Document;
  /** specifies a cumulative time limit in milliseconds for processing operations on the cursor. MongoDB interrupts the operation at the earliest following interrupt point. */
  maxTimeMS?: number;
  /** The maximum amount of time for the server to wait on new documents to satisfy a tailable cursor query. */
  maxAwaitTimeMS?: number;
  /** Specify collation. */
  collation?: CollationOptions;
  /** Add an index selection hint to an aggregation command */
  hint?: Hint;
  /** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
  let?: Document;

  /** When set, a `{ $out: <value> }` stage is appended to the pipeline, writing results to this collection. */
  out?: string;
}
|
||||
|
||||
/**
 * Runs the `aggregate` command against a collection or (via the sentinel
 * target) an entire database. Detects trailing write stages ($out/$merge)
 * to adjust retryability, read preference, and concern handling.
 * @internal
 */
export class AggregateOperation<T = Document> extends CommandOperation<T> {
  override options: AggregateOptions;
  // Collection name, or DB_AGGREGATE_COLLECTION for a db-level aggregation.
  target: string | typeof DB_AGGREGATE_COLLECTION;
  pipeline: Document[];
  // True when the pipeline ends in $out/$merge (or options.out was given).
  hasWriteStage: boolean;

  constructor(ns: MongoDBNamespace, pipeline: Document[], options?: AggregateOptions) {
    super(undefined, { ...options, dbName: ns.db });

    this.options = { ...options };

    // Covers when ns.collection is null, undefined or the empty string, use DB_AGGREGATE_COLLECTION
    this.target = ns.collection || DB_AGGREGATE_COLLECTION;

    this.pipeline = pipeline;

    // determine if we have a write stage, override read preference if so
    this.hasWriteStage = false;
    if (typeof options?.out === 'string') {
      // options.out is sugar for appending a $out stage.
      this.pipeline = this.pipeline.concat({ $out: options.out });
      this.hasWriteStage = true;
    } else if (pipeline.length > 0) {
      const finalStage = pipeline[pipeline.length - 1];
      if (finalStage.$out || finalStage.$merge) {
        this.hasWriteStage = true;
      }
    }

    if (this.hasWriteStage) {
      this.trySecondaryWrite = true;
    } else {
      // writeConcern is meaningless without a write stage; drop it from options.
      delete this.options.writeConcern;
    }

    if (this.explain && this.writeConcern) {
      throw new MongoInvalidArgumentError(
        'Option "explain" cannot be used on an aggregate call with writeConcern'
      );
    }

    if (options?.cursor != null && typeof options.cursor !== 'object') {
      throw new MongoInvalidArgumentError('Cursor options must be an object');
    }
  }

  // Write-stage aggregations must not be retried as reads.
  override get canRetryRead(): boolean {
    return !this.hasWriteStage;
  }

  addToPipeline(stage: Document): void {
    this.pipeline.push(stage);
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<T>
  ): void {
    const options: AggregateOptions = this.options;
    const serverWireVersion = maxWireVersion(server);
    const command: Document = { aggregate: this.target, pipeline: this.pipeline };

    // Pre-wire-8 servers do not accept readConcern on $out aggregations.
    if (this.hasWriteStage && serverWireVersion < MIN_WIRE_VERSION_$OUT_READ_CONCERN_SUPPORT) {
      this.readConcern = undefined;
    }

    if (this.hasWriteStage && this.writeConcern) {
      Object.assign(command, { writeConcern: this.writeConcern });
    }

    if (options.bypassDocumentValidation === true) {
      command.bypassDocumentValidation = options.bypassDocumentValidation;
    }

    if (typeof options.allowDiskUse === 'boolean') {
      command.allowDiskUse = options.allowDiskUse;
    }

    if (options.hint) {
      command.hint = options.hint;
    }

    if (options.let) {
      command.let = options.let;
    }

    // we check for undefined specifically here to allow falsy values
    // eslint-disable-next-line no-restricted-syntax
    if (options.comment !== undefined) {
      command.comment = options.comment;
    }

    // The aggregate command always requires a cursor sub-document.
    command.cursor = options.cursor || {};
    if (options.batchSize && !this.hasWriteStage) {
      command.cursor.batchSize = options.batchSize;
    }

    super.executeCommand(server, session, command, callback);
  }
}

defineAspects(AggregateOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXPLAINABLE,
  Aspect.CURSOR_CREATING
]);
|
62
node_modules/mongodb/src/operations/bulk_write.ts
generated
vendored
Normal file
62
node_modules/mongodb/src/operations/bulk_write.ts
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
import type {
|
||||
AnyBulkWriteOperation,
|
||||
BulkOperationBase,
|
||||
BulkWriteOptions,
|
||||
BulkWriteResult
|
||||
} from '../bulk/common';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { AbstractOperation, Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @internal */
|
||||
export class BulkWriteOperation extends AbstractOperation<BulkWriteResult> {
|
||||
override options: BulkWriteOptions;
|
||||
collection: Collection;
|
||||
operations: AnyBulkWriteOperation[];
|
||||
|
||||
constructor(
|
||||
collection: Collection,
|
||||
operations: AnyBulkWriteOperation[],
|
||||
options: BulkWriteOptions
|
||||
) {
|
||||
super(options);
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
this.operations = operations;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<BulkWriteResult>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
const operations = this.operations;
|
||||
const options = { ...this.options, ...this.bsonOptions, readPreference: this.readPreference };
|
||||
|
||||
// Create the bulk operation
|
||||
const bulk: BulkOperationBase =
|
||||
options.ordered === false
|
||||
? coll.initializeUnorderedBulkOp(options)
|
||||
: coll.initializeOrderedBulkOp(options);
|
||||
|
||||
// for each op go through and add to the bulk
|
||||
try {
|
||||
for (let i = 0; i < operations.length; i++) {
|
||||
bulk.raw(operations[i]);
|
||||
}
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
// Execute the bulk
|
||||
bulk.execute({ ...options, session }).then(
|
||||
result => callback(undefined, result),
|
||||
error => callback(error)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(BulkWriteOperation, [Aspect.WRITE_OPERATION]);
|
50
node_modules/mongodb/src/operations/collections.ts
generated
vendored
Normal file
50
node_modules/mongodb/src/operations/collections.ts
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
import { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { AbstractOperation, OperationOptions } from './operation';
|
||||
|
||||
/** Options for listing the collections of a Db. */
export interface CollectionsOptions extends OperationOptions {
  // NOTE(review): execute() always forces nameOnly: true regardless of this flag.
  nameOnly?: boolean;
}
|
||||
|
||||
/** @internal */
|
||||
export class CollectionsOperation extends AbstractOperation<Collection[]> {
|
||||
override options: CollectionsOptions;
|
||||
db: Db;
|
||||
|
||||
constructor(db: Db, options: CollectionsOptions) {
|
||||
super(options);
|
||||
this.options = options;
|
||||
this.db = db;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Collection[]>
|
||||
): void {
|
||||
// Let's get the collection names
|
||||
this.db
|
||||
.listCollections(
|
||||
{},
|
||||
{ ...this.options, nameOnly: true, readPreference: this.readPreference, session }
|
||||
)
|
||||
.toArray()
|
||||
.then(
|
||||
documents => {
|
||||
const collections = [];
|
||||
for (const { name } of documents) {
|
||||
if (!name.includes('$')) {
|
||||
// Filter collections removing any illegal ones
|
||||
collections.push(new Collection(this.db, name, this.db.s.options));
|
||||
}
|
||||
}
|
||||
// Return the collection objects
|
||||
callback(undefined, collections);
|
||||
},
|
||||
error => callback(error)
|
||||
);
|
||||
}
|
||||
}
|
159
node_modules/mongodb/src/operations/command.ts
generated
vendored
Normal file
159
node_modules/mongodb/src/operations/command.ts
generated
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
import type { BSONSerializeOptions, Document } from '../bson';
|
||||
import { MongoInvalidArgumentError } from '../error';
|
||||
import { Explain, ExplainOptions } from '../explain';
|
||||
import { ReadConcern } from '../read_concern';
|
||||
import type { ReadPreference } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import { MIN_SECONDARY_WRITE_WIRE_VERSION } from '../sdam/server_selection';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import {
|
||||
Callback,
|
||||
commandSupportsReadConcern,
|
||||
decorateWithExplain,
|
||||
maxWireVersion,
|
||||
MongoDBNamespace
|
||||
} from '../utils';
|
||||
import { WriteConcern, WriteConcernOptions } from '../write_concern';
|
||||
import type { ReadConcernLike } from './../read_concern';
|
||||
import { AbstractOperation, Aspect, OperationOptions } from './operation';
|
||||
|
||||
/**
 * Language-specific string-comparison rules, mirroring the server's
 * collation document (only `locale` is required).
 * @public
 */
export interface CollationOptions {
  locale: string;
  caseLevel?: boolean;
  caseFirst?: string;
  strength?: number;
  numericOrdering?: boolean;
  alternate?: string;
  maxVariable?: string;
  backwards?: boolean;
  normalization?: boolean;
}
|
||||
|
||||
/** @public */
export interface CommandOperationOptions
  extends OperationOptions,
    WriteConcernOptions,
    ExplainOptions {
  /** Specify a read concern and level for the collection. (only MongoDB 3.2 or higher supported) */
  readConcern?: ReadConcernLike;
  /** Collation */
  collation?: CollationOptions;
  /** Maximum time in milliseconds the server should allow the command to run. */
  maxTimeMS?: number;
  /**
   * Comment to apply to the operation.
   *
   * In server versions pre-4.4, 'comment' must be string. A server
   * error will be thrown if any other type is provided.
   *
   * In server versions 4.4 and above, 'comment' can be any valid BSON type.
   */
  comment?: unknown;
  /** Should retry failed writes */
  retryWrites?: boolean;

  // Admin command overrides.
  /** Run the command against this database instead of the parent's. */
  dbName?: string;
  /** Authentication database override. */
  authdb?: string;
  /** When true, the server is not asked for a reply. */
  noResponse?: boolean;
}
|
||||
|
||||
/**
 * Anything that can parent a command operation: supplies the namespace plus
 * inheritable read/write concerns, read preference, and BSON options.
 * @internal
 */
export interface OperationParent {
  s: { namespace: MongoDBNamespace };
  readConcern?: ReadConcern;
  writeConcern?: WriteConcern;
  readPreference?: ReadPreference;
  bsonOptions?: BSONSerializeOptions;
}
|
||||
|
||||
/**
 * Base class for operations that are realized as a single server command.
 * Resolves the `$cmd` namespace from the parent (or an explicit db override)
 * and decorates the outgoing command with concerns, collation, maxTimeMS,
 * and explain before dispatch.
 * @internal
 */
export abstract class CommandOperation<T> extends AbstractOperation<T> {
  override options: CommandOperationOptions;
  readConcern?: ReadConcern;
  writeConcern?: WriteConcern;
  explain?: Explain;

  constructor(parent?: OperationParent, options?: CommandOperationOptions) {
    super(options);
    this.options = options ?? {};

    // NOTE: this was explicitly added for the add/remove user operations, it's likely
    // something we'd want to reconsider. Perhaps those commands can use `Admin`
    // as a parent?
    const dbNameOverride = options?.dbName || options?.authdb;
    if (dbNameOverride) {
      this.ns = new MongoDBNamespace(dbNameOverride, '$cmd');
    } else {
      // No parent means an admin-level command.
      this.ns = parent
        ? parent.s.namespace.withCollection('$cmd')
        : new MongoDBNamespace('admin', '$cmd');
    }

    this.readConcern = ReadConcern.fromOptions(options);
    this.writeConcern = WriteConcern.fromOptions(options);

    // `explain` is only honored on operations flagged EXPLAINABLE; otherwise
    // passing it is a caller error.
    if (this.hasAspect(Aspect.EXPLAINABLE)) {
      this.explain = Explain.fromOptions(options);
    } else if (options?.explain != null) {
      throw new MongoInvalidArgumentError(`Option "explain" is not supported on this command`);
    }
  }

  // Explained commands are never retried as writes.
  override get canRetryWrite(): boolean {
    if (this.hasAspect(Aspect.EXPLAINABLE)) {
      return this.explain == null;
    }
    return true;
  }

  /**
   * Decorates `cmd` with read/write concern, collation, maxTimeMS, and explain
   * as applicable, then sends it to `server`.
   */
  executeCommand(
    server: Server,
    session: ClientSession | undefined,
    cmd: Document,
    callback: Callback
  ): void {
    // TODO: consider making this a non-enumerable property
    this.server = server;

    const options = {
      ...this.options,
      ...this.bsonOptions,
      readPreference: this.readPreference,
      session
    };

    const serverWireVersion = maxWireVersion(server);
    const inTransaction = this.session && this.session.inTransaction();

    // Per-command concerns must not be sent inside a transaction.
    if (this.readConcern && commandSupportsReadConcern(cmd) && !inTransaction) {
      Object.assign(cmd, { readConcern: this.readConcern });
    }

    // Old servers cannot accept a read preference on a secondary write attempt.
    if (this.trySecondaryWrite && serverWireVersion < MIN_SECONDARY_WRITE_WIRE_VERSION) {
      options.omitReadPreference = true;
    }

    if (this.writeConcern && this.hasAspect(Aspect.WRITE_OPERATION) && !inTransaction) {
      Object.assign(cmd, { writeConcern: this.writeConcern });
    }

    if (
      options.collation &&
      typeof options.collation === 'object' &&
      !this.hasAspect(Aspect.SKIP_COLLATION)
    ) {
      Object.assign(cmd, { collation: options.collation });
    }

    if (typeof options.maxTimeMS === 'number') {
      cmd.maxTimeMS = options.maxTimeMS;
    }

    // Wrapping in `explain` must happen last so it captures the full command.
    if (this.hasAspect(Aspect.EXPLAINABLE) && this.explain) {
      cmd = decorateWithExplain(cmd, this.explain);
    }

    server.command(this.ns, cmd, options, callback);
  }
}
|
105
node_modules/mongodb/src/operations/common_functions.ts
generated
vendored
Normal file
105
node_modules/mongodb/src/operations/common_functions.ts
generated
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import { MongoTopologyClosedError } from '../error';
|
||||
import type { ReadPreference } from '../read_preference';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, getTopology } from '../utils';
|
||||
|
||||
/** @public */
export interface IndexInformationOptions {
  /** When true, return the full index documents instead of the condensed name → key-pairs map. */
  full?: boolean;
  readPreference?: ReadPreference;
  session?: ClientSession;
}
|
||||
/**
 * Retrieves this collections index info.
 *
 * Condenses each index into `{ [indexName]: [[field, direction], ...] }`
 * unless `options.full` is set, in which case the raw index documents are
 * returned.
 *
 * @param db - The Db instance on which to retrieve the index info.
 * @param name - The name of the collection.
 */
export function indexInformation(db: Db, name: string, callback: Callback): void;
export function indexInformation(
  db: Db,
  name: string,
  options: IndexInformationOptions,
  callback?: Callback
): void;
export function indexInformation(
  db: Db,
  name: string,
  _optionsOrCallback: IndexInformationOptions | Callback,
  _callback?: Callback
): void {
  // Support the (db, name, callback) overload by shuffling arguments.
  let options = _optionsOrCallback as IndexInformationOptions;
  let callback = _callback as Callback;
  if ('function' === typeof _optionsOrCallback) {
    callback = _optionsOrCallback;
    options = {};
  }
  // If we specified full information
  const full = options.full == null ? false : options.full;

  let topology;
  try {
    topology = getTopology(db);
  } catch (error) {
    return callback(error);
  }

  // Did the user destroy the topology
  if (topology.isDestroyed()) return callback(new MongoTopologyClosedError());
  // Process all the results from the index command and collection
  function processResults(indexes: any) {
    // Contains all the information
    const info: any = {};
    // Process all the indexes
    for (let i = 0; i < indexes.length; i++) {
      const index = indexes[i];
      // Let's unpack the object: map index name -> [field, direction] pairs
      info[index.name] = [];
      for (const name in index.key) {
        info[index.name].push([name, index.key[name]]);
      }
    }

    return info;
  }

  // Get the list of indexes of the specified collection
  db.collection(name)
    .listIndexes(options)
    .toArray()
    .then(
      indexes => {
        if (!Array.isArray(indexes)) return callback(undefined, []);
        if (full) return callback(undefined, indexes);
        callback(undefined, processResults(indexes));
      },
      error => callback(error)
    );
}
|
||||
|
||||
export function prepareDocs(
|
||||
coll: Collection,
|
||||
docs: Document[],
|
||||
options: { forceServerObjectId?: boolean }
|
||||
): Document[] {
|
||||
const forceServerObjectId =
|
||||
typeof options.forceServerObjectId === 'boolean'
|
||||
? options.forceServerObjectId
|
||||
: coll.s.db.options?.forceServerObjectId;
|
||||
|
||||
// no need to modify the docs if server sets the ObjectId
|
||||
if (forceServerObjectId === true) {
|
||||
return docs;
|
||||
}
|
||||
|
||||
return docs.map(doc => {
|
||||
if (doc._id == null) {
|
||||
doc._id = coll.s.pkFactory.createPk();
|
||||
}
|
||||
|
||||
return doc;
|
||||
});
|
||||
}
|
68
node_modules/mongodb/src/operations/count.ts
generated
vendored
Normal file
68
node_modules/mongodb/src/operations/count.ts
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback, MongoDBNamespace } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public */
export interface CountOptions extends CommandOperationOptions {
  /** The number of documents to skip. */
  skip?: number;
  /** The maximum amounts to count before aborting. */
  limit?: number;
  /** Number of milliseconds to wait before aborting the query. */
  maxTimeMS?: number;
  /** An index name hint for the query. */
  hint?: string | Document;
}
|
||||
|
||||
/** @internal */
|
||||
export class CountOperation extends CommandOperation<number> {
|
||||
override options: CountOptions;
|
||||
collectionName?: string;
|
||||
query: Document;
|
||||
|
||||
constructor(namespace: MongoDBNamespace, filter: Document, options: CountOptions) {
|
||||
super({ s: { namespace: namespace } } as unknown as Collection, options);
|
||||
|
||||
this.options = options;
|
||||
this.collectionName = namespace.collection;
|
||||
this.query = filter;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<number>
|
||||
): void {
|
||||
const options = this.options;
|
||||
const cmd: Document = {
|
||||
count: this.collectionName,
|
||||
query: this.query
|
||||
};
|
||||
|
||||
if (typeof options.limit === 'number') {
|
||||
cmd.limit = options.limit;
|
||||
}
|
||||
|
||||
if (typeof options.skip === 'number') {
|
||||
cmd.skip = options.skip;
|
||||
}
|
||||
|
||||
if (options.hint != null) {
|
||||
cmd.hint = options.hint;
|
||||
}
|
||||
|
||||
if (typeof options.maxTimeMS === 'number') {
|
||||
cmd.maxTimeMS = options.maxTimeMS;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, cmd, (err, result) => {
|
||||
callback(err, result ? result.n : 0);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(CountOperation, [Aspect.READ_OPERATION, Aspect.RETRYABLE]);
|
57
node_modules/mongodb/src/operations/count_documents.ts
generated
vendored
Normal file
57
node_modules/mongodb/src/operations/count_documents.ts
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { AggregateOperation, AggregateOptions } from './aggregate';
|
||||
|
||||
/** @public */
export interface CountDocumentsOptions extends AggregateOptions {
  /** The number of documents to skip. */
  skip?: number;
  /** The maximum amounts to count before aborting. */
  limit?: number;
}
|
||||
|
||||
/** @internal */
|
||||
export class CountDocumentsOperation extends AggregateOperation<number> {
|
||||
constructor(collection: Collection, query: Document, options: CountDocumentsOptions) {
|
||||
const pipeline = [];
|
||||
pipeline.push({ $match: query });
|
||||
|
||||
if (typeof options.skip === 'number') {
|
||||
pipeline.push({ $skip: options.skip });
|
||||
}
|
||||
|
||||
if (typeof options.limit === 'number') {
|
||||
pipeline.push({ $limit: options.limit });
|
||||
}
|
||||
|
||||
pipeline.push({ $group: { _id: 1, n: { $sum: 1 } } });
|
||||
|
||||
super(collection.s.namespace, pipeline, options);
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<number>
|
||||
): void {
|
||||
super.execute(server, session, (err, result) => {
|
||||
if (err || !result) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
|
||||
// NOTE: We're avoiding creating a cursor here to reduce the callstack.
|
||||
const response = result as unknown as Document;
|
||||
if (response.cursor == null || response.cursor.firstBatch == null) {
|
||||
callback(undefined, 0);
|
||||
return;
|
||||
}
|
||||
|
||||
const docs = response.cursor.firstBatch;
|
||||
callback(undefined, docs.length ? docs[0].n : 0);
|
||||
});
|
||||
}
|
||||
}
|
215
node_modules/mongodb/src/operations/create_collection.ts
generated
vendored
Normal file
215
node_modules/mongodb/src/operations/create_collection.ts
generated
vendored
Normal file
@@ -0,0 +1,215 @@
|
||||
import type { Document } from '../bson';
|
||||
import {
|
||||
MIN_SUPPORTED_QE_SERVER_VERSION,
|
||||
MIN_SUPPORTED_QE_WIRE_VERSION
|
||||
} from '../cmap/wire_protocol/constants';
|
||||
import { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import { MongoCompatibilityError } from '../error';
|
||||
import type { PkFactory } from '../mongo_client';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { CreateIndexOperation } from './indexes';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
const ILLEGAL_COMMAND_FIELDS = new Set([
|
||||
'w',
|
||||
'wtimeout',
|
||||
'j',
|
||||
'fsync',
|
||||
'autoIndexId',
|
||||
'pkFactory',
|
||||
'raw',
|
||||
'readPreference',
|
||||
'session',
|
||||
'readConcern',
|
||||
'writeConcern',
|
||||
'raw',
|
||||
'fieldsAsRaw',
|
||||
'useBigInt64',
|
||||
'promoteLongs',
|
||||
'promoteValues',
|
||||
'promoteBuffers',
|
||||
'bsonRegExp',
|
||||
'serializeFunctions',
|
||||
'ignoreUndefined',
|
||||
'enableUtf8Validation'
|
||||
]);
|
||||
|
||||
/** @public
 * Configuration options for timeseries collections
 * @see https://www.mongodb.com/docs/manual/core/timeseries-collections/
 */
export interface TimeSeriesCollectionOptions extends Document {
  /** The field each measurement's timestamp is read from (required). */
  timeField: string;
  /** Optional field used to group related measurements. */
  metaField?: string;
  /** Bucketing granularity; see the linked server docs for valid values and interaction with the bucket*Seconds options. */
  granularity?: 'seconds' | 'minutes' | 'hours' | string;
  bucketMaxSpanSeconds?: number;
  bucketRoundingSeconds?: number;
}
|
||||
|
||||
/** @public
 * Configuration options for clustered collections
 * @see https://www.mongodb.com/docs/manual/core/clustered-collections/
 */
export interface ClusteredCollectionOptions extends Document {
  /** Optional name for the clustered index. */
  name?: string;
  /** The clustered index key specification. */
  key: Document;
  unique: boolean;
}
|
||||
|
||||
/** @public
 * Options accepted when creating a collection. Any option not consumed by the
 * driver (see ILLEGAL_COMMAND_FIELDS) is forwarded directly on the `create` command.
 */
export interface CreateCollectionOptions extends CommandOperationOptions {
  /** Create a capped collection */
  capped?: boolean;
  /** @deprecated Create an index on the _id field of the document. This option is deprecated in MongoDB 3.2+ and will be removed once no longer supported by the server. */
  autoIndexId?: boolean;
  /** The size of the capped collection in bytes */
  size?: number;
  /** The maximum number of documents in the capped collection */
  max?: number;
  /** Available for the MMAPv1 storage engine only to set the usePowerOf2Sizes and the noPadding flag */
  flags?: number;
  /** Allows users to specify configuration to the storage engine on a per-collection basis when creating a collection */
  storageEngine?: Document;
  /** Allows users to specify validation rules or expressions for the collection. For more information, see Document Validation */
  validator?: Document;
  /** Determines how strictly MongoDB applies the validation rules to existing documents during an update */
  validationLevel?: string;
  /** Determines whether to error on invalid documents or just warn about the violations but allow invalid documents to be inserted */
  validationAction?: string;
  /** Allows users to specify a default configuration for indexes when creating a collection */
  indexOptionDefaults?: Document;
  /** The name of the source collection or view from which to create the view. The name is not the full namespace of the collection or view (i.e., does not include the database name and implies the same database as the view to create) */
  viewOn?: string;
  /** An array that consists of the aggregation pipeline stage. Creates the view by applying the specified pipeline to the viewOn collection or view */
  pipeline?: Document[];
  /** A primary key factory function for generation of custom _id keys. */
  pkFactory?: PkFactory;
  /** A document specifying configuration options for timeseries collections. */
  timeseries?: TimeSeriesCollectionOptions;
  /** A document specifying configuration options for clustered collections. For MongoDB 5.3 and above. */
  clusteredIndex?: ClusteredCollectionOptions;
  /** The number of seconds after which a document in a timeseries or clustered collection expires. */
  expireAfterSeconds?: number;
  /** @experimental Queryable Encryption configuration; when present, auxiliary ESC/ECOC collections are created as well. */
  encryptedFields?: Document;
  /**
   * If set, enables pre-update and post-update document events to be included for any
   * change streams that listen on this collection.
   */
  changeStreamPreAndPostImages?: { enabled: boolean };
}
|
||||
|
||||
/** @internal Error text used when the connected server is too old for Queryable Encryption. */
const INVALID_QE_VERSION =
  'Driver support of Queryable Encryption is incompatible with server. Upgrade server to use Queryable Encryption.';
|
||||
|
||||
/** @internal
 * Creates a collection. When Queryable Encryption (`encryptedFields`) applies,
 * the auxiliary ESC/ECOC collections are created first, then the main
 * collection, then the `__safeContent__` index — in that order.
 */
export class CreateCollectionOperation extends CommandOperation<Collection> {
  override options: CreateCollectionOptions;
  db: Db;
  name: string;

  constructor(db: Db, name: string, options: CreateCollectionOptions = {}) {
    super(db, options);

    this.options = options;
    this.db = db;
    this.name = name;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<Collection>
  ): void {
    (async () => {
      const db = this.db;
      const name = this.name;
      const options = this.options;

      // Explicit option wins; otherwise consult the client's encryptedFieldsMap
      // keyed by the full "<db>.<collection>" namespace.
      const encryptedFields: Document | undefined =
        options.encryptedFields ??
        db.client.options.autoEncryption?.encryptedFieldsMap?.[`${db.databaseName}.${name}`];

      if (encryptedFields) {
        // Creating a QE collection requires a minimum server wire version (7.0.0).
        if (server.description.maxWireVersion < MIN_SUPPORTED_QE_WIRE_VERSION) {
          throw new MongoCompatibilityError(
            `${INVALID_QE_VERSION} The minimum server version required is ${MIN_SUPPORTED_QE_SERVER_VERSION}`
          );
        }
        // Create auxiliary collections for queryable encryption support,
        // defaulting their names per the enxcol_ convention.
        const escCollection = encryptedFields.escCollection ?? `enxcol_.${name}.esc`;
        const ecocCollection = encryptedFields.ecocCollection ?? `enxcol_.${name}.ecoc`;

        for (const collectionName of [escCollection, ecocCollection]) {
          // Auxiliary collections are clustered on _id; the recursive operation
          // skips the encryptedFields path via executeWithoutEncryptedFieldsCheck.
          const createOp = new CreateCollectionOperation(db, collectionName, {
            clusteredIndex: {
              key: { _id: 1 },
              unique: true
            }
          });
          await createOp.executeWithoutEncryptedFieldsCheck(server, session);
        }

        if (!options.encryptedFields) {
          // Persist the map-derived encryptedFields onto the options so the
          // main create command below includes them.
          this.options = { ...this.options, encryptedFields };
        }
      }

      const coll = await this.executeWithoutEncryptedFieldsCheck(server, session);

      if (encryptedFields) {
        // Create the required index for queryable encryption support.
        const createIndexOp = new CreateIndexOperation(db, name, { __safeContent__: 1 }, {});
        await new Promise<void>((resolve, reject) => {
          createIndexOp.execute(server, session, err => (err ? reject(err) : resolve()));
        });
      }

      return coll;
    })().then(
      coll => callback(undefined, coll),
      err => callback(err)
    );
  }

  // Runs the bare `create` command, copying over every user option that is not
  // driver-internal (see ILLEGAL_COMMAND_FIELDS), and resolves to a Collection handle.
  private executeWithoutEncryptedFieldsCheck(
    server: Server,
    session: ClientSession | undefined
  ): Promise<Collection> {
    return new Promise<Collection>((resolve, reject) => {
      const db = this.db;
      const name = this.name;
      const options = this.options;

      const done: Callback = err => {
        if (err) {
          return reject(err);
        }

        resolve(new Collection(db, name, options));
      };

      const cmd: Document = { create: name };
      for (const n in options) {
        if (
          (options as any)[n] != null &&
          typeof (options as any)[n] !== 'function' &&
          !ILLEGAL_COMMAND_FIELDS.has(n)
        ) {
          cmd[n] = (options as any)[n];
        }
      }

      // otherwise just execute the command
      super.executeCommand(server, session, cmd, done);
    });
  }
}

defineAspects(CreateCollectionOperation, [Aspect.WRITE_OPERATION]);
|
174
node_modules/mongodb/src/operations/delete.ts
generated
vendored
Normal file
174
node_modules/mongodb/src/operations/delete.ts
generated
vendored
Normal file
@@ -0,0 +1,174 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoCompatibilityError, MongoServerError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback, MongoDBNamespace } from '../utils';
|
||||
import type { WriteConcernOptions } from '../write_concern';
|
||||
import { CollationOptions, CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects, Hint } from './operation';
|
||||
|
||||
/** @public
 * Options for delete commands (deleteOne/deleteMany).
 */
export interface DeleteOptions extends CommandOperationOptions, WriteConcernOptions {
  /** If true, when an insert fails, don't execute the remaining writes. If false, continue with remaining inserts when one fails. */
  ordered?: boolean;
  /** Specifies the collation to use for the operation */
  collation?: CollationOptions;
  /** Specify that the update query should only consider plans using the hinted index */
  hint?: string | Document;
  /** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
  let?: Document;
}
|
||||
|
||||
/** @public
 * The result shape returned to callers of deleteOne/deleteMany.
 */
export interface DeleteResult {
  /** Indicates whether this write result was acknowledged. If not, then all other members of this result will be undefined. */
  acknowledged: boolean;
  /** The number of documents that were deleted */
  deletedCount: number;
}
|
||||
|
||||
/** @public
 * One entry of the `deletes` array in the server's `delete` command.
 */
export interface DeleteStatement {
  /** The query that matches documents to delete. */
  q: Document;
  /** The number of matching documents to delete. A limit of 0 deletes all matching documents. */
  limit: number;
  /** Specifies the collation to use for the operation. */
  collation?: CollationOptions;
  /** A document or string that specifies the index to use to support the query predicate. */
  hint?: Hint;
}
|
||||
|
||||
/** @internal
 * Base operation that assembles and runs the `delete` command for a set of
 * delete statements. Subclasses map the raw server response to DeleteResult.
 */
export class DeleteOperation extends CommandOperation<DeleteResult> {
  override options: DeleteOptions;
  statements: DeleteStatement[];

  constructor(ns: MongoDBNamespace, statements: DeleteStatement[], options: DeleteOptions) {
    super(undefined, options);
    this.options = options;
    this.ns = ns;
    this.statements = statements;
  }

  // A delete is retryable only if the base operation allows it AND every
  // statement is limited (limit > 0); an unlimited (multi) delete is not.
  override get canRetryWrite(): boolean {
    if (super.canRetryWrite === false) {
      return false;
    }

    return this.statements.every(op => (op.limit != null ? op.limit > 0 : true));
  }

  override execute(server: Server, session: ClientSession | undefined, callback: Callback): void {
    const options = this.options ?? {};
    // ordered defaults to true unless the caller passed an explicit boolean.
    const ordered = typeof options.ordered === 'boolean' ? options.ordered : true;
    const command: Document = {
      delete: this.ns.collection,
      deletes: this.statements,
      ordered
    };

    if (options.let) {
      command.let = options.let;
    }

    // we check for undefined specifically here to allow falsy values
    // eslint-disable-next-line no-restricted-syntax
    if (options.comment !== undefined) {
      command.comment = options.comment;
    }

    // `w: 0` writes get no server response, so a hint cannot be validated there.
    const unacknowledgedWrite = this.writeConcern && this.writeConcern.w === 0;
    if (unacknowledgedWrite) {
      if (this.statements.find((o: Document) => o.hint)) {
        // TODO(NODE-3541): fix error for hint with unacknowledged writes
        callback(new MongoCompatibilityError(`hint is not supported with unacknowledged writes`));
        return;
      }
    }

    super.executeCommand(server, session, command, callback);
  }
}
|
||||
|
||||
export class DeleteOneOperation extends DeleteOperation {
|
||||
constructor(collection: Collection, filter: Document, options: DeleteOptions) {
|
||||
super(collection.s.namespace, [makeDeleteStatement(filter, { ...options, limit: 1 })], options);
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<DeleteResult>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || res == null) return callback(err);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) return callback(new MongoServerError(res.writeErrors[0]));
|
||||
if (this.explain) return callback(undefined, res);
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
deletedCount: res.n
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class DeleteManyOperation extends DeleteOperation {
|
||||
constructor(collection: Collection, filter: Document, options: DeleteOptions) {
|
||||
super(collection.s.namespace, [makeDeleteStatement(filter, options)], options);
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<DeleteResult>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || res == null) return callback(err);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) return callback(new MongoServerError(res.writeErrors[0]));
|
||||
if (this.explain) return callback(undefined, res);
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
deletedCount: res.n
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function makeDeleteStatement(
|
||||
filter: Document,
|
||||
options: DeleteOptions & { limit?: number }
|
||||
): DeleteStatement {
|
||||
const op: DeleteStatement = {
|
||||
q: filter,
|
||||
limit: typeof options.limit === 'number' ? options.limit : 0
|
||||
};
|
||||
|
||||
if (options.collation) {
|
||||
op.collation = options.collation;
|
||||
}
|
||||
|
||||
if (options.hint) {
|
||||
op.hint = options.hint;
|
||||
}
|
||||
|
||||
return op;
|
||||
}
|
||||
|
||||
// Aspect registration. DeleteOperation carries RETRYABLE, but its canRetryWrite
// getter still refuses retries for unlimited statements.
defineAspects(DeleteOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION]);
defineAspects(DeleteOneOperation, [
  Aspect.RETRYABLE,
  Aspect.WRITE_OPERATION,
  Aspect.EXPLAINABLE,
  Aspect.SKIP_COLLATION
]);
// NOTE(review): DeleteManyOperation deliberately lacks Aspect.RETRYABLE —
// presumably because multi-document deletes are not retryable writes; confirm
// against the retryable-writes specification.
defineAspects(DeleteManyOperation, [
  Aspect.WRITE_OPERATION,
  Aspect.EXPLAINABLE,
  Aspect.SKIP_COLLATION
]);
|
90
node_modules/mongodb/src/operations/distinct.ts
generated
vendored
Normal file
90
node_modules/mongodb/src/operations/distinct.ts
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, decorateWithCollation, decorateWithReadConcern } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public Options for the distinct command; no options beyond the generic command options. */
export type DistinctOptions = CommandOperationOptions;
|
||||
|
||||
/**
 * Return a list of distinct values for the given key across a collection.
 * @internal
 */
export class DistinctOperation extends CommandOperation<any[]> {
  override options: DistinctOptions;
  collection: Collection;
  /** Field of the document to find distinct values for. */
  key: string;
  /** The query for filtering the set of documents to which we apply the distinct filter. */
  query: Document;

  /**
   * Construct a Distinct operation.
   *
   * @param collection - Collection instance.
   * @param key - Field of the document to find distinct values for.
   * @param query - The query for filtering the set of documents to which we apply the distinct filter.
   * @param options - Optional settings. See Collection.prototype.distinct for a list of options.
   */
  constructor(collection: Collection, key: string, query: Document, options?: DistinctOptions) {
    super(collection, options);

    this.options = options ?? {};
    this.collection = collection;
    this.key = key;
    this.query = query;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<any[]>
  ): void {
    const coll = this.collection;
    const key = this.key;
    const query = this.query;
    const options = this.options;

    // Distinct command
    const cmd: Document = {
      distinct: coll.collectionName,
      key: key,
      query: query
    };

    // Add maxTimeMS if defined
    if (typeof options.maxTimeMS === 'number') {
      cmd.maxTimeMS = options.maxTimeMS;
    }

    // we check for undefined specifically here to allow falsy values
    // eslint-disable-next-line no-restricted-syntax
    if (typeof options.comment !== 'undefined') {
      cmd.comment = options.comment;
    }

    // Do we have a readConcern specified
    decorateWithReadConcern(cmd, coll, options);

    // Have we specified collation; decorateWithCollation throws for servers
    // that do not support collation, hence the try/catch -> callback(err).
    try {
      decorateWithCollation(cmd, coll, options);
    } catch (err) {
      return callback(err);
    }

    super.executeCommand(server, session, cmd, (err, result) => {
      if (err) {
        callback(err);
        return;
      }

      // Explain returns the raw plan; otherwise unwrap the `values` array.
      callback(undefined, this.explain ? result : result.values);
    });
  }
}

defineAspects(DistinctOperation, [Aspect.READ_OPERATION, Aspect.RETRYABLE, Aspect.EXPLAINABLE]);
|
119
node_modules/mongodb/src/operations/drop.ts
generated
vendored
Normal file
119
node_modules/mongodb/src/operations/drop.ts
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Db } from '../db';
|
||||
import { MONGODB_ERROR_CODES, MongoServerError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public
 * Options for dropping a collection.
 */
export interface DropCollectionOptions extends CommandOperationOptions {
  /** @experimental Queryable Encryption configuration; when applicable, auxiliary ESC/ECOC collections are dropped as well. */
  encryptedFields?: Document;
}
|
||||
|
||||
/** @internal
 * Drops a collection. When Queryable Encryption applies, the auxiliary
 * ESC/ECOC collections are dropped first (ignoring NamespaceNotFound), then
 * the main collection.
 */
export class DropCollectionOperation extends CommandOperation<boolean> {
  override options: DropCollectionOptions;
  db: Db;
  name: string;

  constructor(db: Db, name: string, options: DropCollectionOptions = {}) {
    super(db, options);
    this.db = db;
    this.options = options;
    this.name = name;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<boolean>
  ): void {
    (async () => {
      const db = this.db;
      const options = this.options;
      const name = this.name;

      const encryptedFieldsMap = db.client.options.autoEncryption?.encryptedFieldsMap;
      // Explicit option wins; otherwise consult the client's map keyed by "<db>.<collection>".
      let encryptedFields: Document | undefined =
        options.encryptedFields ?? encryptedFieldsMap?.[`${db.databaseName}.${name}`];

      if (!encryptedFields && encryptedFieldsMap) {
        // If the MongoClient was configured with an encryptedFieldsMap,
        // and no encryptedFields config was available in it or explicitly
        // passed as an argument, the spec tells us to look one up using
        // listCollections().
        const listCollectionsResult = await db
          .listCollections({ name }, { nameOnly: false })
          .toArray();
        encryptedFields = listCollectionsResult?.[0]?.options?.encryptedFields;
      }

      if (encryptedFields) {
        const escCollection = encryptedFields.escCollection || `enxcol_.${name}.esc`;
        const ecocCollection = encryptedFields.ecocCollection || `enxcol_.${name}.ecoc`;

        for (const collectionName of [escCollection, ecocCollection]) {
          // Drop auxiliary collections, ignoring potential NamespaceNotFound errors.
          const dropOp = new DropCollectionOperation(db, collectionName);
          try {
            await dropOp.executeWithoutEncryptedFieldsCheck(server, session);
          } catch (err) {
            if (
              !(err instanceof MongoServerError) ||
              err.code !== MONGODB_ERROR_CODES.NamespaceNotFound
            ) {
              throw err;
            }
          }
        }
      }

      return this.executeWithoutEncryptedFieldsCheck(server, session);
    })().then(
      result => callback(undefined, result),
      err => callback(err)
    );
  }

  // Runs the bare `drop` command and resolves to the truthiness of `result.ok`.
  private executeWithoutEncryptedFieldsCheck(
    server: Server,
    session: ClientSession | undefined
  ): Promise<boolean> {
    return new Promise<boolean>((resolve, reject) => {
      super.executeCommand(server, session, { drop: this.name }, (err, result) => {
        if (err) return reject(err);
        resolve(!!result.ok);
      });
    });
  }
}
|
||||
|
||||
/** @public Options for dropping a database; no options beyond the generic command options. */
export type DropDatabaseOptions = CommandOperationOptions;
|
||||
|
||||
/** @internal */
|
||||
export class DropDatabaseOperation extends CommandOperation<boolean> {
|
||||
override options: DropDatabaseOptions;
|
||||
|
||||
constructor(db: Db, options: DropDatabaseOptions) {
|
||||
super(db, options);
|
||||
this.options = options;
|
||||
}
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<boolean>
|
||||
): void {
|
||||
super.executeCommand(server, session, { dropDatabase: 1 }, (err, result) => {
|
||||
if (err) return callback(err);
|
||||
if (result.ok) return callback(undefined, true);
|
||||
callback(undefined, false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Both drop operations are registered as write operations.
defineAspects(DropCollectionOperation, [Aspect.WRITE_OPERATION]);
defineAspects(DropDatabaseOperation, [Aspect.WRITE_OPERATION]);
|
62
node_modules/mongodb/src/operations/estimated_document_count.ts
generated
vendored
Normal file
62
node_modules/mongodb/src/operations/estimated_document_count.ts
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public
 * Options for estimatedDocumentCount.
 */
export interface EstimatedDocumentCountOptions extends CommandOperationOptions {
  /**
   * The maximum amount of time to allow the operation to run.
   *
   * This option is sent only if the caller explicitly provides a value. The default is to not send a value.
   */
  maxTimeMS?: number;
}
|
||||
|
||||
/** @internal */
|
||||
export class EstimatedDocumentCountOperation extends CommandOperation<number> {
|
||||
override options: EstimatedDocumentCountOptions;
|
||||
collectionName: string;
|
||||
|
||||
constructor(collection: Collection, options: EstimatedDocumentCountOptions = {}) {
|
||||
super(collection, options);
|
||||
this.options = options;
|
||||
this.collectionName = collection.collectionName;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<number>
|
||||
): void {
|
||||
const cmd: Document = { count: this.collectionName };
|
||||
|
||||
if (typeof this.options.maxTimeMS === 'number') {
|
||||
cmd.maxTimeMS = this.options.maxTimeMS;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (this.options.comment !== undefined) {
|
||||
cmd.comment = this.options.comment;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, cmd, (err, response) => {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(undefined, response?.n || 0);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Registered as a retryable read; it also carries the CURSOR_CREATING aspect.
defineAspects(EstimatedDocumentCountOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.CURSOR_CREATING
]);
|
82
node_modules/mongodb/src/operations/eval.ts
generated
vendored
Normal file
82
node_modules/mongodb/src/operations/eval.ts
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
import { Code, Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import { MongoServerError } from '../error';
|
||||
import { ReadPreference } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
|
||||
/** @public
 * Options for the legacy `$eval` command.
 */
export interface EvalOptions extends CommandOperationOptions {
  /** When truthy, forwarded as the `nolock` field of the $eval command. */
  nolock?: boolean;
}
|
||||
|
||||
/** @internal
 * Runs the legacy server-side `$eval` command. The read preference is forced
 * to primary and made non-writable via Object.defineProperty.
 */
export class EvalOperation extends CommandOperation<Document> {
  override options: EvalOptions;
  code: Code;
  parameters?: Document | Document[];

  constructor(
    db: Db | Collection,
    code: Code,
    parameters?: Document | Document[],
    options?: EvalOptions
  ) {
    super(db, options);

    this.options = options ?? {};
    this.code = code;
    this.parameters = parameters;
    // force primary read preference (frozen so later assignment cannot change it)
    Object.defineProperty(this, 'readPreference', {
      value: ReadPreference.primary,
      configurable: false,
      writable: false
    });
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<Document>
  ): void {
    let finalCode = this.code;
    let finalParameters: Document[] = [];

    // If not a code object translate to one
    if (!(finalCode && (finalCode as unknown as { _bsontype: string })._bsontype === 'Code')) {
      finalCode = new Code(finalCode as never);
    }

    // Ensure the parameters are correct: always pass the server an array.
    if (this.parameters != null && typeof this.parameters !== 'function') {
      finalParameters = Array.isArray(this.parameters) ? this.parameters : [this.parameters];
    }

    // Create execution selector
    const cmd: Document = { $eval: finalCode, args: finalParameters };

    // Check if the nolock parameter is passed in
    if (this.options.nolock) {
      cmd.nolock = this.options.nolock;
    }

    // Execute the command
    super.executeCommand(server, session, cmd, (err, result) => {
      if (err) return callback(err);
      // Success: unwrap the server's return value.
      if (result && result.ok === 1) {
        return callback(undefined, result.retval);
      }

      // Command completed but reported failure: surface errmsg as a server error.
      if (result) {
        callback(new MongoServerError({ message: `eval failed: ${result.errmsg}` }));
        return;
      }

      callback(err, result);
    });
  }
}
|
270
node_modules/mongodb/src/operations/execute_operation.ts
generated
vendored
Normal file
270
node_modules/mongodb/src/operations/execute_operation.ts
generated
vendored
Normal file
@@ -0,0 +1,270 @@
|
||||
import type { Document } from '../bson';
|
||||
import {
|
||||
isRetryableReadError,
|
||||
isRetryableWriteError,
|
||||
MongoCompatibilityError,
|
||||
MONGODB_ERROR_CODES,
|
||||
MongoError,
|
||||
MongoErrorLabel,
|
||||
MongoExpiredSessionError,
|
||||
MongoNetworkError,
|
||||
MongoNotConnectedError,
|
||||
MongoRuntimeError,
|
||||
MongoServerError,
|
||||
MongoTransactionError,
|
||||
MongoUnexpectedServerResponseError
|
||||
} from '../error';
|
||||
import type { MongoClient } from '../mongo_client';
|
||||
import { ReadPreference } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import {
|
||||
sameServerSelector,
|
||||
secondaryWritableServerSelector,
|
||||
ServerSelector
|
||||
} from '../sdam/server_selection';
|
||||
import type { Topology } from '../sdam/topology';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, maybeCallback, supportsRetryableWrites } from '../utils';
|
||||
import { AbstractOperation, Aspect } from './operation';
|
||||
|
||||
// Error code/message used to translate the server's IllegalOperation response
// into a clearer "deployment does not support retryable writes" error.
const MMAPv1_RETRY_WRITES_ERROR_CODE = MONGODB_ERROR_CODES.IllegalOperation;
const MMAPv1_RETRY_WRITES_ERROR_MESSAGE =
  'This MongoDB deployment does not support retryable writes. Please add retryWrites=false to your connection string.';
|
||||
|
||||
// Extracts the result type K from an AbstractOperation<K>, so executeOperation
// can infer its return type from the operation instance alone.
type ResultTypeFromOperation<TOperation> = TOperation extends AbstractOperation<infer K>
  ? K
  : never;
|
||||
|
||||
/** @internal
 * Bundle describing where and how an operation ran, plus the raw server reply.
 */
export interface ExecutionResult {
  /** The server selected for the operation */
  server: Server;
  /** The session used for this operation, may be implicitly created */
  session?: ClientSession;
  /** The raw server response for the operation */
  response: Document;
}
|
||||
|
||||
/**
 * Executes the given operation with provided arguments.
 * @internal
 *
 * @remarks
 * This method reduces large amounts of duplication in the entire codebase by providing
 * a single point for determining whether callbacks or promises should be used. Additionally
 * it allows for a single point of entry to provide features such as implicit sessions, which
 * are required by the Driver Sessions specification in the event that a ClientSession is
 * not provided
 *
 * @param topology - The topology to execute this operation on
 * @param operation - The operation to execute
 * @param callback - The command result callback
 */
export function executeOperation<
  T extends AbstractOperation<TResult>,
  TResult = ResultTypeFromOperation<T>
>(client: MongoClient, operation: T): Promise<TResult>;
export function executeOperation<
  T extends AbstractOperation<TResult>,
  TResult = ResultTypeFromOperation<T>
>(client: MongoClient, operation: T, callback: Callback<TResult>): void;
export function executeOperation<
  T extends AbstractOperation<TResult>,
  TResult = ResultTypeFromOperation<T>
>(client: MongoClient, operation: T, callback?: Callback<TResult>): Promise<TResult> | void;
export function executeOperation<
  T extends AbstractOperation<TResult>,
  TResult = ResultTypeFromOperation<T>
>(client: MongoClient, operation: T, callback?: Callback<TResult>): Promise<TResult> | void {
  // maybeCallback bridges the promise- and callback-based call styles declared
  // by the overloads above; the real work happens in executeOperationAsync.
  return maybeCallback(() => executeOperationAsync(client, operation), callback);
}
|
||||
|
||||
async function executeOperationAsync<
|
||||
T extends AbstractOperation<TResult>,
|
||||
TResult = ResultTypeFromOperation<T>
|
||||
>(client: MongoClient, operation: T): Promise<TResult> {
|
||||
if (!(operation instanceof AbstractOperation)) {
|
||||
// TODO(NODE-3483): Extend MongoRuntimeError
|
||||
throw new MongoRuntimeError('This method requires a valid operation instance');
|
||||
}
|
||||
|
||||
if (client.topology == null) {
|
||||
// Auto connect on operation
|
||||
if (client.s.hasBeenClosed) {
|
||||
throw new MongoNotConnectedError('Client must be connected before running operations');
|
||||
}
|
||||
client.s.options[Symbol.for('@@mdb.skipPingOnConnect')] = true;
|
||||
try {
|
||||
await client.connect();
|
||||
} finally {
|
||||
delete client.s.options[Symbol.for('@@mdb.skipPingOnConnect')];
|
||||
}
|
||||
}
|
||||
|
||||
const { topology } = client;
|
||||
if (topology == null) {
|
||||
throw new MongoRuntimeError('client.connect did not create a topology but also did not throw');
|
||||
}
|
||||
|
||||
// The driver sessions spec mandates that we implicitly create sessions for operations
|
||||
// that are not explicitly provided with a session.
|
||||
let session = operation.session;
|
||||
let owner: symbol | undefined;
|
||||
|
||||
if (session == null) {
|
||||
owner = Symbol();
|
||||
session = client.startSession({ owner, explicit: false });
|
||||
} else if (session.hasEnded) {
|
||||
throw new MongoExpiredSessionError('Use of expired sessions is not permitted');
|
||||
} else if (session.snapshotEnabled && !topology.capabilities.supportsSnapshotReads) {
|
||||
throw new MongoCompatibilityError('Snapshot reads require MongoDB 5.0 or later');
|
||||
}
|
||||
|
||||
const readPreference = operation.readPreference ?? ReadPreference.primary;
|
||||
const inTransaction = !!session?.inTransaction();
|
||||
|
||||
if (inTransaction && !readPreference.equals(ReadPreference.primary)) {
|
||||
throw new MongoTransactionError(
|
||||
`Read preference in a transaction must be primary, not: ${readPreference.mode}`
|
||||
);
|
||||
}
|
||||
|
||||
if (session?.isPinned && session.transaction.isCommitted && !operation.bypassPinningCheck) {
|
||||
session.unpin();
|
||||
}
|
||||
|
||||
let selector: ReadPreference | ServerSelector;
|
||||
|
||||
if (operation.hasAspect(Aspect.MUST_SELECT_SAME_SERVER)) {
|
||||
// GetMore and KillCursor operations must always select the same server, but run through
|
||||
// server selection to potentially force monitor checks if the server is
|
||||
// in an unknown state.
|
||||
selector = sameServerSelector(operation.server?.description);
|
||||
} else if (operation.trySecondaryWrite) {
|
||||
// If operation should try to write to secondary use the custom server selector
|
||||
// otherwise provide the read preference.
|
||||
selector = secondaryWritableServerSelector(topology.commonWireVersion, readPreference);
|
||||
} else {
|
||||
selector = readPreference;
|
||||
}
|
||||
|
||||
const server = await topology.selectServerAsync(selector, { session });
|
||||
|
||||
if (session == null) {
|
||||
// No session also means it is not retryable, early exit
|
||||
return operation.executeAsync(server, undefined);
|
||||
}
|
||||
|
||||
if (!operation.hasAspect(Aspect.RETRYABLE)) {
|
||||
// non-retryable operation, early exit
|
||||
try {
|
||||
return await operation.executeAsync(server, session);
|
||||
} finally {
|
||||
if (session?.owner != null && session.owner === owner) {
|
||||
await session.endSession().catch(() => null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const willRetryRead = topology.s.options.retryReads && !inTransaction && operation.canRetryRead;
|
||||
|
||||
const willRetryWrite =
|
||||
topology.s.options.retryWrites &&
|
||||
!inTransaction &&
|
||||
supportsRetryableWrites(server) &&
|
||||
operation.canRetryWrite;
|
||||
|
||||
const hasReadAspect = operation.hasAspect(Aspect.READ_OPERATION);
|
||||
const hasWriteAspect = operation.hasAspect(Aspect.WRITE_OPERATION);
|
||||
const willRetry = (hasReadAspect && willRetryRead) || (hasWriteAspect && willRetryWrite);
|
||||
|
||||
if (hasWriteAspect && willRetryWrite) {
|
||||
operation.options.willRetryWrite = true;
|
||||
session.incrementTransactionNumber();
|
||||
}
|
||||
|
||||
try {
|
||||
return await operation.executeAsync(server, session);
|
||||
} catch (operationError) {
|
||||
if (willRetry && operationError instanceof MongoError) {
|
||||
return await retryOperation(operation, operationError, {
|
||||
session,
|
||||
topology,
|
||||
selector
|
||||
});
|
||||
}
|
||||
throw operationError;
|
||||
} finally {
|
||||
if (session?.owner != null && session.owner === owner) {
|
||||
await session.endSession().catch(() => null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
type RetryOptions = {
|
||||
session: ClientSession;
|
||||
topology: Topology;
|
||||
selector: ReadPreference | ServerSelector;
|
||||
};
|
||||
|
||||
async function retryOperation<
|
||||
T extends AbstractOperation<TResult>,
|
||||
TResult = ResultTypeFromOperation<T>
|
||||
>(
|
||||
operation: T,
|
||||
originalError: MongoError,
|
||||
{ session, topology, selector }: RetryOptions
|
||||
): Promise<TResult> {
|
||||
const isWriteOperation = operation.hasAspect(Aspect.WRITE_OPERATION);
|
||||
const isReadOperation = operation.hasAspect(Aspect.READ_OPERATION);
|
||||
|
||||
if (isWriteOperation && originalError.code === MMAPv1_RETRY_WRITES_ERROR_CODE) {
|
||||
throw new MongoServerError({
|
||||
message: MMAPv1_RETRY_WRITES_ERROR_MESSAGE,
|
||||
errmsg: MMAPv1_RETRY_WRITES_ERROR_MESSAGE,
|
||||
originalError
|
||||
});
|
||||
}
|
||||
|
||||
if (isWriteOperation && !isRetryableWriteError(originalError)) {
|
||||
throw originalError;
|
||||
}
|
||||
|
||||
if (isReadOperation && !isRetryableReadError(originalError)) {
|
||||
throw originalError;
|
||||
}
|
||||
|
||||
if (
|
||||
originalError instanceof MongoNetworkError &&
|
||||
session.isPinned &&
|
||||
!session.inTransaction() &&
|
||||
operation.hasAspect(Aspect.CURSOR_CREATING)
|
||||
) {
|
||||
// If we have a cursor and the initial command fails with a network error,
|
||||
// we can retry it on another connection. So we need to check it back in, clear the
|
||||
// pool for the service id, and retry again.
|
||||
session.unpin({ force: true, forceClear: true });
|
||||
}
|
||||
|
||||
// select a new server, and attempt to retry the operation
|
||||
const server = await topology.selectServerAsync(selector, { session });
|
||||
|
||||
if (isWriteOperation && !supportsRetryableWrites(server)) {
|
||||
throw new MongoUnexpectedServerResponseError(
|
||||
'Selected server does not support retryable writes'
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
return await operation.executeAsync(server, session);
|
||||
} catch (retryError) {
|
||||
if (
|
||||
retryError instanceof MongoError &&
|
||||
retryError.hasErrorLabel(MongoErrorLabel.NoWritesPerformed)
|
||||
) {
|
||||
throw originalError;
|
||||
}
|
||||
throw retryError;
|
||||
}
|
||||
}
|
262
node_modules/mongodb/src/operations/find.ts
generated
vendored
Normal file
262
node_modules/mongodb/src/operations/find.ts
generated
vendored
Normal file
@@ -0,0 +1,262 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoInvalidArgumentError } from '../error';
|
||||
import { ReadConcern } from '../read_concern';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { formatSort, Sort } from '../sort';
|
||||
import { Callback, decorateWithExplain, MongoDBNamespace, normalizeHintField } from '../utils';
|
||||
import { CollationOptions, CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects, Hint } from './operation';
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @typeParam TSchema - Unused schema definition, deprecated usage, only specify `FindOptions` with no generic
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
export interface FindOptions<TSchema extends Document = Document>
|
||||
extends Omit<CommandOperationOptions, 'writeConcern'> {
|
||||
/** Sets the limit of documents returned in the query. */
|
||||
limit?: number;
|
||||
/** Set to sort the documents coming back from the query. Array of indexes, `[['a', 1]]` etc. */
|
||||
sort?: Sort;
|
||||
/** The fields to return in the query. Object of fields to either include or exclude (one of, not both), `{'a':1, 'b': 1}` **or** `{'a': 0, 'b': 0}` */
|
||||
projection?: Document;
|
||||
/** Set to skip N documents ahead in your query (useful for pagination). */
|
||||
skip?: number;
|
||||
/** Tell the query to use specific indexes in the query. Object of indexes to use, `{'_id':1}` */
|
||||
hint?: Hint;
|
||||
/** Specify if the cursor can timeout. */
|
||||
timeout?: boolean;
|
||||
/** Specify if the cursor is tailable. */
|
||||
tailable?: boolean;
|
||||
/** Specify if the cursor is a tailable-await cursor. Requires `tailable` to be true */
|
||||
awaitData?: boolean;
|
||||
/** Set the batchSize for the getMoreCommand when iterating over the query results. */
|
||||
batchSize?: number;
|
||||
/** If true, returns only the index keys in the resulting documents. */
|
||||
returnKey?: boolean;
|
||||
/** The inclusive lower bound for a specific index */
|
||||
min?: Document;
|
||||
/** The exclusive upper bound for a specific index */
|
||||
max?: Document;
|
||||
/** Number of milliseconds to wait before aborting the query. */
|
||||
maxTimeMS?: number;
|
||||
/** The maximum amount of time for the server to wait on new documents to satisfy a tailable cursor query. Requires `tailable` and `awaitData` to be true */
|
||||
maxAwaitTimeMS?: number;
|
||||
/** The server normally times out idle cursors after an inactivity period (10 minutes) to prevent excess memory use. Set this option to prevent that. */
|
||||
noCursorTimeout?: boolean;
|
||||
/** Specify collation (MongoDB 3.4 or higher) settings for update operation (see 3.4 documentation for available fields). */
|
||||
collation?: CollationOptions;
|
||||
/** Allows disk use for blocking sort operations exceeding 100MB memory. (MongoDB 3.2 or higher) */
|
||||
allowDiskUse?: boolean;
|
||||
/** Determines whether to close the cursor after the first batch. Defaults to false. */
|
||||
singleBatch?: boolean;
|
||||
/** For queries against a sharded collection, allows the command (or subsequent getMore commands) to return partial results, rather than an error, if one or more queried shards are unavailable. */
|
||||
allowPartialResults?: boolean;
|
||||
/** Determines whether to return the record identifier for each document. If true, adds a field $recordId to the returned documents. */
|
||||
showRecordId?: boolean;
|
||||
/** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
|
||||
let?: Document;
|
||||
/**
|
||||
* Option to enable an optimized code path for queries looking for a particular range of `ts` values in the oplog. Requires `tailable` to be true.
|
||||
* @deprecated Starting from MongoDB 4.4 this flag is not needed and will be ignored.
|
||||
*/
|
||||
oplogReplay?: boolean;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class FindOperation extends CommandOperation<Document> {
|
||||
/**
|
||||
* @remarks WriteConcern can still be present on the options because
|
||||
* we inherit options from the client/db/collection. The
|
||||
* key must be present on the options in order to delete it.
|
||||
* This allows typescript to delete the key but will
|
||||
* not allow a writeConcern to be assigned as a property on options.
|
||||
*/
|
||||
override options: FindOptions & { writeConcern?: never };
|
||||
filter: Document;
|
||||
|
||||
constructor(
|
||||
collection: Collection | undefined,
|
||||
ns: MongoDBNamespace,
|
||||
filter: Document = {},
|
||||
options: FindOptions = {}
|
||||
) {
|
||||
super(collection, options);
|
||||
|
||||
this.options = { ...options };
|
||||
delete this.options.writeConcern;
|
||||
this.ns = ns;
|
||||
|
||||
if (typeof filter !== 'object' || Array.isArray(filter)) {
|
||||
throw new MongoInvalidArgumentError('Query filter must be a plain object or ObjectId');
|
||||
}
|
||||
|
||||
// special case passing in an ObjectId as a filter
|
||||
this.filter = filter != null && filter._bsontype === 'ObjectId' ? { _id: filter } : filter;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
this.server = server;
|
||||
|
||||
const options = this.options;
|
||||
|
||||
let findCommand = makeFindCommand(this.ns, this.filter, options);
|
||||
if (this.explain) {
|
||||
findCommand = decorateWithExplain(findCommand, this.explain);
|
||||
}
|
||||
|
||||
server.command(
|
||||
this.ns,
|
||||
findCommand,
|
||||
{
|
||||
...this.options,
|
||||
...this.bsonOptions,
|
||||
documentsReturnedIn: 'firstBatch',
|
||||
session
|
||||
},
|
||||
callback
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function makeFindCommand(ns: MongoDBNamespace, filter: Document, options: FindOptions): Document {
|
||||
const findCommand: Document = {
|
||||
find: ns.collection,
|
||||
filter
|
||||
};
|
||||
|
||||
if (options.sort) {
|
||||
findCommand.sort = formatSort(options.sort);
|
||||
}
|
||||
|
||||
if (options.projection) {
|
||||
let projection = options.projection;
|
||||
if (projection && Array.isArray(projection)) {
|
||||
projection = projection.length
|
||||
? projection.reduce((result, field) => {
|
||||
result[field] = 1;
|
||||
return result;
|
||||
}, {})
|
||||
: { _id: 1 };
|
||||
}
|
||||
|
||||
findCommand.projection = projection;
|
||||
}
|
||||
|
||||
if (options.hint) {
|
||||
findCommand.hint = normalizeHintField(options.hint);
|
||||
}
|
||||
|
||||
if (typeof options.skip === 'number') {
|
||||
findCommand.skip = options.skip;
|
||||
}
|
||||
|
||||
if (typeof options.limit === 'number') {
|
||||
if (options.limit < 0) {
|
||||
findCommand.limit = -options.limit;
|
||||
findCommand.singleBatch = true;
|
||||
} else {
|
||||
findCommand.limit = options.limit;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof options.batchSize === 'number') {
|
||||
if (options.batchSize < 0) {
|
||||
if (
|
||||
options.limit &&
|
||||
options.limit !== 0 &&
|
||||
Math.abs(options.batchSize) < Math.abs(options.limit)
|
||||
) {
|
||||
findCommand.limit = -options.batchSize;
|
||||
}
|
||||
|
||||
findCommand.singleBatch = true;
|
||||
} else {
|
||||
findCommand.batchSize = options.batchSize;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof options.singleBatch === 'boolean') {
|
||||
findCommand.singleBatch = options.singleBatch;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (options.comment !== undefined) {
|
||||
findCommand.comment = options.comment;
|
||||
}
|
||||
|
||||
if (typeof options.maxTimeMS === 'number') {
|
||||
findCommand.maxTimeMS = options.maxTimeMS;
|
||||
}
|
||||
|
||||
const readConcern = ReadConcern.fromOptions(options);
|
||||
if (readConcern) {
|
||||
findCommand.readConcern = readConcern.toJSON();
|
||||
}
|
||||
|
||||
if (options.max) {
|
||||
findCommand.max = options.max;
|
||||
}
|
||||
|
||||
if (options.min) {
|
||||
findCommand.min = options.min;
|
||||
}
|
||||
|
||||
if (typeof options.returnKey === 'boolean') {
|
||||
findCommand.returnKey = options.returnKey;
|
||||
}
|
||||
|
||||
if (typeof options.showRecordId === 'boolean') {
|
||||
findCommand.showRecordId = options.showRecordId;
|
||||
}
|
||||
|
||||
if (typeof options.tailable === 'boolean') {
|
||||
findCommand.tailable = options.tailable;
|
||||
}
|
||||
|
||||
if (typeof options.oplogReplay === 'boolean') {
|
||||
findCommand.oplogReplay = options.oplogReplay;
|
||||
}
|
||||
|
||||
if (typeof options.timeout === 'boolean') {
|
||||
findCommand.noCursorTimeout = !options.timeout;
|
||||
} else if (typeof options.noCursorTimeout === 'boolean') {
|
||||
findCommand.noCursorTimeout = options.noCursorTimeout;
|
||||
}
|
||||
|
||||
if (typeof options.awaitData === 'boolean') {
|
||||
findCommand.awaitData = options.awaitData;
|
||||
}
|
||||
|
||||
if (typeof options.allowPartialResults === 'boolean') {
|
||||
findCommand.allowPartialResults = options.allowPartialResults;
|
||||
}
|
||||
|
||||
if (options.collation) {
|
||||
findCommand.collation = options.collation;
|
||||
}
|
||||
|
||||
if (typeof options.allowDiskUse === 'boolean') {
|
||||
findCommand.allowDiskUse = options.allowDiskUse;
|
||||
}
|
||||
|
||||
if (options.let) {
|
||||
findCommand.let = options.let;
|
||||
}
|
||||
|
||||
return findCommand;
|
||||
}
|
||||
|
||||
defineAspects(FindOperation, [
|
||||
Aspect.READ_OPERATION,
|
||||
Aspect.RETRYABLE,
|
||||
Aspect.EXPLAINABLE,
|
||||
Aspect.CURSOR_CREATING
|
||||
]);
|
286
node_modules/mongodb/src/operations/find_and_modify.ts
generated
vendored
Normal file
286
node_modules/mongodb/src/operations/find_and_modify.ts
generated
vendored
Normal file
@@ -0,0 +1,286 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoCompatibilityError, MongoInvalidArgumentError } from '../error';
|
||||
import { ReadPreference } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { formatSort, Sort, SortForCmd } from '../sort';
|
||||
import { Callback, decorateWithCollation, hasAtomicOperators, maxWireVersion } from '../utils';
|
||||
import type { WriteConcern, WriteConcernSettings } from '../write_concern';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public */
|
||||
export const ReturnDocument = Object.freeze({
|
||||
BEFORE: 'before',
|
||||
AFTER: 'after'
|
||||
} as const);
|
||||
|
||||
/** @public */
|
||||
export type ReturnDocument = (typeof ReturnDocument)[keyof typeof ReturnDocument];
|
||||
|
||||
/** @public */
|
||||
export interface FindOneAndDeleteOptions extends CommandOperationOptions {
|
||||
/** An optional hint for query optimization. See the {@link https://www.mongodb.com/docs/manual/reference/command/update/#update-command-hint|update command} reference for more information.*/
|
||||
hint?: Document;
|
||||
/** Limits the fields to return for all matching documents. */
|
||||
projection?: Document;
|
||||
/** Determines which document the operation modifies if the query selects multiple documents. */
|
||||
sort?: Sort;
|
||||
/** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
|
||||
let?: Document;
|
||||
}
|
||||
|
||||
/** @public */
|
||||
export interface FindOneAndReplaceOptions extends CommandOperationOptions {
|
||||
/** Allow driver to bypass schema validation in MongoDB 3.2 or higher. */
|
||||
bypassDocumentValidation?: boolean;
|
||||
/** An optional hint for query optimization. See the {@link https://www.mongodb.com/docs/manual/reference/command/update/#update-command-hint|update command} reference for more information.*/
|
||||
hint?: Document;
|
||||
/** Limits the fields to return for all matching documents. */
|
||||
projection?: Document;
|
||||
/** When set to 'after', returns the updated document rather than the original. The default is 'before'. */
|
||||
returnDocument?: ReturnDocument;
|
||||
/** Determines which document the operation modifies if the query selects multiple documents. */
|
||||
sort?: Sort;
|
||||
/** Upsert the document if it does not exist. */
|
||||
upsert?: boolean;
|
||||
/** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
|
||||
let?: Document;
|
||||
}
|
||||
|
||||
/** @public */
|
||||
export interface FindOneAndUpdateOptions extends CommandOperationOptions {
|
||||
/** Optional list of array filters referenced in filtered positional operators */
|
||||
arrayFilters?: Document[];
|
||||
/** Allow driver to bypass schema validation in MongoDB 3.2 or higher. */
|
||||
bypassDocumentValidation?: boolean;
|
||||
/** An optional hint for query optimization. See the {@link https://www.mongodb.com/docs/manual/reference/command/update/#update-command-hint|update command} reference for more information.*/
|
||||
hint?: Document;
|
||||
/** Limits the fields to return for all matching documents. */
|
||||
projection?: Document;
|
||||
/** When set to 'after', returns the updated document rather than the original. The default is 'before'. */
|
||||
returnDocument?: ReturnDocument;
|
||||
/** Determines which document the operation modifies if the query selects multiple documents. */
|
||||
sort?: Sort;
|
||||
/** Upsert the document if it does not exist. */
|
||||
upsert?: boolean;
|
||||
/** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
|
||||
let?: Document;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
interface FindAndModifyCmdBase {
|
||||
remove: boolean;
|
||||
new: boolean;
|
||||
upsert: boolean;
|
||||
update?: Document;
|
||||
sort?: SortForCmd;
|
||||
fields?: Document;
|
||||
bypassDocumentValidation?: boolean;
|
||||
arrayFilters?: Document[];
|
||||
maxTimeMS?: number;
|
||||
let?: Document;
|
||||
writeConcern?: WriteConcern | WriteConcernSettings;
|
||||
/**
|
||||
* Comment to apply to the operation.
|
||||
*
|
||||
* In server versions pre-4.4, 'comment' must be string. A server
|
||||
* error will be thrown if any other type is provided.
|
||||
*
|
||||
* In server versions 4.4 and above, 'comment' can be any valid BSON type.
|
||||
*/
|
||||
comment?: unknown;
|
||||
}
|
||||
|
||||
function configureFindAndModifyCmdBaseUpdateOpts(
|
||||
cmdBase: FindAndModifyCmdBase,
|
||||
options: FindOneAndReplaceOptions | FindOneAndUpdateOptions
|
||||
): FindAndModifyCmdBase {
|
||||
cmdBase.new = options.returnDocument === ReturnDocument.AFTER;
|
||||
cmdBase.upsert = options.upsert === true;
|
||||
|
||||
if (options.bypassDocumentValidation === true) {
|
||||
cmdBase.bypassDocumentValidation = options.bypassDocumentValidation;
|
||||
}
|
||||
return cmdBase;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
class FindAndModifyOperation extends CommandOperation<Document> {
|
||||
override options: FindOneAndReplaceOptions | FindOneAndUpdateOptions | FindOneAndDeleteOptions;
|
||||
cmdBase: FindAndModifyCmdBase;
|
||||
collection: Collection;
|
||||
query: Document;
|
||||
doc?: Document;
|
||||
|
||||
constructor(
|
||||
collection: Collection,
|
||||
query: Document,
|
||||
options: FindOneAndReplaceOptions | FindOneAndUpdateOptions | FindOneAndDeleteOptions
|
||||
) {
|
||||
super(collection, options);
|
||||
this.options = options ?? {};
|
||||
this.cmdBase = {
|
||||
remove: false,
|
||||
new: false,
|
||||
upsert: false
|
||||
};
|
||||
|
||||
const sort = formatSort(options.sort);
|
||||
if (sort) {
|
||||
this.cmdBase.sort = sort;
|
||||
}
|
||||
|
||||
if (options.projection) {
|
||||
this.cmdBase.fields = options.projection;
|
||||
}
|
||||
|
||||
if (options.maxTimeMS) {
|
||||
this.cmdBase.maxTimeMS = options.maxTimeMS;
|
||||
}
|
||||
|
||||
// Decorate the findAndModify command with the write Concern
|
||||
if (options.writeConcern) {
|
||||
this.cmdBase.writeConcern = options.writeConcern;
|
||||
}
|
||||
|
||||
if (options.let) {
|
||||
this.cmdBase.let = options.let;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (options.comment !== undefined) {
|
||||
this.cmdBase.comment = options.comment;
|
||||
}
|
||||
|
||||
// force primary read preference
|
||||
this.readPreference = ReadPreference.primary;
|
||||
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
const query = this.query;
|
||||
const options = { ...this.options, ...this.bsonOptions };
|
||||
|
||||
// Create findAndModify command object
|
||||
const cmd: Document = {
|
||||
findAndModify: coll.collectionName,
|
||||
query: query,
|
||||
...this.cmdBase
|
||||
};
|
||||
|
||||
// Have we specified collation
|
||||
try {
|
||||
decorateWithCollation(cmd, coll, options);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if (options.hint) {
|
||||
// TODO: once this method becomes a CommandOperation we will have the server
|
||||
// in place to check.
|
||||
const unacknowledgedWrite = this.writeConcern?.w === 0;
|
||||
if (unacknowledgedWrite || maxWireVersion(server) < 8) {
|
||||
callback(
|
||||
new MongoCompatibilityError(
|
||||
'The current topology does not support a hint on findAndModify commands'
|
||||
)
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
cmd.hint = options.hint;
|
||||
}
|
||||
|
||||
// Execute the command
|
||||
super.executeCommand(server, session, cmd, (err, result) => {
|
||||
if (err) return callback(err);
|
||||
return callback(undefined, result);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class FindOneAndDeleteOperation extends FindAndModifyOperation {
|
||||
constructor(collection: Collection, filter: Document, options: FindOneAndDeleteOptions) {
|
||||
// Basic validation
|
||||
if (filter == null || typeof filter !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Argument "filter" must be an object');
|
||||
}
|
||||
|
||||
super(collection, filter, options);
|
||||
this.cmdBase.remove = true;
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class FindOneAndReplaceOperation extends FindAndModifyOperation {
|
||||
constructor(
|
||||
collection: Collection,
|
||||
filter: Document,
|
||||
replacement: Document,
|
||||
options: FindOneAndReplaceOptions
|
||||
) {
|
||||
if (filter == null || typeof filter !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Argument "filter" must be an object');
|
||||
}
|
||||
|
||||
if (replacement == null || typeof replacement !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Argument "replacement" must be an object');
|
||||
}
|
||||
|
||||
if (hasAtomicOperators(replacement)) {
|
||||
throw new MongoInvalidArgumentError('Replacement document must not contain atomic operators');
|
||||
}
|
||||
|
||||
super(collection, filter, options);
|
||||
this.cmdBase.update = replacement;
|
||||
configureFindAndModifyCmdBaseUpdateOpts(this.cmdBase, options);
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class FindOneAndUpdateOperation extends FindAndModifyOperation {
|
||||
constructor(
|
||||
collection: Collection,
|
||||
filter: Document,
|
||||
update: Document,
|
||||
options: FindOneAndUpdateOptions
|
||||
) {
|
||||
if (filter == null || typeof filter !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Argument "filter" must be an object');
|
||||
}
|
||||
|
||||
if (update == null || typeof update !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Argument "update" must be an object');
|
||||
}
|
||||
|
||||
if (!hasAtomicOperators(update)) {
|
||||
throw new MongoInvalidArgumentError('Update document requires atomic operators');
|
||||
}
|
||||
|
||||
super(collection, filter, options);
|
||||
this.cmdBase.update = update;
|
||||
configureFindAndModifyCmdBaseUpdateOpts(this.cmdBase, options);
|
||||
|
||||
if (options.arrayFilters) {
|
||||
this.cmdBase.arrayFilters = options.arrayFilters;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(FindAndModifyOperation, [
|
||||
Aspect.WRITE_OPERATION,
|
||||
Aspect.RETRYABLE,
|
||||
Aspect.EXPLAINABLE
|
||||
]);
|
106
node_modules/mongodb/src/operations/get_more.ts
generated
vendored
Normal file
106
node_modules/mongodb/src/operations/get_more.ts
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
import type { Document, Long } from '../bson';
|
||||
import { MongoRuntimeError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, maxWireVersion, MongoDBNamespace } from '../utils';
|
||||
import { AbstractOperation, Aspect, defineAspects, OperationOptions } from './operation';
|
||||
|
||||
/** @internal */
|
||||
export interface GetMoreOptions extends OperationOptions {
|
||||
/** Set the batchSize for the getMoreCommand when iterating over the query results. */
|
||||
batchSize?: number;
|
||||
/**
|
||||
* Comment to apply to the operation.
|
||||
*
|
||||
* getMore only supports 'comment' in server versions 4.4 and above.
|
||||
*/
|
||||
comment?: unknown;
|
||||
/** Number of milliseconds to wait before aborting the query. */
|
||||
maxTimeMS?: number;
|
||||
/** TODO(NODE-4413): Address bug with maxAwaitTimeMS not being passed in from the cursor correctly */
|
||||
maxAwaitTimeMS?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* GetMore command: https://www.mongodb.com/docs/manual/reference/command/getMore/
|
||||
* @internal
|
||||
*/
|
||||
export interface GetMoreCommand {
|
||||
getMore: Long;
|
||||
collection: string;
|
||||
batchSize?: number;
|
||||
maxTimeMS?: number;
|
||||
/** Only supported on wire versions 10 or greater */
|
||||
comment?: unknown;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class GetMoreOperation extends AbstractOperation {
|
||||
cursorId: Long;
|
||||
override options: GetMoreOptions;
|
||||
|
||||
constructor(ns: MongoDBNamespace, cursorId: Long, server: Server, options: GetMoreOptions) {
|
||||
super(options);
|
||||
|
||||
this.options = options;
|
||||
this.ns = ns;
|
||||
this.cursorId = cursorId;
|
||||
this.server = server;
|
||||
}
|
||||
|
||||
/**
|
||||
* Although there is a server already associated with the get more operation, the signature
|
||||
* for execute passes a server so we will just use that one.
|
||||
*/
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
if (server !== this.server) {
|
||||
return callback(
|
||||
new MongoRuntimeError('Getmore must run on the same server operation began on')
|
||||
);
|
||||
}
|
||||
|
||||
if (this.cursorId == null || this.cursorId.isZero()) {
|
||||
return callback(new MongoRuntimeError('Unable to iterate cursor with no id'));
|
||||
}
|
||||
|
||||
const collection = this.ns.collection;
|
||||
if (collection == null) {
|
||||
// Cursors should have adopted the namespace returned by MongoDB
|
||||
// which should always defined a collection name (even a pseudo one, ex. db.aggregate())
|
||||
return callback(new MongoRuntimeError('A collection name must be determined before getMore'));
|
||||
}
|
||||
|
||||
const getMoreCmd: GetMoreCommand = {
|
||||
getMore: this.cursorId,
|
||||
collection
|
||||
};
|
||||
|
||||
if (typeof this.options.batchSize === 'number') {
|
||||
getMoreCmd.batchSize = Math.abs(this.options.batchSize);
|
||||
}
|
||||
|
||||
if (typeof this.options.maxAwaitTimeMS === 'number') {
|
||||
getMoreCmd.maxTimeMS = this.options.maxAwaitTimeMS;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (this.options.comment !== undefined && maxWireVersion(server) >= 9) {
|
||||
getMoreCmd.comment = this.options.comment;
|
||||
}
|
||||
|
||||
const commandOptions = {
|
||||
returnFieldSelector: null,
|
||||
documentsReturnedIn: 'nextBatch',
|
||||
...this.options
|
||||
};
|
||||
|
||||
server.command(this.ns, getMoreCmd, commandOptions, callback);
|
||||
}
|
||||
}
|
||||
|
||||
// getMore is a read that must be routed back to the cursor's original server.
defineAspects(GetMoreOperation, [Aspect.READ_OPERATION, Aspect.MUST_SELECT_SAME_SERVER]);
|
518
node_modules/mongodb/src/operations/indexes.ts
generated
vendored
Normal file
518
node_modules/mongodb/src/operations/indexes.ts
generated
vendored
Normal file
@@ -0,0 +1,518 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import { MongoCompatibilityError, MONGODB_ERROR_CODES, MongoError } from '../error';
|
||||
import type { OneOrMore } from '../mongo_types';
|
||||
import { ReadPreference } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, isObject, maxWireVersion, MongoDBNamespace } from '../utils';
|
||||
import {
|
||||
CollationOptions,
|
||||
CommandOperation,
|
||||
CommandOperationOptions,
|
||||
OperationParent
|
||||
} from './command';
|
||||
import { indexInformation, IndexInformationOptions } from './common_functions';
|
||||
import { AbstractOperation, Aspect, defineAspects } from './operation';
|
||||
|
||||
const VALID_INDEX_OPTIONS = new Set([
|
||||
'background',
|
||||
'unique',
|
||||
'name',
|
||||
'partialFilterExpression',
|
||||
'sparse',
|
||||
'hidden',
|
||||
'expireAfterSeconds',
|
||||
'storageEngine',
|
||||
'collation',
|
||||
'version',
|
||||
|
||||
// text indexes
|
||||
'weights',
|
||||
'default_language',
|
||||
'language_override',
|
||||
'textIndexVersion',
|
||||
|
||||
// 2d-sphere indexes
|
||||
'2dsphereIndexVersion',
|
||||
|
||||
// 2d indexes
|
||||
'bits',
|
||||
'min',
|
||||
'max',
|
||||
|
||||
// geoHaystack Indexes
|
||||
'bucketSize',
|
||||
|
||||
// wildcard indexes
|
||||
'wildcardProjection'
|
||||
]);
|
||||
|
||||
/** @public */
|
||||
export type IndexDirection =
|
||||
| -1
|
||||
| 1
|
||||
| '2d'
|
||||
| '2dsphere'
|
||||
| 'text'
|
||||
| 'geoHaystack'
|
||||
| 'hashed'
|
||||
| number;
|
||||
|
||||
function isIndexDirection(x: unknown): x is IndexDirection {
|
||||
return (
|
||||
typeof x === 'number' || x === '2d' || x === '2dsphere' || x === 'text' || x === 'geoHaystack'
|
||||
);
|
||||
}
|
||||
/**
 * Accepted shapes for describing index keys: a bare field name (direction
 * defaults to 1 — see makeIndexSpec), a [field, direction] tuple, a plain
 * object or Map of field to direction, or an array of any of these.
 * @public
 */
export type IndexSpecification = OneOrMore<
  | string
  | [string, IndexDirection]
  | { [key: string]: IndexDirection }
  | Map<string, IndexDirection>
>;
|
||||
|
||||
/**
 * Description of a single index to create: the `key` document (field to
 * direction; a Map may be used to guarantee key ordering) plus the per-index
 * options it shares with CreateIndexesOptions.
 * @public
 */
export interface IndexDescription
  extends Pick<
    CreateIndexesOptions,
    | 'background'
    | 'unique'
    | 'partialFilterExpression'
    | 'sparse'
    | 'hidden'
    | 'expireAfterSeconds'
    | 'storageEngine'
    | 'version'
    | 'weights'
    | 'default_language'
    | 'language_override'
    | 'textIndexVersion'
    | '2dsphereIndexVersion'
    | 'bits'
    | 'min'
    | 'max'
    | 'bucketSize'
    | 'wildcardProjection'
  > {
  /** Collation for this index (applied per index, never at the command root — see CreateIndexesOperation.execute). */
  collation?: CollationOptions;
  /** Index name; when omitted a name is derived by joining key fields and directions with '_' (see CreateIndexesOperation). */
  name?: string;
  /** The index keys, in the order they should appear in the index. */
  key: { [key: string]: IndexDirection } | Map<string, IndexDirection>;
}
|
||||
|
||||
/**
 * Options accepted by the createIndex/createIndexes helpers.
 * @public
 */
export interface CreateIndexesOptions extends Omit<CommandOperationOptions, 'writeConcern'> {
  /** Creates the index in the background, yielding whenever possible. */
  background?: boolean;
  /** Creates an unique index. */
  unique?: boolean;
  /** Override the autogenerated index name (useful if the resulting name is larger than 128 bytes) */
  name?: string;
  /** Creates a partial index based on the given filter object (MongoDB 3.2 or higher) */
  partialFilterExpression?: Document;
  /** Creates a sparse index. */
  sparse?: boolean;
  /** Allows you to expire data on indexes applied to a data (MongoDB 2.2 or higher) */
  expireAfterSeconds?: number;
  /** Allows users to configure the storage engine on a per-index basis when creating an index. (MongoDB 3.0 or higher) */
  storageEngine?: Document;
  /** (MongoDB 4.4. or higher) Specifies how many data-bearing members of a replica set, including the primary, must complete the index builds successfully before the primary marks the indexes as ready. This option accepts the same values for the "w" field in a write concern plus "votingMembers", which indicates all voting data-bearing nodes. */
  commitQuorum?: number | string;
  /** Specifies the index version number, either 0 or 1. */
  version?: number;
  // text indexes
  /** Per-field weighting document for text indexes — presumably scoring multipliers; confirm against server docs. */
  weights?: Document;
  /** Default language for a text index. */
  default_language?: string;
  /** Name of the document field that overrides the index language per document. */
  language_override?: string;
  /** Text index format version. */
  textIndexVersion?: number;
  // 2d-sphere indexes
  /** 2dsphere index format version. */
  '2dsphereIndexVersion'?: number;
  // 2d indexes
  /** Precision setting for 2d indexes — NOTE(review): assumed to be geohash bit precision; confirm against server docs. */
  bits?: number;
  /** For geospatial indexes set the lower bound for the co-ordinates. */
  min?: number;
  /** For geospatial indexes set the high bound for the co-ordinates. */
  max?: number;
  // geoHaystack Indexes
  /** Bucket size for geoHaystack indexes. */
  bucketSize?: number;
  // wildcard indexes
  /** Projection document restricting which fields a wildcard index covers. */
  wildcardProjection?: Document;
  /** Specifies that the index should exist on the target collection but should not be used by the query planner when executing operations. (MongoDB 4.4 or higher) */
  hidden?: boolean;
}
|
||||
|
||||
function isSingleIndexTuple(t: unknown): t is [string, IndexDirection] {
|
||||
return Array.isArray(t) && t.length === 2 && isIndexDirection(t[1]);
|
||||
}
|
||||
|
||||
function makeIndexSpec(
|
||||
indexSpec: IndexSpecification,
|
||||
options?: CreateIndexesOptions
|
||||
): IndexDescription {
|
||||
const key: Map<string, IndexDirection> = new Map();
|
||||
|
||||
const indexSpecs =
|
||||
!Array.isArray(indexSpec) || isSingleIndexTuple(indexSpec) ? [indexSpec] : indexSpec;
|
||||
|
||||
// Iterate through array and handle different types
|
||||
for (const spec of indexSpecs) {
|
||||
if (typeof spec === 'string') {
|
||||
key.set(spec, 1);
|
||||
} else if (Array.isArray(spec)) {
|
||||
key.set(spec[0], spec[1] ?? 1);
|
||||
} else if (spec instanceof Map) {
|
||||
for (const [property, value] of spec) {
|
||||
key.set(property, value);
|
||||
}
|
||||
} else if (isObject(spec)) {
|
||||
for (const [property, value] of Object.entries(spec)) {
|
||||
key.set(property, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { ...options, key };
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class IndexesOperation extends AbstractOperation<Document[]> {
|
||||
override options: IndexInformationOptions;
|
||||
collection: Collection;
|
||||
|
||||
constructor(collection: Collection, options: IndexInformationOptions) {
|
||||
super(options);
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document[]>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
const options = this.options;
|
||||
|
||||
indexInformation(
|
||||
coll.s.db,
|
||||
coll.collectionName,
|
||||
{ full: true, ...options, readPreference: this.readPreference, session },
|
||||
callback
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Issues a `createIndexes` command for one or more index descriptions.
 * The result type parameter is the list of created index names (or, via the
 * CreateIndexOperation subclass, a single name).
 * @internal
 */
export class CreateIndexesOperation<
  T extends string | string[] = string[]
> extends CommandOperation<T> {
  override options: CreateIndexesOptions;
  collectionName: string;
  // Normalized copies of the user's descriptions: keys coerced to Map, names
  // filled in, and unknown options stripped.
  indexes: ReadonlyArray<Omit<IndexDescription, 'key'> & { key: Map<string, IndexDirection> }>;

  constructor(
    parent: OperationParent,
    collectionName: string,
    indexes: IndexDescription[],
    options?: CreateIndexesOptions
  ) {
    super(parent, options);

    this.options = options ?? {};
    this.collectionName = collectionName;
    this.indexes = indexes.map(userIndex => {
      // Ensure the key is a Map to preserve index key ordering
      const key =
        userIndex.key instanceof Map ? userIndex.key : new Map(Object.entries(userIndex.key));
      // Default name: key fields and directions joined with '_' (e.g. "a_1_b_-1").
      const name = userIndex.name != null ? userIndex.name : Array.from(key).flat().join('_');
      // Drop any property that is not a recognized per-index option.
      const validIndexOptions = Object.fromEntries(
        Object.entries({ ...userIndex }).filter(([optionName]) =>
          VALID_INDEX_OPTIONS.has(optionName)
        )
      );
      return {
        ...validIndexOptions,
        name,
        key
      };
    });
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<T>
  ): void {
    const options = this.options;
    const indexes = this.indexes;

    const serverWireVersion = maxWireVersion(server);

    const cmd: Document = { createIndexes: this.collectionName, indexes };

    if (options.commitQuorum != null) {
      // Wire version 9 corresponds to server 4.4, the first release supporting commitQuorum.
      if (serverWireVersion < 9) {
        callback(
          new MongoCompatibilityError(
            'Option `commitQuorum` for `createIndexes` not supported on servers < 4.4'
          )
        );
        return;
      }
      cmd.commitQuorum = options.commitQuorum;
    }

    // collation is set on each index, it should not be defined at the root
    this.options.collation = undefined;

    super.executeCommand(server, session, cmd, err => {
      if (err) {
        callback(err);
        return;
      }

      // Report back the (possibly generated) names of the created indexes.
      const indexNames = indexes.map(index => index.name || '');
      callback(undefined, indexNames as T);
    });
  }
}
|
||||
|
||||
/** @internal */
|
||||
export class CreateIndexOperation extends CreateIndexesOperation<string> {
|
||||
constructor(
|
||||
parent: OperationParent,
|
||||
collectionName: string,
|
||||
indexSpec: IndexSpecification,
|
||||
options?: CreateIndexesOptions
|
||||
) {
|
||||
super(parent, collectionName, [makeIndexSpec(indexSpec, options)], options);
|
||||
}
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<string>
|
||||
): void {
|
||||
super.execute(server, session, (err, indexNames) => {
|
||||
if (err || !indexNames) return callback(err);
|
||||
return callback(undefined, indexNames[0]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Creates an index only if one with the same name does not already exist:
 * lists the collection's indexes first and skips creation on a name match.
 * @internal
 */
export class EnsureIndexOperation extends CreateIndexOperation {
  db: Db;

  constructor(
    db: Db,
    collectionName: string,
    indexSpec: IndexSpecification,
    options?: CreateIndexesOptions
  ) {
    super(db, collectionName, indexSpec, options);

    // The pre-check must see the authoritative index list, so pin to primary.
    this.readPreference = ReadPreference.primary;
    this.db = db;
    this.collectionName = collectionName;
  }

  override execute(server: Server, session: ClientSession | undefined, callback: Callback): void {
    // Single index guaranteed by the CreateIndexOperation constructor.
    const indexName = this.indexes[0].name;
    const cursor = this.db.collection(this.collectionName).listIndexes({ session });
    // Two-argument then(): the rejection handler must NOT catch errors thrown
    // by the fulfillment handler (which invokes the user callback).
    cursor.toArray().then(
      indexes => {
        indexes = Array.isArray(indexes) ? indexes : [indexes];
        // An index with this name already exists — nothing to create.
        if (indexes.some(index => index.name === indexName)) {
          callback(undefined, indexName);
          return;
        }
        super.execute(server, session, callback);
      },
      error => {
        if (error instanceof MongoError && error.code === MONGODB_ERROR_CODES.NamespaceNotFound) {
          // ignore "NamespaceNotFound" errors — the collection does not exist
          // yet, so proceed to create the index (which creates the collection).
          return super.execute(server, session, callback);
        }
        return callback(error);
      }
    );
  }
}
|
||||
|
||||
/**
 * Options for the dropIndex/dropIndexes helpers; currently identical to the
 * generic command options.
 * @public
 */
export type DropIndexesOptions = CommandOperationOptions;
|
||||
|
||||
/** @internal */
|
||||
export class DropIndexOperation extends CommandOperation<Document> {
|
||||
override options: DropIndexesOptions;
|
||||
collection: Collection;
|
||||
indexName: string;
|
||||
|
||||
constructor(collection: Collection, indexName: string, options?: DropIndexesOptions) {
|
||||
super(collection, options);
|
||||
|
||||
this.options = options ?? {};
|
||||
this.collection = collection;
|
||||
this.indexName = indexName;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const cmd = { dropIndexes: this.collection.collectionName, index: this.indexName };
|
||||
super.executeCommand(server, session, cmd, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class DropIndexesOperation extends DropIndexOperation {
|
||||
constructor(collection: Collection, options: DropIndexesOptions) {
|
||||
super(collection, '*', options);
|
||||
}
|
||||
|
||||
override execute(server: Server, session: ClientSession | undefined, callback: Callback): void {
|
||||
super.execute(server, session, err => {
|
||||
if (err) return callback(err, false);
|
||||
callback(undefined, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Options for listing a collection's indexes. Write concern is excluded
 * because listIndexes is a read-only command.
 * @public
 */
export interface ListIndexesOptions extends Omit<CommandOperationOptions, 'writeConcern'> {
  /** The batchSize for the returned command cursor or if pre 2.8 the systems batch collection */
  batchSize?: number;
}
|
||||
|
||||
/**
 * Issues a cursor-producing `listIndexes` command for a collection.
 * @internal
 */
export class ListIndexesOperation extends CommandOperation<Document> {
  /**
   * @remarks WriteConcern can still be present on the options because
   * we inherit options from the client/db/collection. The
   * key must be present on the options in order to delete it.
   * This allows typescript to delete the key but will
   * not allow a writeConcern to be assigned as a property on options.
   */
  override options: ListIndexesOptions & { writeConcern?: never };
  collectionNamespace: MongoDBNamespace;

  constructor(collection: Collection, options?: ListIndexesOptions) {
    super(collection, options);

    // Copy before deleting so the caller's options object is not mutated.
    this.options = { ...options };
    delete this.options.writeConcern;
    this.collectionNamespace = collection.s.namespace;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<Document>
  ): void {
    const serverWireVersion = maxWireVersion(server);

    // Cursor sub-document carries the batch size when one was requested.
    const cursor = this.options.batchSize ? { batchSize: this.options.batchSize } : {};

    const command: Document = { listIndexes: this.collectionNamespace.collection, cursor };

    // we check for undefined specifically here to allow falsy values
    // eslint-disable-next-line no-restricted-syntax
    if (serverWireVersion >= 9 && this.options.comment !== undefined) {
      // Wire version 9 (server 4.4) is the first to accept `comment` here.
      command.comment = this.options.comment;
    }

    super.executeCommand(server, session, command, callback);
  }
}
|
||||
|
||||
/** @internal */
|
||||
export class IndexExistsOperation extends AbstractOperation<boolean> {
|
||||
override options: IndexInformationOptions;
|
||||
collection: Collection;
|
||||
indexes: string | string[];
|
||||
|
||||
constructor(
|
||||
collection: Collection,
|
||||
indexes: string | string[],
|
||||
options: IndexInformationOptions
|
||||
) {
|
||||
super(options);
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
this.indexes = indexes;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<boolean>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
const indexes = this.indexes;
|
||||
|
||||
indexInformation(
|
||||
coll.s.db,
|
||||
coll.collectionName,
|
||||
{ ...this.options, readPreference: this.readPreference, session },
|
||||
(err, indexInformation) => {
|
||||
// If we have an error return
|
||||
if (err != null) return callback(err);
|
||||
// Let's check for the index names
|
||||
if (!Array.isArray(indexes)) return callback(undefined, indexInformation[indexes] != null);
|
||||
// Check in list of indexes
|
||||
for (let i = 0; i < indexes.length; i++) {
|
||||
if (indexInformation[indexes[i]] == null) {
|
||||
return callback(undefined, false);
|
||||
}
|
||||
}
|
||||
|
||||
// All keys found return true
|
||||
return callback(undefined, true);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class IndexInformationOperation extends AbstractOperation<Document> {
|
||||
override options: IndexInformationOptions;
|
||||
db: Db;
|
||||
name: string;
|
||||
|
||||
constructor(db: Db, name: string, options?: IndexInformationOptions) {
|
||||
super(options);
|
||||
this.options = options ?? {};
|
||||
this.db = db;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const db = this.db;
|
||||
const name = this.name;
|
||||
|
||||
indexInformation(
|
||||
db,
|
||||
name,
|
||||
{ ...this.options, readPreference: this.readPreference, session },
|
||||
callback
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Register execution aspects: listIndexes is a retryable, cursor-creating
// read; all other index operations here are writes.
defineAspects(ListIndexesOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.CURSOR_CREATING
]);
defineAspects(CreateIndexesOperation, [Aspect.WRITE_OPERATION]);
defineAspects(CreateIndexOperation, [Aspect.WRITE_OPERATION]);
defineAspects(EnsureIndexOperation, [Aspect.WRITE_OPERATION]);
defineAspects(DropIndexOperation, [Aspect.WRITE_OPERATION]);
defineAspects(DropIndexesOperation, [Aspect.WRITE_OPERATION]);
|
158
node_modules/mongodb/src/operations/insert.ts
generated
vendored
Normal file
158
node_modules/mongodb/src/operations/insert.ts
generated
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { BulkWriteOptions } from '../bulk/common';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoInvalidArgumentError, MongoServerError } from '../error';
|
||||
import type { InferIdType } from '../mongo_types';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback, MongoDBNamespace } from '../utils';
|
||||
import { WriteConcern } from '../write_concern';
|
||||
import { BulkWriteOperation } from './bulk_write';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { prepareDocs } from './common_functions';
|
||||
import { AbstractOperation, Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @internal */
|
||||
export class InsertOperation extends CommandOperation<Document> {
|
||||
override options: BulkWriteOptions;
|
||||
documents: Document[];
|
||||
|
||||
constructor(ns: MongoDBNamespace, documents: Document[], options: BulkWriteOptions) {
|
||||
super(undefined, options);
|
||||
this.options = { ...options, checkKeys: options.checkKeys ?? false };
|
||||
this.ns = ns;
|
||||
this.documents = documents;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const options = this.options ?? {};
|
||||
const ordered = typeof options.ordered === 'boolean' ? options.ordered : true;
|
||||
const command: Document = {
|
||||
insert: this.ns.collection,
|
||||
documents: this.documents,
|
||||
ordered
|
||||
};
|
||||
|
||||
if (typeof options.bypassDocumentValidation === 'boolean') {
|
||||
command.bypassDocumentValidation = options.bypassDocumentValidation;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (options.comment !== undefined) {
|
||||
command.comment = options.comment;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, command, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Options accepted by insertOne.
 * @public
 */
export interface InsertOneOptions extends CommandOperationOptions {
  /** Allow driver to bypass schema validation in MongoDB 3.2 or higher. */
  bypassDocumentValidation?: boolean;
  /** Force server to assign _id values instead of driver. */
  forceServerObjectId?: boolean;
}
|
||||
|
||||
/**
 * Result of a successful insertOne.
 * @public
 */
export interface InsertOneResult<TSchema = Document> {
  /** Indicates whether this write result was acknowledged. If not, then all other members of this result will be undefined */
  acknowledged: boolean;
  /** The identifier that was inserted. If the server generated the identifier, this value will be null as the driver does not have access to that data */
  insertedId: InferIdType<TSchema>;
}
|
||||
|
||||
export class InsertOneOperation extends InsertOperation {
|
||||
constructor(collection: Collection, doc: Document, options: InsertOneOptions) {
|
||||
super(collection.s.namespace, prepareDocs(collection, [doc], options), options);
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<InsertOneResult>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || res == null) return callback(err);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) {
|
||||
// This should be a WriteError but we can't change it now because of error hierarchy
|
||||
return callback(new MongoServerError(res.writeErrors[0]));
|
||||
}
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
insertedId: this.documents[0]._id
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Result of a successful insertMany.
 * @public
 */
export interface InsertManyResult<TSchema = Document> {
  /** Indicates whether this write result was acknowledged. If not, then all other members of this result will be undefined */
  acknowledged: boolean;
  /** The number of inserted documents for this operations */
  insertedCount: number;
  /** Map of the index of the inserted document to the id of the inserted document */
  insertedIds: { [key: number]: InferIdType<TSchema> };
}
|
||||
|
||||
/** @internal */
|
||||
export class InsertManyOperation extends AbstractOperation<InsertManyResult> {
|
||||
override options: BulkWriteOptions;
|
||||
collection: Collection;
|
||||
docs: Document[];
|
||||
|
||||
constructor(collection: Collection, docs: Document[], options: BulkWriteOptions) {
|
||||
super(options);
|
||||
|
||||
if (!Array.isArray(docs)) {
|
||||
throw new MongoInvalidArgumentError('Argument "docs" must be an array of documents');
|
||||
}
|
||||
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
this.docs = docs;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<InsertManyResult>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
const options = { ...this.options, ...this.bsonOptions, readPreference: this.readPreference };
|
||||
const writeConcern = WriteConcern.fromOptions(options);
|
||||
const bulkWriteOperation = new BulkWriteOperation(
|
||||
coll,
|
||||
prepareDocs(coll, this.docs, options).map(document => ({ insertOne: { document } })),
|
||||
options
|
||||
);
|
||||
|
||||
bulkWriteOperation.execute(server, session, (err, res) => {
|
||||
if (err || res == null) {
|
||||
if (err && err.message === 'Operation must be an object with an operation key') {
|
||||
err = new MongoInvalidArgumentError(
|
||||
'Collection.insertMany() cannot be called with an array that has null/undefined values'
|
||||
);
|
||||
}
|
||||
return callback(err);
|
||||
}
|
||||
callback(undefined, {
|
||||
acknowledged: writeConcern?.w !== 0 ?? true,
|
||||
insertedCount: res.insertedCount,
|
||||
insertedIds: res.insertedIds
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Register execution aspects: single-document inserts are retryable writes;
// insertMany delegates to bulk write, which is not marked retryable here.
defineAspects(InsertOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION]);
defineAspects(InsertOneOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION]);
defineAspects(InsertManyOperation, [Aspect.WRITE_OPERATION]);
|
44
node_modules/mongodb/src/operations/is_capped.ts
generated
vendored
Normal file
44
node_modules/mongodb/src/operations/is_capped.ts
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoAPIError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { AbstractOperation, OperationOptions } from './operation';
|
||||
|
||||
/**
 * Determines whether a collection is capped by reading its options from
 * listCollections.
 * @internal
 */
export class IsCappedOperation extends AbstractOperation<boolean> {
  override options: OperationOptions;
  collection: Collection;

  constructor(collection: Collection, options: OperationOptions) {
    super(options);
    this.options = options;
    this.collection = collection;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<boolean>
  ): void {
    const coll = this.collection;

    // nameOnly: false forces the server to include the options document,
    // which carries the `capped` flag we need.
    coll.s.db
      .listCollections(
        { name: coll.collectionName },
        { ...this.options, nameOnly: false, readPreference: this.readPreference, session }
      )
      .toArray()
      .then(
        // Two-argument then(): the rejection handler must not catch errors
        // thrown by the fulfillment handler (which invokes the user callback).
        collections => {
          if (collections.length === 0) {
            // TODO(NODE-3485)
            return callback(new MongoAPIError(`collection ${coll.namespace} not found`));
          }

          // Coerce a missing options/capped value to false.
          callback(undefined, !!collections[0].options?.capped);
        },
        error => callback(error)
      );
  }
}
|
53
node_modules/mongodb/src/operations/kill_cursors.ts
generated
vendored
Normal file
53
node_modules/mongodb/src/operations/kill_cursors.ts
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { Long } from '../bson';
|
||||
import { MongoRuntimeError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback, MongoDBNamespace } from '../utils';
|
||||
import { AbstractOperation, Aspect, defineAspects, OperationOptions } from './operation';
|
||||
|
||||
/**
 * Shape of the killCursors command document.
 * https://www.mongodb.com/docs/manual/reference/command/killCursors/
 * @internal
 */
interface KillCursorsCommand {
  /** Target collection name. */
  killCursors: string;
  /** Ids of the server-side cursors to kill. */
  cursors: Long[];
  comment?: unknown;
}
|
||||
|
||||
export class KillCursorsOperation extends AbstractOperation {
|
||||
cursorId: Long;
|
||||
|
||||
constructor(cursorId: Long, ns: MongoDBNamespace, server: Server, options: OperationOptions) {
|
||||
super(options);
|
||||
this.ns = ns;
|
||||
this.cursorId = cursorId;
|
||||
this.server = server;
|
||||
}
|
||||
|
||||
execute(server: Server, session: ClientSession | undefined, callback: Callback<void>): void {
|
||||
if (server !== this.server) {
|
||||
return callback(
|
||||
new MongoRuntimeError('Killcursor must run on the same server operation began on')
|
||||
);
|
||||
}
|
||||
|
||||
const killCursors = this.ns.collection;
|
||||
if (killCursors == null) {
|
||||
// Cursors should have adopted the namespace returned by MongoDB
|
||||
// which should always defined a collection name (even a pseudo one, ex. db.aggregate())
|
||||
return callback(
|
||||
new MongoRuntimeError('A collection name must be determined before killCursors')
|
||||
);
|
||||
}
|
||||
|
||||
const killCursorsCommand: KillCursorsCommand = {
|
||||
killCursors,
|
||||
cursors: [this.cursorId]
|
||||
};
|
||||
|
||||
server.command(this.ns, killCursorsCommand, { session }, () => callback());
|
||||
}
|
||||
}
|
||||
|
||||
// killCursors must be routed to the exact server that owns the cursor.
defineAspects(KillCursorsOperation, [Aspect.MUST_SELECT_SAME_SERVER]);
|
99
node_modules/mongodb/src/operations/list_collections.ts
generated
vendored
Normal file
99
node_modules/mongodb/src/operations/list_collections.ts
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
import type { Binary, Document } from '../bson';
|
||||
import type { Db } from '../db';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, maxWireVersion } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/**
 * Options for listing a database's collections. Write concern is excluded
 * because listCollections is a read-only command.
 * @public
 */
export interface ListCollectionsOptions extends Omit<CommandOperationOptions, 'writeConcern'> {
  /** Since 4.0: If true, will only return the collection name in the response, and will omit additional info */
  nameOnly?: boolean;
  /** Since 4.0: If true and nameOnly is true, allows a user without the required privilege (i.e. listCollections action on the database) to run the command when access control is enforced. */
  authorizedCollections?: boolean;
  /** The batchSize for the returned command cursor or if pre 2.8 the systems batch collection */
  batchSize?: number;
}
|
||||
|
||||
/**
 * Issues a cursor-producing `listCollections` command against a database.
 * @internal
 */
export class ListCollectionsOperation extends CommandOperation<string[]> {
  /**
   * @remarks WriteConcern can still be present on the options because
   * we inherit options from the client/db/collection. The
   * key must be present on the options in order to delete it.
   * This allows typescript to delete the key but will
   * not allow a writeConcern to be assigned as a property on options.
   */
  override options: ListCollectionsOptions & { writeConcern?: never };
  db: Db;
  // Filter document applied server-side to the collection list.
  filter: Document;
  nameOnly: boolean;
  authorizedCollections: boolean;
  batchSize?: number;

  constructor(db: Db, filter: Document, options?: ListCollectionsOptions) {
    super(db, options);

    // Copy before deleting so the caller's options object is not mutated.
    this.options = { ...options };
    delete this.options.writeConcern;
    this.db = db;
    this.filter = filter;
    this.nameOnly = !!this.options.nameOnly;
    this.authorizedCollections = !!this.options.authorizedCollections;

    if (typeof this.options.batchSize === 'number') {
      this.batchSize = this.options.batchSize;
    }
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<string[]>
  ): void {
    return super.executeCommand(
      server,
      session,
      this.generateCommand(maxWireVersion(server)),
      callback
    );
  }

  /* This is here for the purpose of unit testing the final command that gets sent. */
  generateCommand(wireVersion: number): Document {
    const command: Document = {
      listCollections: 1,
      filter: this.filter,
      cursor: this.batchSize ? { batchSize: this.batchSize } : {},
      nameOnly: this.nameOnly,
      authorizedCollections: this.authorizedCollections
    };

    // we check for undefined specifically here to allow falsy values
    // eslint-disable-next-line no-restricted-syntax
    if (wireVersion >= 9 && this.options.comment !== undefined) {
      // Wire version 9 (server 4.4) is the first to accept `comment` here.
      command.comment = this.options.comment;
    }

    return command;
  }
}
|
||||
|
||||
/**
 * Shape of a document returned by listCollections.
 * @public
 */
export interface CollectionInfo extends Document {
  /** Collection name. */
  name: string;
  /** Collection kind — presumably 'collection' or 'view'; confirm against server docs. */
  type?: string;
  /** Options the collection was created with. */
  options?: Document;
  info?: {
    readOnly?: false;
    uuid?: Binary;
  };
  /** Description of the _id index. */
  idIndex?: Document;
}
|
||||
|
||||
// listCollections is a retryable, cursor-creating read.
defineAspects(ListCollectionsOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.CURSOR_CREATING
]);
|
65
node_modules/mongodb/src/operations/list_databases.ts
generated
vendored
Normal file
65
node_modules/mongodb/src/operations/list_databases.ts
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Db } from '../db';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, maxWireVersion, MongoDBNamespace } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/**
 * Server reply for the listDatabases command.
 * @public
 */
export interface ListDatabasesResult {
  /** One entry per database; size/empty fields are omitted when nameOnly is used. */
  databases: ({ name: string; sizeOnDisk?: number; empty?: boolean } & Document)[];
  totalSize?: number;
  totalSizeMb?: number;
  /** Command status as reported by the server. */
  ok: 1 | 0;
}
|
||||
|
||||
/**
 * Options for the listDatabases helper.
 * @public
 */
export interface ListDatabasesOptions extends CommandOperationOptions {
  /** A query predicate that determines which databases are listed */
  filter?: Document;
  /** A flag to indicate whether the command should return just the database names, or return both database names and size information */
  nameOnly?: boolean;
  /** A flag that determines which databases are returned based on the user privileges when access control is enabled */
  authorizedDatabases?: boolean;
}
|
||||
|
||||
/** @internal */
|
||||
export class ListDatabasesOperation extends CommandOperation<ListDatabasesResult> {
|
||||
override options: ListDatabasesOptions;
|
||||
|
||||
constructor(db: Db, options?: ListDatabasesOptions) {
|
||||
super(db, options);
|
||||
this.options = options ?? {};
|
||||
this.ns = new MongoDBNamespace('admin', '$cmd');
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<ListDatabasesResult>
|
||||
): void {
|
||||
const cmd: Document = { listDatabases: 1 };
|
||||
if (this.options.nameOnly) {
|
||||
cmd.nameOnly = Number(cmd.nameOnly);
|
||||
}
|
||||
|
||||
if (this.options.filter) {
|
||||
cmd.filter = this.options.filter;
|
||||
}
|
||||
|
||||
if (typeof this.options.authorizedDatabases === 'boolean') {
|
||||
cmd.authorizedDatabases = this.options.authorizedDatabases;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (maxWireVersion(server) >= 9 && this.options.comment !== undefined) {
|
||||
cmd.comment = this.options.comment;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, cmd, callback);
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(ListDatabasesOperation, [Aspect.READ_OPERATION, Aspect.RETRYABLE]);
|
139
node_modules/mongodb/src/operations/operation.ts
generated
vendored
Normal file
139
node_modules/mongodb/src/operations/operation.ts
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
import { promisify } from 'util';
|
||||
|
||||
import { BSONSerializeOptions, Document, resolveBSONOptions } from '../bson';
|
||||
import { ReadPreference, ReadPreferenceLike } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback, MongoDBNamespace } from '../utils';
|
||||
|
||||
/**
 * Marker symbols attached to operation classes via `defineAspects`. The
 * execution layer inspects these traits (e.g. retryability, read vs write)
 * to decide how to run an operation.
 */
export const Aspect = {
  READ_OPERATION: Symbol('READ_OPERATION'),
  WRITE_OPERATION: Symbol('WRITE_OPERATION'),
  RETRYABLE: Symbol('RETRYABLE'),
  EXPLAINABLE: Symbol('EXPLAINABLE'),
  SKIP_COLLATION: Symbol('SKIP_COLLATION'),
  CURSOR_CREATING: Symbol('CURSOR_CREATING'),
  MUST_SELECT_SAME_SERVER: Symbol('MUST_SELECT_SAME_SERVER')
} as const;
|
||||
|
||||
/** @public */
export type Hint = string | Document;

/** A constructor that may carry the aspect set installed by `defineAspects`. */
export interface OperationConstructor extends Function {
  /** Aspect symbols for this operation class; absent until `defineAspects` has been called on it. */
  aspects?: Set<symbol>;
}
|
||||
|
||||
/** @public */
export interface OperationOptions extends BSONSerializeOptions {
  /** Specify ClientSession for this command */
  session?: ClientSession;
  // NOTE(review): looks like internal retry bookkeeping rather than a user-facing
  // option — confirm against executeOperation before documenting publicly.
  willRetryWrite?: boolean;

  /** The preferred read preference (ReadPreference.primary, ReadPreference.primary_preferred, ReadPreference.secondary, ReadPreference.secondary_preferred, ReadPreference.nearest). */
  readPreference?: ReadPreferenceLike;

  /** @internal Hints to `executeOperation` that this operation should not unpin on an ended transaction */
  bypassPinningCheck?: boolean;
  // NOTE(review): presumably suppresses attaching $readPreference to the wire
  // command — confirm against the connection layer.
  omitReadPreference?: boolean;
}
|
||||
|
||||
/** @internal */
|
||||
const kSession = Symbol('session');
|
||||
|
||||
/**
|
||||
* This class acts as a parent class for any operation and is responsible for setting this.options,
|
||||
* as well as setting and getting a session.
|
||||
* Additionally, this class implements `hasAspect`, which determines whether an operation has
|
||||
* a specific aspect.
|
||||
* @internal
|
||||
*/
|
||||
export abstract class AbstractOperation<TResult = any> {
|
||||
ns!: MongoDBNamespace;
|
||||
cmd!: Document;
|
||||
readPreference: ReadPreference;
|
||||
server!: Server;
|
||||
bypassPinningCheck: boolean;
|
||||
trySecondaryWrite: boolean;
|
||||
|
||||
// BSON serialization options
|
||||
bsonOptions?: BSONSerializeOptions;
|
||||
|
||||
options: OperationOptions;
|
||||
|
||||
[kSession]: ClientSession | undefined;
|
||||
|
||||
executeAsync: (server: Server, session: ClientSession | undefined) => Promise<TResult>;
|
||||
|
||||
constructor(options: OperationOptions = {}) {
|
||||
this.executeAsync = promisify(
|
||||
(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: (e: Error, r: TResult) => void
|
||||
) => {
|
||||
this.execute(server, session, callback as any);
|
||||
}
|
||||
);
|
||||
|
||||
this.readPreference = this.hasAspect(Aspect.WRITE_OPERATION)
|
||||
? ReadPreference.primary
|
||||
: ReadPreference.fromOptions(options) ?? ReadPreference.primary;
|
||||
|
||||
// Pull the BSON serialize options from the already-resolved options
|
||||
this.bsonOptions = resolveBSONOptions(options);
|
||||
|
||||
this[kSession] = options.session != null ? options.session : undefined;
|
||||
|
||||
this.options = options;
|
||||
this.bypassPinningCheck = !!options.bypassPinningCheck;
|
||||
this.trySecondaryWrite = false;
|
||||
}
|
||||
|
||||
abstract execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<TResult>
|
||||
): void;
|
||||
|
||||
hasAspect(aspect: symbol): boolean {
|
||||
const ctor = this.constructor as OperationConstructor;
|
||||
if (ctor.aspects == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ctor.aspects.has(aspect);
|
||||
}
|
||||
|
||||
get session(): ClientSession | undefined {
|
||||
return this[kSession];
|
||||
}
|
||||
|
||||
clearSession() {
|
||||
this[kSession] = undefined;
|
||||
}
|
||||
|
||||
get canRetryRead(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
get canRetryWrite(): boolean {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
export function defineAspects(
|
||||
operation: OperationConstructor,
|
||||
aspects: symbol | symbol[] | Set<symbol>
|
||||
): Set<symbol> {
|
||||
if (!Array.isArray(aspects) && !(aspects instanceof Set)) {
|
||||
aspects = [aspects];
|
||||
}
|
||||
|
||||
aspects = new Set(aspects);
|
||||
Object.defineProperty(operation, 'aspects', {
|
||||
value: aspects,
|
||||
writable: false
|
||||
});
|
||||
|
||||
return aspects;
|
||||
}
|
45
node_modules/mongodb/src/operations/options_operation.ts
generated
vendored
Normal file
45
node_modules/mongodb/src/operations/options_operation.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoAPIError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { AbstractOperation, OperationOptions } from './operation';
|
||||
|
||||
/** @internal */
|
||||
export class OptionsOperation extends AbstractOperation<Document> {
|
||||
override options: OperationOptions;
|
||||
collection: Collection;
|
||||
|
||||
constructor(collection: Collection, options: OperationOptions) {
|
||||
super(options);
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
|
||||
coll.s.db
|
||||
.listCollections(
|
||||
{ name: coll.collectionName },
|
||||
{ ...this.options, nameOnly: false, readPreference: this.readPreference, session }
|
||||
)
|
||||
.toArray()
|
||||
.then(
|
||||
collections => {
|
||||
if (collections.length === 0) {
|
||||
// TODO(NODE-3485)
|
||||
return callback(new MongoAPIError(`collection ${coll.namespace} not found`));
|
||||
}
|
||||
|
||||
callback(undefined, collections[0].options);
|
||||
},
|
||||
error => callback(error)
|
||||
);
|
||||
}
|
||||
}
|
39
node_modules/mongodb/src/operations/profiling_level.ts
generated
vendored
Normal file
39
node_modules/mongodb/src/operations/profiling_level.ts
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
import type { Db } from '../db';
|
||||
import { MongoRuntimeError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
|
||||
/** @public */
|
||||
export type ProfilingLevelOptions = CommandOperationOptions;
|
||||
|
||||
/** @internal */
|
||||
export class ProfilingLevelOperation extends CommandOperation<string> {
|
||||
override options: ProfilingLevelOptions;
|
||||
|
||||
constructor(db: Db, options: ProfilingLevelOptions) {
|
||||
super(db, options);
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<string>
|
||||
): void {
|
||||
super.executeCommand(server, session, { profile: -1 }, (err, doc) => {
|
||||
if (err == null && doc.ok === 1) {
|
||||
const was = doc.was;
|
||||
if (was === 0) return callback(undefined, 'off');
|
||||
if (was === 1) return callback(undefined, 'slow_only');
|
||||
if (was === 2) return callback(undefined, 'all');
|
||||
// TODO(NODE-3483)
|
||||
return callback(new MongoRuntimeError(`Illegal profiling level value ${was}`));
|
||||
} else {
|
||||
// TODO(NODE-3483): Consider MongoUnexpectedServerResponseError
|
||||
err != null ? callback(err) : callback(new MongoRuntimeError('Error with profile command'));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
33
node_modules/mongodb/src/operations/remove_user.ts
generated
vendored
Normal file
33
node_modules/mongodb/src/operations/remove_user.ts
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
import type { Db } from '../db';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/** @public */
|
||||
export type RemoveUserOptions = CommandOperationOptions;
|
||||
|
||||
/** @internal */
|
||||
export class RemoveUserOperation extends CommandOperation<boolean> {
|
||||
override options: RemoveUserOptions;
|
||||
username: string;
|
||||
|
||||
constructor(db: Db, username: string, options: RemoveUserOptions) {
|
||||
super(db, options);
|
||||
this.options = options;
|
||||
this.username = username;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<boolean>
|
||||
): void {
|
||||
super.executeCommand(server, session, { dropUser: this.username }, err => {
|
||||
callback(err, err ? false : true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(RemoveUserOperation, [Aspect.WRITE_OPERATION]);
|
67
node_modules/mongodb/src/operations/rename.ts
generated
vendored
Normal file
67
node_modules/mongodb/src/operations/rename.ts
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
import type { Document } from '../bson';
|
||||
import { Collection } from '../collection';
|
||||
import { MongoServerError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, checkCollectionName } from '../utils';
|
||||
import type { CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
import { RunAdminCommandOperation } from './run_command';
|
||||
|
||||
/** @public */
export interface RenameOptions extends CommandOperationOptions {
  /** Drop the target name collection if it previously exists. */
  dropTarget?: boolean;
  // NOTE(review): not referenced by RenameOperation in this file; purpose
  // unclear — confirm whether this option is still consumed anywhere.
  new_collection?: boolean;
}
|
||||
|
||||
/** @internal */
|
||||
export class RenameOperation extends RunAdminCommandOperation {
|
||||
override options: RenameOptions;
|
||||
collection: Collection;
|
||||
newName: string;
|
||||
|
||||
constructor(collection: Collection, newName: string, options: RenameOptions) {
|
||||
// Check the collection name
|
||||
checkCollectionName(newName);
|
||||
|
||||
// Build the command
|
||||
const renameCollection = collection.namespace;
|
||||
const toCollection = collection.s.namespace.withCollection(newName).toString();
|
||||
const dropTarget = typeof options.dropTarget === 'boolean' ? options.dropTarget : false;
|
||||
const cmd = { renameCollection: renameCollection, to: toCollection, dropTarget: dropTarget };
|
||||
|
||||
super(collection, cmd, options);
|
||||
this.options = options;
|
||||
this.collection = collection;
|
||||
this.newName = newName;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Collection>
|
||||
): void {
|
||||
const coll = this.collection;
|
||||
|
||||
super.execute(server, session, (err, doc) => {
|
||||
if (err) return callback(err);
|
||||
// We have an error
|
||||
if (doc?.errmsg) {
|
||||
return callback(new MongoServerError(doc));
|
||||
}
|
||||
|
||||
let newColl: Collection<Document>;
|
||||
try {
|
||||
newColl = new Collection(coll.s.db, this.newName, coll.s.options);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
return callback(undefined, newColl);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
defineAspects(RenameOperation, [Aspect.WRITE_OPERATION]);
|
73
node_modules/mongodb/src/operations/run_command.ts
generated
vendored
Normal file
73
node_modules/mongodb/src/operations/run_command.ts
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
import type { BSONSerializeOptions, Document } from '../bson';
|
||||
import type { ReadPreferenceLike } from '../read_preference';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, MongoDBNamespace } from '../utils';
|
||||
import { CommandOperation, OperationParent } from './command';
|
||||
|
||||
/** @public */
|
||||
export type RunCommandOptions = {
|
||||
/** Specify ClientSession for this command */
|
||||
session?: ClientSession;
|
||||
/** The read preference */
|
||||
readPreference?: ReadPreferenceLike;
|
||||
|
||||
// The following options were "accidentally" supported
|
||||
// Since the options are generally supported through inheritance
|
||||
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
willRetryWrite?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
omitReadPreference?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
writeConcern?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
explain?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
readConcern?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
collation?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
maxTimeMS?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
comment?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
retryWrites?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
dbName?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
authdb?: any;
|
||||
/** @deprecated This is an internal option that has undefined behavior for this API */
|
||||
noResponse?: any;
|
||||
|
||||
/** @internal Used for transaction commands */
|
||||
bypassPinningCheck?: boolean;
|
||||
} & BSONSerializeOptions;
|
||||
|
||||
/** @internal */
|
||||
export class RunCommandOperation<T = Document> extends CommandOperation<T> {
|
||||
override options: RunCommandOptions;
|
||||
command: Document;
|
||||
|
||||
constructor(parent: OperationParent | undefined, command: Document, options?: RunCommandOptions) {
|
||||
super(parent, options);
|
||||
this.options = options ?? {};
|
||||
this.command = command;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<T>
|
||||
): void {
|
||||
const command = this.command;
|
||||
this.executeCommand(server, session, command, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/** A RunCommandOperation that is always executed against the `admin` database. */
export class RunAdminCommandOperation<T = Document> extends RunCommandOperation<T> {
  constructor(parent: OperationParent | undefined, command: Document, options?: RunCommandOptions) {
    super(parent, command, options);
    // Force the namespace to admin regardless of what the parent resolved.
    this.ns = new MongoDBNamespace('admin');
  }
}
|
48
node_modules/mongodb/src/operations/search_indexes/create.ts
generated
vendored
Normal file
48
node_modules/mongodb/src/operations/search_indexes/create.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import type { Document } from 'bson';
|
||||
|
||||
import type { Collection } from '../../collection';
|
||||
import type { Server } from '../../sdam/server';
|
||||
import type { ClientSession } from '../../sessions';
|
||||
import type { Callback } from '../../utils';
|
||||
import { AbstractOperation } from '../operation';
|
||||
|
||||
/** @internal */
export interface SearchIndexDescription {
  /** The name of the index. */
  name?: string;

  /**
   * The index definition.
   * NOTE(review): field is named `description` but the doc says "definition" —
   * confirm against the `createSearchIndexes` command shape before relying on it.
   */
  description: Document;
}
|
||||
|
||||
/** @internal */
|
||||
export class CreateSearchIndexesOperation extends AbstractOperation<string[]> {
|
||||
constructor(
|
||||
private readonly collection: Collection,
|
||||
private readonly descriptions: ReadonlyArray<SearchIndexDescription>
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
execute(server: Server, session: ClientSession | undefined, callback: Callback<string[]>): void {
|
||||
const namespace = this.collection.fullNamespace;
|
||||
const command = {
|
||||
createSearchIndexes: namespace.collection,
|
||||
indexes: this.descriptions
|
||||
};
|
||||
|
||||
server.command(namespace, command, { session }, (err, res) => {
|
||||
if (err || !res) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const indexesCreated: Array<{ name: string }> = res?.indexesCreated ?? [];
|
||||
|
||||
callback(
|
||||
undefined,
|
||||
indexesCreated.map(({ name }) => name)
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
35
node_modules/mongodb/src/operations/search_indexes/drop.ts
generated
vendored
Normal file
35
node_modules/mongodb/src/operations/search_indexes/drop.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import type { Document } from 'bson';
|
||||
|
||||
import type { Collection } from '../../collection';
|
||||
import type { Server } from '../../sdam/server';
|
||||
import type { ClientSession } from '../../sessions';
|
||||
import type { Callback } from '../../utils';
|
||||
import { AbstractOperation } from '../operation';
|
||||
|
||||
/** @internal */
|
||||
export class DropSearchIndexOperation extends AbstractOperation<void> {
|
||||
constructor(private readonly collection: Collection, private readonly name: string) {
|
||||
super();
|
||||
}
|
||||
|
||||
execute(server: Server, session: ClientSession | undefined, callback: Callback<void>): void {
|
||||
const namespace = this.collection.fullNamespace;
|
||||
|
||||
const command: Document = {
|
||||
dropSearchIndex: namespace.collection
|
||||
};
|
||||
|
||||
if (typeof this.name === 'string') {
|
||||
command.name = this.name;
|
||||
}
|
||||
|
||||
server.command(namespace, command, { session }, err => {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
|
||||
callback();
|
||||
});
|
||||
}
|
||||
}
|
36
node_modules/mongodb/src/operations/search_indexes/update.ts
generated
vendored
Normal file
36
node_modules/mongodb/src/operations/search_indexes/update.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
import type { Document } from 'bson';
|
||||
|
||||
import type { Collection } from '../../collection';
|
||||
import type { Server } from '../../sdam/server';
|
||||
import type { ClientSession } from '../../sessions';
|
||||
import type { Callback } from '../../utils';
|
||||
import { AbstractOperation } from '../operation';
|
||||
|
||||
/** @internal */
|
||||
export class UpdateSearchIndexOperation extends AbstractOperation<void> {
|
||||
constructor(
|
||||
private readonly collection: Collection,
|
||||
private readonly name: string,
|
||||
private readonly definition: Document
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
execute(server: Server, session: ClientSession | undefined, callback: Callback<void>): void {
|
||||
const namespace = this.collection.fullNamespace;
|
||||
const command = {
|
||||
updateSearchIndex: namespace.collection,
|
||||
name: this.name,
|
||||
definition: this.definition
|
||||
};
|
||||
|
||||
server.command(namespace, command, { session }, err => {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
|
||||
callback();
|
||||
});
|
||||
}
|
||||
}
|
74
node_modules/mongodb/src/operations/set_profiling_level.ts
generated
vendored
Normal file
74
node_modules/mongodb/src/operations/set_profiling_level.ts
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
import type { Db } from '../db';
|
||||
import { MongoInvalidArgumentError, MongoRuntimeError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { enumToString } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
|
||||
// Wire-format profiling level strings accepted by SetProfilingLevelOperation.
const levelValues = new Set(['off', 'slow_only', 'all']);

/** @public */
export const ProfilingLevel = Object.freeze({
  off: 'off',
  slowOnly: 'slow_only',
  all: 'all'
} as const);

/** @public */
export type ProfilingLevel = (typeof ProfilingLevel)[keyof typeof ProfilingLevel];

/** @public */
export type SetProfilingLevelOptions = CommandOperationOptions;
|
||||
|
||||
/** @internal */
|
||||
export class SetProfilingLevelOperation extends CommandOperation<ProfilingLevel> {
|
||||
override options: SetProfilingLevelOptions;
|
||||
level: ProfilingLevel;
|
||||
profile: 0 | 1 | 2;
|
||||
|
||||
constructor(db: Db, level: ProfilingLevel, options: SetProfilingLevelOptions) {
|
||||
super(db, options);
|
||||
this.options = options;
|
||||
switch (level) {
|
||||
case ProfilingLevel.off:
|
||||
this.profile = 0;
|
||||
break;
|
||||
case ProfilingLevel.slowOnly:
|
||||
this.profile = 1;
|
||||
break;
|
||||
case ProfilingLevel.all:
|
||||
this.profile = 2;
|
||||
break;
|
||||
default:
|
||||
this.profile = 0;
|
||||
break;
|
||||
}
|
||||
|
||||
this.level = level;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<ProfilingLevel>
|
||||
): void {
|
||||
const level = this.level;
|
||||
|
||||
if (!levelValues.has(level)) {
|
||||
return callback(
|
||||
new MongoInvalidArgumentError(
|
||||
`Profiling level must be one of "${enumToString(ProfilingLevel)}"`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// TODO(NODE-3483): Determine error to put here
|
||||
super.executeCommand(server, session, { profile: this.profile }, (err, doc) => {
|
||||
if (err == null && doc.ok === 1) return callback(undefined, level);
|
||||
return err != null
|
||||
? callback(err)
|
||||
: callback(new MongoRuntimeError('Error with profile command'));
|
||||
});
|
||||
}
|
||||
}
|
280
node_modules/mongodb/src/operations/stats.ts
generated
vendored
Normal file
280
node_modules/mongodb/src/operations/stats.ts
generated
vendored
Normal file
@@ -0,0 +1,280 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import type { Db } from '../db';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects } from './operation';
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @deprecated the `collStats` operation will be removed in the next major release. Please
|
||||
* use an aggregation pipeline with the [`$collStats`](https://www.mongodb.com/docs/manual/reference/operator/aggregation/collStats/) stage instead
|
||||
*/
|
||||
export interface CollStatsOptions extends CommandOperationOptions {
|
||||
/** Divide the returned sizes by scale value. */
|
||||
scale?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all the collection statistics.
|
||||
* @internal
|
||||
*/
|
||||
export class CollStatsOperation extends CommandOperation<Document> {
|
||||
override options: CollStatsOptions;
|
||||
collectionName: string;
|
||||
|
||||
/**
|
||||
* Construct a Stats operation.
|
||||
*
|
||||
* @param collection - Collection instance
|
||||
* @param options - Optional settings. See Collection.prototype.stats for a list of options.
|
||||
*/
|
||||
constructor(collection: Collection, options?: CollStatsOptions) {
|
||||
super(collection, options);
|
||||
this.options = options ?? {};
|
||||
this.collectionName = collection.collectionName;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<CollStats>
|
||||
): void {
|
||||
const command: Document = { collStats: this.collectionName };
|
||||
if (this.options.scale != null) {
|
||||
command.scale = this.options.scale;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, command, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/** @public */
|
||||
export interface DbStatsOptions extends CommandOperationOptions {
|
||||
/** Divide the returned sizes by scale value. */
|
||||
scale?: number;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class DbStatsOperation extends CommandOperation<Document> {
|
||||
override options: DbStatsOptions;
|
||||
|
||||
constructor(db: Db, options: DbStatsOptions) {
|
||||
super(db, options);
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const command: Document = { dbStats: true };
|
||||
if (this.options.scale != null) {
|
||||
command.scale = this.options.scale;
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, command, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated the `collStats` operation will be removed in the next major release. Please
|
||||
* use an aggregation pipeline with the [`$collStats`](https://www.mongodb.com/docs/manual/reference/operator/aggregation/collStats/) stage instead
|
||||
* @public
|
||||
* @see https://www.mongodb.com/docs/manual/reference/command/collStats/
|
||||
*/
|
||||
export interface CollStats extends Document {
|
||||
/** Namespace */
|
||||
ns: string;
|
||||
/** Number of documents */
|
||||
count: number;
|
||||
/** Collection size in bytes */
|
||||
size: number;
|
||||
/** Average object size in bytes */
|
||||
avgObjSize: number;
|
||||
/** (Pre)allocated space for the collection in bytes */
|
||||
storageSize: number;
|
||||
/** Number of extents (contiguously allocated chunks of datafile space) */
|
||||
numExtents: number;
|
||||
/** Number of indexes */
|
||||
nindexes: number;
|
||||
/** Size of the most recently created extent in bytes */
|
||||
lastExtentSize: number;
|
||||
/** Padding can speed up updates if documents grow */
|
||||
paddingFactor: number;
|
||||
/** A number that indicates the user-set flags on the collection. userFlags only appears when using the mmapv1 storage engine */
|
||||
userFlags?: number;
|
||||
/** Total index size in bytes */
|
||||
totalIndexSize: number;
|
||||
/** Size of specific indexes in bytes */
|
||||
indexSizes: {
|
||||
_id_: number;
|
||||
[index: string]: number;
|
||||
};
|
||||
/** `true` if the collection is capped */
|
||||
capped: boolean;
|
||||
/** The maximum number of documents that may be present in a capped collection */
|
||||
max: number;
|
||||
/** The maximum size of a capped collection */
|
||||
maxSize: number;
|
||||
/** This document contains data reported directly by the WiredTiger engine and other data for internal diagnostic use */
|
||||
wiredTiger?: WiredTigerData;
|
||||
/** The fields in this document are the names of the indexes, while the values themselves are documents that contain statistics for the index provided by the storage engine */
|
||||
indexDetails?: any;
|
||||
ok: number;
|
||||
|
||||
/** The amount of storage available for reuse. The scale argument affects this value. */
|
||||
freeStorageSize?: number;
|
||||
/** An array that contains the names of the indexes that are currently being built on the collection */
|
||||
indexBuilds?: number;
|
||||
/** The sum of the storageSize and totalIndexSize. The scale argument affects this value */
|
||||
totalSize: number;
|
||||
/** The scale value used by the command. */
|
||||
scaleFactor: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @deprecated This type is only used for the deprecated `collStats` operation and will be removed in the next major release.
|
||||
*/
|
||||
export interface WiredTigerData extends Document {
|
||||
LSM: {
|
||||
'bloom filter false positives': number;
|
||||
'bloom filter hits': number;
|
||||
'bloom filter misses': number;
|
||||
'bloom filter pages evicted from cache': number;
|
||||
'bloom filter pages read into cache': number;
|
||||
'bloom filters in the LSM tree': number;
|
||||
'chunks in the LSM tree': number;
|
||||
'highest merge generation in the LSM tree': number;
|
||||
'queries that could have benefited from a Bloom filter that did not exist': number;
|
||||
'sleep for LSM checkpoint throttle': number;
|
||||
'sleep for LSM merge throttle': number;
|
||||
'total size of bloom filters': number;
|
||||
} & Document;
|
||||
'block-manager': {
|
||||
'allocations requiring file extension': number;
|
||||
'blocks allocated': number;
|
||||
'blocks freed': number;
|
||||
'checkpoint size': number;
|
||||
'file allocation unit size': number;
|
||||
'file bytes available for reuse': number;
|
||||
'file magic number': number;
|
||||
'file major version number': number;
|
||||
'file size in bytes': number;
|
||||
'minor version number': number;
|
||||
};
|
||||
btree: {
|
||||
'btree checkpoint generation': number;
|
||||
'column-store fixed-size leaf pages': number;
|
||||
'column-store internal pages': number;
|
||||
'column-store variable-size RLE encoded values': number;
|
||||
'column-store variable-size deleted values': number;
|
||||
'column-store variable-size leaf pages': number;
|
||||
'fixed-record size': number;
|
||||
'maximum internal page key size': number;
|
||||
'maximum internal page size': number;
|
||||
'maximum leaf page key size': number;
|
||||
'maximum leaf page size': number;
|
||||
'maximum leaf page value size': number;
|
||||
'maximum tree depth': number;
|
||||
'number of key/value pairs': number;
|
||||
'overflow pages': number;
|
||||
'pages rewritten by compaction': number;
|
||||
'row-store internal pages': number;
|
||||
'row-store leaf pages': number;
|
||||
} & Document;
|
||||
cache: {
|
||||
'bytes currently in the cache': number;
|
||||
'bytes read into cache': number;
|
||||
'bytes written from cache': number;
|
||||
'checkpoint blocked page eviction': number;
|
||||
'data source pages selected for eviction unable to be evicted': number;
|
||||
'hazard pointer blocked page eviction': number;
|
||||
'in-memory page passed criteria to be split': number;
|
||||
'in-memory page splits': number;
|
||||
'internal pages evicted': number;
|
||||
'internal pages split during eviction': number;
|
||||
'leaf pages split during eviction': number;
|
||||
'modified pages evicted': number;
|
||||
'overflow pages read into cache': number;
|
||||
'overflow values cached in memory': number;
|
||||
'page split during eviction deepened the tree': number;
|
||||
'page written requiring lookaside records': number;
|
||||
'pages read into cache': number;
|
||||
'pages read into cache requiring lookaside entries': number;
|
||||
'pages requested from the cache': number;
|
||||
'pages written from cache': number;
|
||||
'pages written requiring in-memory restoration': number;
|
||||
'tracked dirty bytes in the cache': number;
|
||||
'unmodified pages evicted': number;
|
||||
} & Document;
|
||||
cache_walk: {
|
||||
'Average difference between current eviction generation when the page was last considered': number;
|
||||
'Average on-disk page image size seen': number;
|
||||
'Clean pages currently in cache': number;
|
||||
'Current eviction generation': number;
|
||||
'Dirty pages currently in cache': number;
|
||||
'Entries in the root page': number;
|
||||
'Internal pages currently in cache': number;
|
||||
'Leaf pages currently in cache': number;
|
||||
'Maximum difference between current eviction generation when the page was last considered': number;
|
||||
'Maximum page size seen': number;
|
||||
'Minimum on-disk page image size seen': number;
|
||||
'On-disk page image sizes smaller than a single allocation unit': number;
|
||||
'Pages created in memory and never written': number;
|
||||
'Pages currently queued for eviction': number;
|
||||
'Pages that could not be queued for eviction': number;
|
||||
'Refs skipped during cache traversal': number;
|
||||
'Size of the root page': number;
|
||||
'Total number of pages currently in cache': number;
|
||||
} & Document;
|
||||
compression: {
|
||||
'compressed pages read': number;
|
||||
'compressed pages written': number;
|
||||
'page written failed to compress': number;
|
||||
'page written was too small to compress': number;
|
||||
'raw compression call failed, additional data available': number;
|
||||
'raw compression call failed, no additional data available': number;
|
||||
'raw compression call succeeded': number;
|
||||
} & Document;
|
||||
cursor: {
|
||||
'bulk-loaded cursor-insert calls': number;
|
||||
'create calls': number;
|
||||
'cursor-insert key and value bytes inserted': number;
|
||||
'cursor-remove key bytes removed': number;
|
||||
'cursor-update value bytes updated': number;
|
||||
'insert calls': number;
|
||||
'next calls': number;
|
||||
'prev calls': number;
|
||||
'remove calls': number;
|
||||
'reset calls': number;
|
||||
'restarted searches': number;
|
||||
'search calls': number;
|
||||
'search near calls': number;
|
||||
'truncate calls': number;
|
||||
'update calls': number;
|
||||
};
|
||||
reconciliation: {
|
||||
'dictionary matches': number;
|
||||
'fast-path pages deleted': number;
|
||||
'internal page key bytes discarded using suffix compression': number;
|
||||
'internal page multi-block writes': number;
|
||||
'internal-page overflow keys': number;
|
||||
'leaf page key bytes discarded using prefix compression': number;
|
||||
'leaf page multi-block writes': number;
|
||||
'leaf-page overflow keys': number;
|
||||
'maximum blocks required for a page': number;
|
||||
'overflow values written': number;
|
||||
'page checksum matches': number;
|
||||
'page reconciliation calls': number;
|
||||
'page reconciliation calls for eviction': number;
|
||||
'pages deleted': number;
|
||||
} & Document;
|
||||
}
|
||||
|
||||
defineAspects(CollStatsOperation, [Aspect.READ_OPERATION]);
|
||||
defineAspects(DbStatsOperation, [Aspect.READ_OPERATION]);
|
310
node_modules/mongodb/src/operations/update.ts
generated
vendored
Normal file
310
node_modules/mongodb/src/operations/update.ts
generated
vendored
Normal file
@@ -0,0 +1,310 @@
|
||||
import type { Document } from '../bson';
|
||||
import type { Collection } from '../collection';
|
||||
import { MongoCompatibilityError, MongoInvalidArgumentError, MongoServerError } from '../error';
|
||||
import type { InferIdType } from '../mongo_types';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import { Callback, hasAtomicOperators, MongoDBNamespace } from '../utils';
|
||||
import { CollationOptions, CommandOperation, CommandOperationOptions } from './command';
|
||||
import { Aspect, defineAspects, Hint } from './operation';
|
||||
|
||||
/**
 * @public
 * Options accepted by updateOne/updateMany; mapped onto the server `update`
 * command by UpdateOperation.execute and makeUpdateStatement.
 */
export interface UpdateOptions extends CommandOperationOptions {
  /** A set of filters specifying to which array elements an update should apply */
  arrayFilters?: Document[];
  /** If true, allows the write to opt-out of document level validation */
  bypassDocumentValidation?: boolean;
  /** Specifies a collation */
  collation?: CollationOptions;
  /** Specify that the update query should only consider plans using the hinted index */
  hint?: Hint;
  /** When true, creates a new document if no document matches the query */
  upsert?: boolean;
  /** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
  let?: Document;
}
|
||||
|
||||
/**
 * @public
 * `TSchema` is the schema of the collection
 *
 * Normalized result produced from the raw `update` command reply
 * (see the execute callbacks of UpdateOne/UpdateMany/ReplaceOne).
 */
export interface UpdateResult<TSchema extends Document = Document> {
  /** Indicates whether this write result was acknowledged. If not, then all other members of this result will be undefined */
  acknowledged: boolean;
  /** The number of documents that matched the filter */
  matchedCount: number;
  /** The number of documents that were modified */
  modifiedCount: number;
  /** The number of documents that were upserted */
  upsertedCount: number;
  /** The identifier of the inserted document if an upsert took place */
  upsertedId: InferIdType<TSchema> | null;
}
|
||||
|
||||
/**
 * @public
 * One entry of the `updates` array sent in the server `update` command;
 * field names (`q`, `u`, ...) mirror the wire format.
 */
export interface UpdateStatement {
  /** The query that matches documents to update. */
  q: Document;
  /** The modifications to apply. */
  u: Document | Document[];
  /** If true, perform an insert if no documents match the query. */
  upsert?: boolean;
  /** If true, updates all documents that meet the query criteria. */
  multi?: boolean;
  /** Specifies the collation to use for the operation. */
  collation?: CollationOptions;
  /** An array of filter documents that determines which array elements to modify for an update operation on an array field. */
  arrayFilters?: Document[];
  /** A document or string that specifies the index to use to support the query predicate. */
  hint?: Hint;
}
|
||||
|
||||
/** @internal */
|
||||
export class UpdateOperation extends CommandOperation<Document> {
|
||||
override options: UpdateOptions & { ordered?: boolean };
|
||||
statements: UpdateStatement[];
|
||||
|
||||
constructor(
|
||||
ns: MongoDBNamespace,
|
||||
statements: UpdateStatement[],
|
||||
options: UpdateOptions & { ordered?: boolean }
|
||||
) {
|
||||
super(undefined, options);
|
||||
this.options = options;
|
||||
this.ns = ns;
|
||||
|
||||
this.statements = statements;
|
||||
}
|
||||
|
||||
override get canRetryWrite(): boolean {
|
||||
if (super.canRetryWrite === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.statements.every(op => op.multi == null || op.multi === false);
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<Document>
|
||||
): void {
|
||||
const options = this.options ?? {};
|
||||
const ordered = typeof options.ordered === 'boolean' ? options.ordered : true;
|
||||
const command: Document = {
|
||||
update: this.ns.collection,
|
||||
updates: this.statements,
|
||||
ordered
|
||||
};
|
||||
|
||||
if (typeof options.bypassDocumentValidation === 'boolean') {
|
||||
command.bypassDocumentValidation = options.bypassDocumentValidation;
|
||||
}
|
||||
|
||||
if (options.let) {
|
||||
command.let = options.let;
|
||||
}
|
||||
|
||||
// we check for undefined specifically here to allow falsy values
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
if (options.comment !== undefined) {
|
||||
command.comment = options.comment;
|
||||
}
|
||||
|
||||
const unacknowledgedWrite = this.writeConcern && this.writeConcern.w === 0;
|
||||
if (unacknowledgedWrite) {
|
||||
if (this.statements.find((o: Document) => o.hint)) {
|
||||
// TODO(NODE-3541): fix error for hint with unacknowledged writes
|
||||
callback(new MongoCompatibilityError(`hint is not supported with unacknowledged writes`));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
super.executeCommand(server, session, command, callback);
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class UpdateOneOperation extends UpdateOperation {
|
||||
constructor(collection: Collection, filter: Document, update: Document, options: UpdateOptions) {
|
||||
super(
|
||||
collection.s.namespace,
|
||||
[makeUpdateStatement(filter, update, { ...options, multi: false })],
|
||||
options
|
||||
);
|
||||
|
||||
if (!hasAtomicOperators(update)) {
|
||||
throw new MongoInvalidArgumentError('Update document requires atomic operators');
|
||||
}
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<UpdateResult | Document>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || !res) return callback(err);
|
||||
if (this.explain != null) return callback(undefined, res);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) return callback(new MongoServerError(res.writeErrors[0]));
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
modifiedCount: res.nModified != null ? res.nModified : res.n,
|
||||
upsertedId:
|
||||
Array.isArray(res.upserted) && res.upserted.length > 0 ? res.upserted[0]._id : null,
|
||||
upsertedCount: Array.isArray(res.upserted) && res.upserted.length ? res.upserted.length : 0,
|
||||
matchedCount: Array.isArray(res.upserted) && res.upserted.length > 0 ? 0 : res.n
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class UpdateManyOperation extends UpdateOperation {
|
||||
constructor(collection: Collection, filter: Document, update: Document, options: UpdateOptions) {
|
||||
super(
|
||||
collection.s.namespace,
|
||||
[makeUpdateStatement(filter, update, { ...options, multi: true })],
|
||||
options
|
||||
);
|
||||
|
||||
if (!hasAtomicOperators(update)) {
|
||||
throw new MongoInvalidArgumentError('Update document requires atomic operators');
|
||||
}
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<UpdateResult | Document>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || !res) return callback(err);
|
||||
if (this.explain != null) return callback(undefined, res);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) return callback(new MongoServerError(res.writeErrors[0]));
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
modifiedCount: res.nModified != null ? res.nModified : res.n,
|
||||
upsertedId:
|
||||
Array.isArray(res.upserted) && res.upserted.length > 0 ? res.upserted[0]._id : null,
|
||||
upsertedCount: Array.isArray(res.upserted) && res.upserted.length ? res.upserted.length : 0,
|
||||
matchedCount: Array.isArray(res.upserted) && res.upserted.length > 0 ? 0 : res.n
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * @public
 * Options accepted by replaceOne; identical to UpdateOptions except that
 * `arrayFilters` is not applicable to replacement documents.
 */
export interface ReplaceOptions extends CommandOperationOptions {
  /** If true, allows the write to opt-out of document level validation */
  bypassDocumentValidation?: boolean;
  /** Specifies a collation */
  collation?: CollationOptions;
  /** Specify that the update query should only consider plans using the hinted index */
  hint?: string | Document;
  /** When true, creates a new document if no document matches the query */
  upsert?: boolean;
  /** Map of parameter names and values that can be accessed using $$var (requires MongoDB 5.0). */
  let?: Document;
}
|
||||
|
||||
/** @internal */
|
||||
export class ReplaceOneOperation extends UpdateOperation {
|
||||
constructor(
|
||||
collection: Collection,
|
||||
filter: Document,
|
||||
replacement: Document,
|
||||
options: ReplaceOptions
|
||||
) {
|
||||
super(
|
||||
collection.s.namespace,
|
||||
[makeUpdateStatement(filter, replacement, { ...options, multi: false })],
|
||||
options
|
||||
);
|
||||
|
||||
if (hasAtomicOperators(replacement)) {
|
||||
throw new MongoInvalidArgumentError('Replacement document must not contain atomic operators');
|
||||
}
|
||||
}
|
||||
|
||||
override execute(
|
||||
server: Server,
|
||||
session: ClientSession | undefined,
|
||||
callback: Callback<UpdateResult | Document>
|
||||
): void {
|
||||
super.execute(server, session, (err, res) => {
|
||||
if (err || !res) return callback(err);
|
||||
if (this.explain != null) return callback(undefined, res);
|
||||
if (res.code) return callback(new MongoServerError(res));
|
||||
if (res.writeErrors) return callback(new MongoServerError(res.writeErrors[0]));
|
||||
|
||||
callback(undefined, {
|
||||
acknowledged: this.writeConcern?.w !== 0 ?? true,
|
||||
modifiedCount: res.nModified != null ? res.nModified : res.n,
|
||||
upsertedId:
|
||||
Array.isArray(res.upserted) && res.upserted.length > 0 ? res.upserted[0]._id : null,
|
||||
upsertedCount: Array.isArray(res.upserted) && res.upserted.length ? res.upserted.length : 0,
|
||||
matchedCount: Array.isArray(res.upserted) && res.upserted.length > 0 ? 0 : res.n
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function makeUpdateStatement(
|
||||
filter: Document,
|
||||
update: Document | Document[],
|
||||
options: UpdateOptions & { multi?: boolean }
|
||||
): UpdateStatement {
|
||||
if (filter == null || typeof filter !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Selector must be a valid JavaScript object');
|
||||
}
|
||||
|
||||
if (update == null || typeof update !== 'object') {
|
||||
throw new MongoInvalidArgumentError('Document must be a valid JavaScript object');
|
||||
}
|
||||
|
||||
const op: UpdateStatement = { q: filter, u: update };
|
||||
if (typeof options.upsert === 'boolean') {
|
||||
op.upsert = options.upsert;
|
||||
}
|
||||
|
||||
if (options.multi) {
|
||||
op.multi = options.multi;
|
||||
}
|
||||
|
||||
if (options.hint) {
|
||||
op.hint = options.hint;
|
||||
}
|
||||
|
||||
if (options.arrayFilters) {
|
||||
op.arrayFilters = options.arrayFilters;
|
||||
}
|
||||
|
||||
if (options.collation) {
|
||||
op.collation = options.collation;
|
||||
}
|
||||
|
||||
return op;
|
||||
}
|
||||
|
||||
// Multi-document updates cannot be retried (see UpdateOperation.canRetryWrite),
// which is why UpdateManyOperation omits Aspect.RETRYABLE while the
// single-document operations carry it.
defineAspects(UpdateOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION, Aspect.SKIP_COLLATION]);
defineAspects(UpdateOneOperation, [
  Aspect.RETRYABLE,
  Aspect.WRITE_OPERATION,
  Aspect.EXPLAINABLE,
  Aspect.SKIP_COLLATION
]);
defineAspects(UpdateManyOperation, [
  Aspect.WRITE_OPERATION,
  Aspect.EXPLAINABLE,
  Aspect.SKIP_COLLATION
]);
defineAspects(ReplaceOneOperation, [
  Aspect.RETRYABLE,
  Aspect.WRITE_OPERATION,
  Aspect.SKIP_COLLATION
]);
|
59
node_modules/mongodb/src/operations/validate_collection.ts
generated
vendored
Normal file
59
node_modules/mongodb/src/operations/validate_collection.ts
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
import type { Admin } from '../admin';
|
||||
import type { Document } from '../bson';
|
||||
import { MongoRuntimeError } from '../error';
|
||||
import type { Server } from '../sdam/server';
|
||||
import type { ClientSession } from '../sessions';
|
||||
import type { Callback } from '../utils';
|
||||
import { CommandOperation, CommandOperationOptions } from './command';
|
||||
|
||||
/**
 * @public
 * Options for the `validate` admin command.
 */
export interface ValidateCollectionOptions extends CommandOperationOptions {
  /** Validates a collection in the background, without interrupting read or write traffic (only in MongoDB 4.4+) */
  background?: boolean;
}
|
||||
|
||||
/**
 * @internal
 * Runs the server `validate` command against a collection and converts
 * failure indicators in the reply into MongoRuntimeError callbacks.
 */
export class ValidateCollectionOperation extends CommandOperation<Document> {
  override options: ValidateCollectionOptions;
  // Name of the collection being validated; echoed in error messages.
  collectionName: string;
  // Fully-built `validate` command document sent to the server.
  command: Document;

  constructor(admin: Admin, collectionName: string, options: ValidateCollectionOptions) {
    // Decorate command with extra options: every own option except the
    // driver-only `session` key is forwarded verbatim onto the command.
    // (Built before super() — legal because `this` is not touched yet.)
    const command: Document = { validate: collectionName };
    const keys = Object.keys(options);
    for (let i = 0; i < keys.length; i++) {
      if (Object.prototype.hasOwnProperty.call(options, keys[i]) && keys[i] !== 'session') {
        command[keys[i]] = (options as Document)[keys[i]];
      }
    }

    super(admin.s.db, options);
    this.options = options;
    this.command = command;
    this.collectionName = collectionName;
  }

  override execute(
    server: Server,
    session: ClientSession | undefined,
    callback: Callback<Document>
  ): void {
    // Captured locally so the error messages below don't re-read `this`.
    const collectionName = this.collectionName;

    super.executeCommand(server, session, this.command, (err, doc) => {
      if (err != null) return callback(err);

      // TODO(NODE-3483): Replace these with MongoUnexpectedServerResponseError
      // Each check sniffs a different failure shape in the server reply:
      // command-level failure, malformed `result`, textual corruption report,
      // and an explicit `valid: false` flag.
      if (doc.ok === 0) return callback(new MongoRuntimeError('Error with validate command'));
      if (doc.result != null && typeof doc.result !== 'string')
        return callback(new MongoRuntimeError('Error with validation data'));
      if (doc.result != null && doc.result.match(/exception|corrupt/) != null)
        return callback(new MongoRuntimeError(`Invalid collection ${collectionName}`));
      if (doc.valid != null && !doc.valid)
        return callback(new MongoRuntimeError(`Invalid collection ${collectionName}`));

      return callback(undefined, doc);
    });
  }
}
|
Reference in New Issue
Block a user