First commit

This commit is contained in:
Aravind142857
2023-06-05 20:06:13 -05:00
commit 4e3c08d1c4
672 changed files with 179969 additions and 0 deletions

node_modules/mongodb/lib/cmap/auth/auth_provider.js generated vendored Normal file

@@ -0,0 +1,47 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AuthProvider = exports.AuthContext = void 0;
const error_1 = require("../../error");
/**
* Context used during authentication
* @internal
*/
class AuthContext {
constructor(connection, credentials, options) {
/** If the context is for reauthentication. */
this.reauthenticating = false;
this.connection = connection;
this.credentials = credentials;
this.options = options;
}
}
exports.AuthContext = AuthContext;
class AuthProvider {
/**
* Prepare the handshake document before the initial handshake.
*
* @param handshakeDoc - The document used for the initial handshake on a connection
* @param authContext - Context for authentication flow
*/
async prepare(handshakeDoc, _authContext) {
return handshakeDoc;
}
/**
* Reauthenticate.
* @param context - The shared auth context.
*/
async reauth(context) {
if (context.reauthenticating) {
throw new error_1.MongoRuntimeError('Reauthentication already in progress.');
}
try {
context.reauthenticating = true;
await this.auth(context);
}
finally {
context.reauthenticating = false;
}
}
}
exports.AuthProvider = AuthProvider;
//# sourceMappingURL=auth_provider.js.map
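
The AuthProvider base class above is what every mechanism in this directory extends: prepare() may decorate the initial handshake (used for speculative authentication), auth() performs the actual exchange, and reauth() wraps auth() with a guard against overlapping reauthentication attempts. A minimal sketch of a custom provider, assuming deep requires into the driver's internal lib/ tree resolve (this is not a public API, and the class name is hypothetical):

"use strict";
const { AuthProvider } = require("mongodb/lib/cmap/auth/auth_provider");

// Hypothetical provider that performs no server exchange; auth() simply resolves.
class NoopAuthProvider extends AuthProvider {
  async auth(_authContext) {
    // A real provider would issue saslStart/saslContinue commands over _authContext.connection.
  }
}

// The inherited reauth() serializes reauthentication via context.reauthenticating.
module.exports = { NoopAuthProvider };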

node_modules/mongodb/lib/cmap/auth/auth_provider.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"auth_provider.js","sourceRoot":"","sources":["../../../src/cmap/auth/auth_provider.ts"],"names":[],"mappings":";;;AACA,uCAAgD;AAKhD;;;GAGG;AACH,MAAa,WAAW;IAetB,YACE,UAAsB,EACtB,WAAyC,EACzC,OAA0B;QAb5B,8CAA8C;QAC9C,qBAAgB,GAAG,KAAK,CAAC;QAcvB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;CACF;AAxBD,kCAwBC;AAED,MAAsB,YAAY;IAChC;;;;;OAKG;IACH,KAAK,CAAC,OAAO,CACX,YAA+B,EAC/B,YAAyB;QAEzB,OAAO,YAAY,CAAC;IACtB,CAAC;IASD;;;OAGG;IACH,KAAK,CAAC,MAAM,CAAC,OAAoB;QAC/B,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,MAAM,IAAI,yBAAiB,CAAC,uCAAuC,CAAC,CAAC;SACtE;QACD,IAAI;YACF,OAAO,CAAC,gBAAgB,GAAG,IAAI,CAAC;YAChC,MAAM,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;SAC1B;gBAAS;YACR,OAAO,CAAC,gBAAgB,GAAG,KAAK,CAAC;SAClC;IACH,CAAC;CACF;AApCD,oCAoCC"}

node_modules/mongodb/lib/cmap/auth/gssapi.js generated vendored Normal file

@@ -0,0 +1,143 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.resolveCname = exports.performGSSAPICanonicalizeHostName = exports.GSSAPI = exports.GSSAPICanonicalizationValue = void 0;
const dns = require("dns");
const deps_1 = require("../../deps");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
/** @public */
exports.GSSAPICanonicalizationValue = Object.freeze({
on: true,
off: false,
none: 'none',
forward: 'forward',
forwardAndReverse: 'forwardAndReverse'
});
async function externalCommand(connection, command) {
return connection.commandAsync((0, utils_1.ns)('$external.$cmd'), command, undefined);
}
class GSSAPI extends auth_provider_1.AuthProvider {
async auth(authContext) {
const { connection, credentials } = authContext;
if (credentials == null) {
throw new error_1.MongoMissingCredentialsError('Credentials required for GSSAPI authentication');
}
const { username } = credentials;
const client = await makeKerberosClient(authContext);
const payload = await client.step('');
const saslStartResponse = await externalCommand(connection, saslStart(payload));
const negotiatedPayload = await negotiate(client, 10, saslStartResponse.payload);
const saslContinueResponse = await externalCommand(connection, saslContinue(negotiatedPayload, saslStartResponse.conversationId));
const finalizePayload = await finalize(client, username, saslContinueResponse.payload);
await externalCommand(connection, {
saslContinue: 1,
conversationId: saslContinueResponse.conversationId,
payload: finalizePayload
});
}
}
exports.GSSAPI = GSSAPI;
async function makeKerberosClient(authContext) {
const { hostAddress } = authContext.options;
const { credentials } = authContext;
if (!hostAddress || typeof hostAddress.host !== 'string' || !credentials) {
throw new error_1.MongoInvalidArgumentError('Connection must have host and port and credentials defined.');
}
if ('kModuleError' in deps_1.Kerberos) {
throw deps_1.Kerberos['kModuleError'];
}
const { initializeClient } = deps_1.Kerberos;
const { username, password } = credentials;
const mechanismProperties = credentials.mechanismProperties;
const serviceName = mechanismProperties.SERVICE_NAME ?? 'mongodb';
const host = await performGSSAPICanonicalizeHostName(hostAddress.host, mechanismProperties);
const initOptions = {};
if (password != null) {
// TODO(NODE-5139): These do not match the typescript options in initializeClient
Object.assign(initOptions, { user: username, password: password });
}
const spnHost = mechanismProperties.SERVICE_HOST ?? host;
let spn = `${serviceName}${process.platform === 'win32' ? '/' : '@'}${spnHost}`;
if ('SERVICE_REALM' in mechanismProperties) {
spn = `${spn}@${mechanismProperties.SERVICE_REALM}`;
}
return initializeClient(spn, initOptions);
}
function saslStart(payload) {
return {
saslStart: 1,
mechanism: 'GSSAPI',
payload,
autoAuthorize: 1
};
}
function saslContinue(payload, conversationId) {
return {
saslContinue: 1,
conversationId,
payload
};
}
async function negotiate(client, retries, payload) {
try {
const response = await client.step(payload);
return response || '';
}
catch (error) {
if (retries === 0) {
// Retries exhausted, raise error
throw error;
}
// Adjust number of retries and call step again
return negotiate(client, retries - 1, payload);
}
}
async function finalize(client, user, payload) {
// GSS Client Unwrap
const response = await client.unwrap(payload);
return client.wrap(response || '', { user });
}
async function performGSSAPICanonicalizeHostName(host, mechanismProperties) {
const mode = mechanismProperties.CANONICALIZE_HOST_NAME;
if (!mode || mode === exports.GSSAPICanonicalizationValue.none) {
return host;
}
// If the mode is 'on' (true) or 'forwardAndReverse'
if (mode === exports.GSSAPICanonicalizationValue.on ||
mode === exports.GSSAPICanonicalizationValue.forwardAndReverse) {
// Perform the lookup of the ip address.
const { address } = await dns.promises.lookup(host);
try {
// Perform a reverse ptr lookup on the ip address.
const results = await dns.promises.resolvePtr(address);
// If the ptr did not error but had no results, return the host.
return results.length > 0 ? results[0] : host;
}
catch (error) {
// This can error as ptr records may not exist for all ips. In this case
// fallback to a cname lookup as dns.lookup() does not return the
// cname.
return resolveCname(host);
}
}
else {
// The case for forward is just to resolve the cname as dns.lookup()
// will not return it.
return resolveCname(host);
}
}
exports.performGSSAPICanonicalizeHostName = performGSSAPICanonicalizeHostName;
async function resolveCname(host) {
// Attempt to resolve the host name
try {
const results = await dns.promises.resolveCname(host);
// Get the first resolved host id
return results.length > 0 ? results[0] : host;
}
catch {
return host;
}
}
exports.resolveCname = resolveCname;
//# sourceMappingURL=gssapi.js.map
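
The service principal name built in makeKerberosClient() combines SERVICE_NAME, a platform-dependent separator, SERVICE_HOST (or the canonicalized host), and an optional SERVICE_REALM. A standalone restatement of that string assembly, with made-up hosts for illustration:

"use strict";

// Mirrors the SPN construction above: 'service/host' on win32, 'service@host' elsewhere,
// with '@REALM' appended when SERVICE_REALM is set.
function buildSpn(mechanismProperties, host, platform = process.platform) {
  const serviceName = mechanismProperties.SERVICE_NAME ?? "mongodb";
  const spnHost = mechanismProperties.SERVICE_HOST ?? host;
  let spn = `${serviceName}${platform === "win32" ? "/" : "@"}${spnHost}`;
  if ("SERVICE_REALM" in mechanismProperties) {
    spn = `${spn}@${mechanismProperties.SERVICE_REALM}`;
  }
  return spn;
}

console.log(buildSpn({}, "db0.example.com", "linux"));
// mongodb@db0.example.com
console.log(buildSpn({ SERVICE_REALM: "EXAMPLE.COM" }, "db0.example.com", "win32"));
// mongodb/db0.example.com@EXAMPLE.COM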

node_modules/mongodb/lib/cmap/auth/gssapi.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"gssapi.js","sourceRoot":"","sources":["../../../src/cmap/auth/gssapi.ts"],"names":[],"mappings":";;;AAAA,2BAA2B;AAE3B,qCAAsD;AACtD,uCAAsF;AACtF,uCAAiC;AAEjC,mDAA4D;AAE5D,cAAc;AACD,QAAA,2BAA2B,GAAG,MAAM,CAAC,MAAM,CAAC;IACvD,EAAE,EAAE,IAAI;IACR,GAAG,EAAE,KAAK;IACV,IAAI,EAAE,MAAM;IACZ,OAAO,EAAE,SAAS;IAClB,iBAAiB,EAAE,mBAAmB;CAC9B,CAAC,CAAC;AAaZ,KAAK,UAAU,eAAe,CAC5B,UAAsB,EACtB,OAAuE;IAEvE,OAAO,UAAU,CAAC,YAAY,CAAC,IAAA,UAAE,EAAC,gBAAgB,CAAC,EAAE,OAAO,EAAE,SAAS,CAGrE,CAAC;AACL,CAAC;AAED,MAAa,MAAO,SAAQ,4BAAY;IAC7B,KAAK,CAAC,IAAI,CAAC,WAAwB;QAC1C,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;QAChD,IAAI,WAAW,IAAI,IAAI,EAAE;YACvB,MAAM,IAAI,oCAA4B,CAAC,gDAAgD,CAAC,CAAC;SAC1F;QAED,MAAM,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC;QAEjC,MAAM,MAAM,GAAG,MAAM,kBAAkB,CAAC,WAAW,CAAC,CAAC;QAErD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAEtC,MAAM,iBAAiB,GAAG,MAAM,eAAe,CAAC,UAAU,EAAE,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC;QAEhF,MAAM,iBAAiB,GAAG,MAAM,SAAS,CAAC,MAAM,EAAE,EAAE,EAAE,iBAAiB,CAAC,OAAO,CAAC,CAAC;QAEjF,MAAM,oBAAoB,GAAG,MAAM,eAAe,CAChD,UAAU,EACV,YAAY,CAAC,iBAAiB,EAAE,iBAAiB,CAAC,cAAc,CAAC,CAClE,CAAC;QAEF,MAAM,eAAe,GAAG,MAAM,QAAQ,CAAC,MAAM,EAAE,QAAQ,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;QAEvF,MAAM,eAAe,CAAC,UAAU,EAAE;YAChC,YAAY,EAAE,CAAC;YACf,cAAc,EAAE,oBAAoB,CAAC,cAAc;YACnD,OAAO,EAAE,eAAe;SACzB,CAAC,CAAC;IACL,CAAC;CACF;AA9BD,wBA8BC;AAED,KAAK,UAAU,kBAAkB,CAAC,WAAwB;IACxD,MAAM,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC,OAAO,CAAC;IAC5C,MAAM,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;IACpC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,CAAC,IAAI,KAAK,QAAQ,IAAI,CAAC,WAAW,EAAE;QACxE,MAAM,IAAI,iCAAyB,CACjC,6DAA6D,CAC9D,CAAC;KACH;IAED,IAAI,cAAc,IAAI,eAAQ,EAAE;QAC9B,MAAM,eAAQ,CAAC,cAAc,CAAC,CAAC;KAChC;IACD,MAAM,EAAE,gBAAgB,EAAE,GAAG,eAAQ,CAAC;IAEtC,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC;IAC3C,MAAM,mBAAmB,GAAG,WAAW,CAAC,mBAA0C,CAAC;IAEnF,MAAM,WAAW,GAAG,mBAAmB,CAAC,YAAY,IAAI,SAAS,CAAC;IAElE,MAAM,IAAI,GAAG,MAAM,iCAAiC,CAAC,WAAW,CAAC,IAAI,EAAE,mBAAmB,CAAC,CAAC;IAE5F,MAAM,WAAW,GAAG,EAAE,CAAC;IACvB,IAAI,QAAQ,IAAI,IAAI,EAAE;QACpB,iFAAiF;QACjF,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC,CAAC;KACpE;IAED,MAAM,OAAO,GAAG,mBAAmB,CAAC,YAAY,IAAI,IAAI,CAAC;IACzD,IAAI,GAAG,GAAG,GAAG,WAAW,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,OAAO,EAAE,CAAC;IAChF,IAAI,eAAe,IAAI,mBAAmB,EAAE;QAC1C,GAAG,GAAG,GAAG,GAAG,IAAI,mBAAmB,CAAC,aAAa,EAAE,CAAC;KACrD;IAED,OAAO,gBAAgB,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AAC5C,CAAC;AAED,SAAS,SAAS,CAAC,OAAe;IAChC,OAAO;QACL,SAAS,EAAE,CAAC;QACZ,SAAS,EAAE,QAAQ;QACnB,OAAO;QACP,aAAa,EAAE,CAAC;KACR,CAAC;AACb,CAAC;AAED,SAAS,YAAY,CAAC,OAAe,EAAE,cAAsB;IAC3D,OAAO;QACL,YAAY,EAAE,CAAC;QACf,cAAc;QACd,OAAO;KACC,CAAC;AACb,CAAC;AAED,KAAK,UAAU,SAAS,CACtB,MAAsB,EACtB,OAAe,EACf,OAAe;IAEf,IAAI;QACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5C,OAAO,QAAQ,IAAI,EAAE,CAAC;KACvB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,iCAAiC;YACjC,MAAM,KAAK,CAAC;SACb;QACD,+CAA+C;QAC/C,OAAO,SAAS,CAAC,MAAM,EAAE,OAAO,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC;KAChD;AACH,CAAC;AAED,KAAK,UAAU,QAAQ,CAAC,MAAsB,EAAE,IAAY,EAAE,OAAe;IAC3E,oBAAoB;IACpB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IAC9C,OAAO,MAAM,CAAC,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC;AAC/C,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,IAAY,EACZ,mBAAwC;IAExC,MAAM,IAAI,GAAG,mBAAmB,CAAC,sBAAsB,CAAC;IACxD,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,mCAA2B,CAAC,IAAI,EAAE;QACtD,OAAO,IAAI,CAAC;KACb;IAED,iCAAiC;IACjC,IACE,IAAI,KAAK,mCAA2B,CAAC,EAAE;QACvC,IAAI,KAAK,mCAA2B,CAAC,iBAAiB,EACtD;QACA,wCAAwC;QACxC,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,GAAG,CAAC,QAAQ,C
AAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI;YACF,kDAAkD;YAClD,MAAM,OAAO,GAAG,MAAM,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;YACvD,gEAAgE;YAChE,OAAO,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;SAC/C;QAAC,OAAO,KAAK,EAAE;YACd,wEAAwE;YACxE,iEAAiE;YACjE,SAAS;YACT,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;SAC3B;KACF;SAAM;QACL,oEAAoE;QACpE,sBAAsB;QACtB,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;KAC3B;AACH,CAAC;AAjCD,8EAiCC;AAEM,KAAK,UAAU,YAAY,CAAC,IAAY;IAC7C,mCAAmC;IACnC,IAAI;QACF,MAAM,OAAO,GAAG,MAAM,GAAG,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;QACtD,iCAAiC;QACjC,OAAO,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;KAC/C;IAAC,MAAM;QACN,OAAO,IAAI,CAAC;KACb;AACH,CAAC;AATD,oCASC"}

node_modules/mongodb/lib/cmap/auth/mongo_credentials.js generated vendored Normal file

@@ -0,0 +1,170 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoCredentials = exports.DEFAULT_ALLOWED_HOSTS = void 0;
const error_1 = require("../../error");
const gssapi_1 = require("./gssapi");
const providers_1 = require("./providers");
// https://github.com/mongodb/specifications/blob/master/source/auth/auth.rst
function getDefaultAuthMechanism(hello) {
if (hello) {
// If hello contains saslSupportedMechs, use scram-sha-256
// if it is available, else scram-sha-1
if (Array.isArray(hello.saslSupportedMechs)) {
return hello.saslSupportedMechs.includes(providers_1.AuthMechanism.MONGODB_SCRAM_SHA256)
? providers_1.AuthMechanism.MONGODB_SCRAM_SHA256
: providers_1.AuthMechanism.MONGODB_SCRAM_SHA1;
}
// Fallback to legacy selection method. If wire version >= 3, use scram-sha-1
if (hello.maxWireVersion >= 3) {
return providers_1.AuthMechanism.MONGODB_SCRAM_SHA1;
}
}
// Default for wireprotocol < 3
return providers_1.AuthMechanism.MONGODB_CR;
}
const ALLOWED_HOSTS_ERROR = 'Auth mechanism property ALLOWED_HOSTS must be an array of strings.';
/** @internal */
exports.DEFAULT_ALLOWED_HOSTS = [
'*.mongodb.net',
'*.mongodb-dev.net',
'*.mongodbgov.net',
'localhost',
'127.0.0.1',
'::1'
];
/**
* A representation of the credentials used by MongoDB
* @public
*/
class MongoCredentials {
constructor(options) {
this.username = options.username ?? '';
this.password = options.password;
this.source = options.source;
if (!this.source && options.db) {
this.source = options.db;
}
this.mechanism = options.mechanism || providers_1.AuthMechanism.MONGODB_DEFAULT;
this.mechanismProperties = options.mechanismProperties || {};
if (this.mechanism.match(/MONGODB-AWS/i)) {
if (!this.username && process.env.AWS_ACCESS_KEY_ID) {
this.username = process.env.AWS_ACCESS_KEY_ID;
}
if (!this.password && process.env.AWS_SECRET_ACCESS_KEY) {
this.password = process.env.AWS_SECRET_ACCESS_KEY;
}
if (this.mechanismProperties.AWS_SESSION_TOKEN == null &&
process.env.AWS_SESSION_TOKEN != null) {
this.mechanismProperties = {
...this.mechanismProperties,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN
};
}
}
if (this.mechanism === providers_1.AuthMechanism.MONGODB_OIDC && !this.mechanismProperties.ALLOWED_HOSTS) {
this.mechanismProperties = {
...this.mechanismProperties,
ALLOWED_HOSTS: exports.DEFAULT_ALLOWED_HOSTS
};
}
Object.freeze(this.mechanismProperties);
Object.freeze(this);
}
/** Determines if two MongoCredentials objects are equivalent */
equals(other) {
return (this.mechanism === other.mechanism &&
this.username === other.username &&
this.password === other.password &&
this.source === other.source);
}
/**
* If the authentication mechanism is set to "default", resolves the authMechanism
* based on the server version and server supported sasl mechanisms.
*
* @param hello - A hello response from the server
*/
resolveAuthMechanism(hello) {
// Only a mechanism of DEFAULT needs to be resolved; otherwise return this as-is
if (this.mechanism.match(/DEFAULT/i)) {
return new MongoCredentials({
username: this.username,
password: this.password,
source: this.source,
mechanism: getDefaultAuthMechanism(hello),
mechanismProperties: this.mechanismProperties
});
}
return this;
}
validate() {
if ((this.mechanism === providers_1.AuthMechanism.MONGODB_GSSAPI ||
this.mechanism === providers_1.AuthMechanism.MONGODB_CR ||
this.mechanism === providers_1.AuthMechanism.MONGODB_PLAIN ||
this.mechanism === providers_1.AuthMechanism.MONGODB_SCRAM_SHA1 ||
this.mechanism === providers_1.AuthMechanism.MONGODB_SCRAM_SHA256) &&
!this.username) {
throw new error_1.MongoMissingCredentialsError(`Username required for mechanism '${this.mechanism}'`);
}
if (this.mechanism === providers_1.AuthMechanism.MONGODB_OIDC) {
if (this.username && this.mechanismProperties.PROVIDER_NAME) {
throw new error_1.MongoInvalidArgumentError(`username and PROVIDER_NAME may not be used together for mechanism '${this.mechanism}'.`);
}
if (this.mechanismProperties.PROVIDER_NAME &&
this.mechanismProperties.PROVIDER_NAME !== 'aws') {
throw new error_1.MongoInvalidArgumentError(`Currently only a PROVIDER_NAME of 'aws' is supported for mechanism '${this.mechanism}'.`);
}
if (this.mechanismProperties.REFRESH_TOKEN_CALLBACK &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK) {
throw new error_1.MongoInvalidArgumentError(`A REQUEST_TOKEN_CALLBACK must be provided when using a REFRESH_TOKEN_CALLBACK for mechanism '${this.mechanism}'`);
}
if (!this.mechanismProperties.PROVIDER_NAME &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK) {
throw new error_1.MongoInvalidArgumentError(`Either a PROVIDER_NAME or a REQUEST_TOKEN_CALLBACK must be specified for mechanism '${this.mechanism}'.`);
}
if (this.mechanismProperties.ALLOWED_HOSTS) {
const hosts = this.mechanismProperties.ALLOWED_HOSTS;
if (!Array.isArray(hosts)) {
throw new error_1.MongoInvalidArgumentError(ALLOWED_HOSTS_ERROR);
}
for (const host of hosts) {
if (typeof host !== 'string') {
throw new error_1.MongoInvalidArgumentError(ALLOWED_HOSTS_ERROR);
}
}
}
}
if (providers_1.AUTH_MECHS_AUTH_SRC_EXTERNAL.has(this.mechanism)) {
if (this.source != null && this.source !== '$external') {
// TODO(NODE-3485): Replace this with a MongoAuthValidationError
throw new error_1.MongoAPIError(`Invalid source '${this.source}' for mechanism '${this.mechanism}' specified.`);
}
}
if (this.mechanism === providers_1.AuthMechanism.MONGODB_PLAIN && this.source == null) {
// TODO(NODE-3485): Replace this with a MongoAuthValidationError
throw new error_1.MongoAPIError('PLAIN Authentication Mechanism needs an auth source');
}
if (this.mechanism === providers_1.AuthMechanism.MONGODB_X509 && this.password != null) {
if (this.password === '') {
Reflect.set(this, 'password', undefined);
return;
}
// TODO(NODE-3485): Replace this with a MongoAuthValidationError
throw new error_1.MongoAPIError(`Password not allowed for mechanism MONGODB-X509`);
}
const canonicalization = this.mechanismProperties.CANONICALIZE_HOST_NAME ?? false;
if (!Object.values(gssapi_1.GSSAPICanonicalizationValue).includes(canonicalization)) {
throw new error_1.MongoAPIError(`Invalid CANONICALIZE_HOST_NAME value: ${canonicalization}`);
}
}
static merge(creds, options) {
return new MongoCredentials({
username: options.username ?? creds?.username ?? '',
password: options.password ?? creds?.password ?? '',
mechanism: options.mechanism ?? creds?.mechanism ?? providers_1.AuthMechanism.MONGODB_DEFAULT,
mechanismProperties: options.mechanismProperties ?? creds?.mechanismProperties ?? {},
source: options.source ?? options.db ?? creds?.source ?? 'admin'
});
}
}
exports.MongoCredentials = MongoCredentials;
//# sourceMappingURL=mongo_credentials.js.map
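
When the mechanism is DEFAULT, resolveAuthMechanism() defers to getDefaultAuthMechanism(): SCRAM-SHA-256 if the server's hello response lists it in saslSupportedMechs, otherwise SCRAM-SHA-1, falling back to MONGODB-CR only for very old wire versions. A standalone restatement with sample hello documents:

"use strict";

// Restates getDefaultAuthMechanism above for illustration.
function pickDefaultMechanism(hello) {
  if (hello) {
    if (Array.isArray(hello.saslSupportedMechs)) {
      return hello.saslSupportedMechs.includes("SCRAM-SHA-256") ? "SCRAM-SHA-256" : "SCRAM-SHA-1";
    }
    if (hello.maxWireVersion >= 3) {
      return "SCRAM-SHA-1";
    }
  }
  return "MONGODB-CR";
}

console.log(pickDefaultMechanism({ saslSupportedMechs: ["SCRAM-SHA-1", "SCRAM-SHA-256"] })); // SCRAM-SHA-256
console.log(pickDefaultMechanism({ maxWireVersion: 6 })); // SCRAM-SHA-1
console.log(pickDefaultMechanism(undefined)); // MONGODB-CR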

File diff suppressed because one or more lines are too long

node_modules/mongodb/lib/cmap/auth/mongocr.js generated vendored Normal file

@@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoCR = void 0;
const crypto = require("crypto");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
class MongoCR extends auth_provider_1.AuthProvider {
async auth(authContext) {
const { connection, credentials } = authContext;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
const { username, password, source } = credentials;
const { nonce } = await connection.commandAsync((0, utils_1.ns)(`${source}.$cmd`), { getnonce: 1 }, undefined);
const hashPassword = crypto
.createHash('md5')
.update(`${username}:mongo:${password}`, 'utf8')
.digest('hex');
// Final key
const key = crypto
.createHash('md5')
.update(`${nonce}${username}${hashPassword}`, 'utf8')
.digest('hex');
const authenticateCommand = {
authenticate: 1,
user: username,
nonce,
key
};
await connection.commandAsync((0, utils_1.ns)(`${source}.$cmd`), authenticateCommand, undefined);
}
}
exports.MongoCR = MongoCR;
//# sourceMappingURL=mongocr.js.map
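
MONGODB-CR derives its proof from two MD5 digests: first md5 of `${username}:mongo:${password}`, then md5 of the server nonce, the username, and that password digest concatenated. A standalone sketch of the derivation using only Node's crypto module (the nonce and credentials are made up; a real nonce comes from the getnonce response):

"use strict";
const crypto = require("crypto");

// Same two-step MD5 derivation as MongoCR.auth above.
function mongoCRKey(username, password, nonce) {
  const hashPassword = crypto
    .createHash("md5")
    .update(`${username}:mongo:${password}`, "utf8")
    .digest("hex");
  return crypto
    .createHash("md5")
    .update(`${nonce}${username}${hashPassword}`, "utf8")
    .digest("hex");
}

console.log(mongoCRKey("app_user", "secret", "2375531c32080ae8"));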

node_modules/mongodb/lib/cmap/auth/mongocr.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"mongocr.js","sourceRoot":"","sources":["../../../src/cmap/auth/mongocr.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AAEjC,uCAA2D;AAC3D,uCAAiC;AACjC,mDAA4D;AAE5D,MAAa,OAAQ,SAAQ,4BAAY;IAC9B,KAAK,CAAC,IAAI,CAAC,WAAwB;QAC1C,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;QAChD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,oCAA4B,CAAC,uCAAuC,CAAC,CAAC;SACjF;QAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,WAAW,CAAC;QAEnD,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,UAAU,CAAC,YAAY,CAC7C,IAAA,UAAE,EAAC,GAAG,MAAM,OAAO,CAAC,EACpB,EAAE,QAAQ,EAAE,CAAC,EAAE,EACf,SAAS,CACV,CAAC;QAEF,MAAM,YAAY,GAAG,MAAM;aACxB,UAAU,CAAC,KAAK,CAAC;aACjB,MAAM,CAAC,GAAG,QAAQ,UAAU,QAAQ,EAAE,EAAE,MAAM,CAAC;aAC/C,MAAM,CAAC,KAAK,CAAC,CAAC;QAEjB,YAAY;QACZ,MAAM,GAAG,GAAG,MAAM;aACf,UAAU,CAAC,KAAK,CAAC;aACjB,MAAM,CAAC,GAAG,KAAK,GAAG,QAAQ,GAAG,YAAY,EAAE,EAAE,MAAM,CAAC;aACpD,MAAM,CAAC,KAAK,CAAC,CAAC;QAEjB,MAAM,mBAAmB,GAAG;YAC1B,YAAY,EAAE,CAAC;YACf,IAAI,EAAE,QAAQ;YACd,KAAK;YACL,GAAG;SACJ,CAAC;QAEF,MAAM,UAAU,CAAC,YAAY,CAAC,IAAA,UAAE,EAAC,GAAG,MAAM,OAAO,CAAC,EAAE,mBAAmB,EAAE,SAAS,CAAC,CAAC;IACtF,CAAC;CACF;AAnCD,0BAmCC"}

node_modules/mongodb/lib/cmap/auth/mongodb_aws.js generated vendored Normal file

@@ -0,0 +1,223 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBAWS = void 0;
const crypto = require("crypto");
const http = require("http");
const url = require("url");
const util_1 = require("util");
const BSON = require("../../bson");
const deps_1 = require("../../deps");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
const mongo_credentials_1 = require("./mongo_credentials");
const providers_1 = require("./providers");
const ASCII_N = 110;
const AWS_RELATIVE_URI = 'http://169.254.170.2';
const AWS_EC2_URI = 'http://169.254.169.254';
const AWS_EC2_PATH = '/latest/meta-data/iam/security-credentials';
const bsonOptions = {
useBigInt64: false,
promoteLongs: true,
promoteValues: true,
promoteBuffers: false,
bsonRegExp: false
};
class MongoDBAWS extends auth_provider_1.AuthProvider {
constructor() {
super();
this.randomBytesAsync = (0, util_1.promisify)(crypto.randomBytes);
}
async auth(authContext) {
const { connection } = authContext;
if (!authContext.credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
if ('kModuleError' in deps_1.aws4) {
throw deps_1.aws4['kModuleError'];
}
const { sign } = deps_1.aws4;
if ((0, utils_1.maxWireVersion)(connection) < 9) {
throw new error_1.MongoCompatibilityError('MONGODB-AWS authentication requires MongoDB version 4.4 or later');
}
if (!authContext.credentials.username) {
authContext.credentials = await makeTempCredentials(authContext.credentials);
}
const { credentials } = authContext;
const accessKeyId = credentials.username;
const secretAccessKey = credentials.password;
const sessionToken = credentials.mechanismProperties.AWS_SESSION_TOKEN;
// If all three defined, include sessionToken, else include username and pass, else no credentials
const awsCredentials = accessKeyId && secretAccessKey && sessionToken
? { accessKeyId, secretAccessKey, sessionToken }
: accessKeyId && secretAccessKey
? { accessKeyId, secretAccessKey }
: undefined;
const db = credentials.source;
const nonce = await this.randomBytesAsync(32);
const saslStart = {
saslStart: 1,
mechanism: 'MONGODB-AWS',
payload: BSON.serialize({ r: nonce, p: ASCII_N }, bsonOptions)
};
const saslStartResponse = await connection.commandAsync((0, utils_1.ns)(`${db}.$cmd`), saslStart, undefined);
const serverResponse = BSON.deserialize(saslStartResponse.payload.buffer, bsonOptions);
const host = serverResponse.h;
const serverNonce = serverResponse.s.buffer;
if (serverNonce.length !== 64) {
// TODO(NODE-3483)
throw new error_1.MongoRuntimeError(`Invalid server nonce length ${serverNonce.length}, expected 64`);
}
if (!utils_1.ByteUtils.equals(serverNonce.subarray(0, nonce.byteLength), nonce)) {
// throw because the serverNonce's leading 32 bytes must equal the client nonce's 32 bytes
// https://github.com/mongodb/specifications/blob/875446db44aade414011731840831f38a6c668df/source/auth/auth.rst#id11
// TODO(NODE-3483)
throw new error_1.MongoRuntimeError('Server nonce does not begin with client nonce');
}
if (host.length < 1 || host.length > 255 || host.indexOf('..') !== -1) {
// TODO(NODE-3483)
throw new error_1.MongoRuntimeError(`Server returned an invalid host: "${host}"`);
}
const body = 'Action=GetCallerIdentity&Version=2011-06-15';
const options = sign({
method: 'POST',
host,
region: deriveRegion(serverResponse.h),
service: 'sts',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': body.length,
'X-MongoDB-Server-Nonce': utils_1.ByteUtils.toBase64(serverNonce),
'X-MongoDB-GS2-CB-Flag': 'n'
},
path: '/',
body
}, awsCredentials);
const payload = {
a: options.headers.Authorization,
d: options.headers['X-Amz-Date']
};
if (sessionToken) {
payload.t = sessionToken;
}
const saslContinue = {
saslContinue: 1,
conversationId: 1,
payload: BSON.serialize(payload, bsonOptions)
};
await connection.commandAsync((0, utils_1.ns)(`${db}.$cmd`), saslContinue, undefined);
}
}
exports.MongoDBAWS = MongoDBAWS;
async function makeTempCredentials(credentials) {
function makeMongoCredentialsFromAWSTemp(creds) {
if (!creds.AccessKeyId || !creds.SecretAccessKey || !creds.Token) {
throw new error_1.MongoMissingCredentialsError('Could not obtain temporary MONGODB-AWS credentials');
}
return new mongo_credentials_1.MongoCredentials({
username: creds.AccessKeyId,
password: creds.SecretAccessKey,
source: credentials.source,
mechanism: providers_1.AuthMechanism.MONGODB_AWS,
mechanismProperties: {
AWS_SESSION_TOKEN: creds.Token
}
});
}
const credentialProvider = (0, deps_1.getAwsCredentialProvider)();
// Check if the AWS credential provider from the SDK is present. If not,
// use the old method.
if ('kModuleError' in credentialProvider) {
// If the environment variable AWS_CONTAINER_CREDENTIALS_RELATIVE_URI
// is set then drivers MUST assume that it was set by an AWS ECS agent
if (process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI) {
return makeMongoCredentialsFromAWSTemp(await request(`${AWS_RELATIVE_URI}${process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI}`));
}
// Otherwise assume we are on an EC2 instance
// get a token
const token = await request(`${AWS_EC2_URI}/latest/api/token`, {
method: 'PUT',
json: false,
headers: { 'X-aws-ec2-metadata-token-ttl-seconds': 30 }
});
// get role name
const roleName = await request(`${AWS_EC2_URI}/${AWS_EC2_PATH}`, {
json: false,
headers: { 'X-aws-ec2-metadata-token': token }
});
// get temp credentials
const creds = await request(`${AWS_EC2_URI}/${AWS_EC2_PATH}/${roleName}`, {
headers: { 'X-aws-ec2-metadata-token': token }
});
return makeMongoCredentialsFromAWSTemp(creds);
}
else {
/*
* Creates a credential provider that will attempt to find credentials from the
* following sources (listed in order of precedence):
*
* - Environment variables exposed via process.env
* - SSO credentials from token cache
* - Web identity token credentials
* - Shared credentials and config ini files
* - The EC2/ECS Instance Metadata Service
*/
const { fromNodeProviderChain } = credentialProvider;
const provider = fromNodeProviderChain();
try {
const creds = await provider();
return makeMongoCredentialsFromAWSTemp({
AccessKeyId: creds.accessKeyId,
SecretAccessKey: creds.secretAccessKey,
Token: creds.sessionToken,
Expiration: creds.expiration
});
}
catch (error) {
throw new error_1.MongoAWSError(error.message);
}
}
}
function deriveRegion(host) {
const parts = host.split('.');
if (parts.length === 1 || parts[1] === 'amazonaws') {
return 'us-east-1';
}
return parts[1];
}
async function request(uri, options = {}) {
return new Promise((resolve, reject) => {
const requestOptions = {
method: 'GET',
timeout: 10000,
json: true,
...url.parse(uri),
...options
};
const req = http.request(requestOptions, res => {
res.setEncoding('utf8');
let data = '';
res.on('data', d => {
data += d;
});
res.once('end', () => {
if (options.json === false) {
resolve(data);
return;
}
try {
const parsed = JSON.parse(data);
resolve(parsed);
}
catch {
// TODO(NODE-3483)
reject(new error_1.MongoRuntimeError(`Invalid JSON response: "${data}"`));
}
});
});
req.once('timeout', () => req.destroy(new error_1.MongoAWSError(`AWS request to ${uri} timed out after ${options.timeout} ms`)));
req.once('error', error => reject(error));
req.end();
});
}
//# sourceMappingURL=mongodb_aws.js.map
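
deriveRegion() infers the STS signing region from the host returned in the server's first SASL payload: the second dot-separated label, unless the host is a bare name or the global sts.amazonaws.com endpoint, in which case us-east-1 is used. Restated standalone:

"use strict";

// Mirrors deriveRegion above.
function regionFromHost(host) {
  const parts = host.split(".");
  if (parts.length === 1 || parts[1] === "amazonaws") {
    return "us-east-1";
  }
  return parts[1];
}

console.log(regionFromHost("sts.amazonaws.com")); // us-east-1
console.log(regionFromHost("sts.eu-west-1.amazonaws.com")); // eu-west-1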

File diff suppressed because one or more lines are too long

node_modules/mongodb/lib/cmap/auth/mongodb_oidc.js generated vendored Normal file

@@ -0,0 +1,66 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBOIDC = exports.OIDC_WORKFLOWS = void 0;
const error_1 = require("../../error");
const auth_provider_1 = require("./auth_provider");
const aws_service_workflow_1 = require("./mongodb_oidc/aws_service_workflow");
const callback_workflow_1 = require("./mongodb_oidc/callback_workflow");
/** Error when credentials are missing. */
const MISSING_CREDENTIALS_ERROR = 'AuthContext must provide credentials.';
/** @internal */
exports.OIDC_WORKFLOWS = new Map();
exports.OIDC_WORKFLOWS.set('callback', new callback_workflow_1.CallbackWorkflow());
exports.OIDC_WORKFLOWS.set('aws', new aws_service_workflow_1.AwsServiceWorkflow());
/**
* OIDC auth provider.
* @experimental
*/
class MongoDBOIDC extends auth_provider_1.AuthProvider {
/**
* Instantiate the auth provider.
*/
constructor() {
super();
}
/**
* Authenticate using OIDC
*/
async auth(authContext) {
const { connection, reauthenticating, response } = authContext;
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
await workflow.execute(connection, credentials, reauthenticating, response);
}
/**
* Add the speculative auth for the initial handshake.
*/
async prepare(handshakeDoc, authContext) {
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
const result = await workflow.speculativeAuth(credentials);
return { ...handshakeDoc, ...result };
}
}
exports.MongoDBOIDC = MongoDBOIDC;
/**
* Get credentials from the auth context, throwing if they do not exist.
*/
function getCredentials(authContext) {
const { credentials } = authContext;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError(MISSING_CREDENTIALS_ERROR);
}
return credentials;
}
/**
* Gets either a device workflow or callback workflow.
*/
function getWorkflow(credentials) {
const providerName = credentials.mechanismProperties.PROVIDER_NAME;
const workflow = exports.OIDC_WORKFLOWS.get(providerName || 'callback');
if (!workflow) {
throw new error_1.MongoInvalidArgumentError(`Could not load workflow for provider ${credentials.mechanismProperties.PROVIDER_NAME}`);
}
return workflow;
}
//# sourceMappingURL=mongodb_oidc.js.map
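
Workflow selection is a map lookup keyed by PROVIDER_NAME: 'aws' selects the device/service workflow, an unset provider falls back to the callback workflow, and anything unrecognized raises an error. A standalone sketch of that dispatch with placeholder workflow objects:

"use strict";

// Sketch of getWorkflow above; the workflow objects here are stand-ins.
const workflows = new Map([
  ["callback", { name: "callback workflow" }],
  ["aws", { name: "aws service workflow" }]
]);

function pickWorkflow(mechanismProperties) {
  const workflow = workflows.get(mechanismProperties.PROVIDER_NAME || "callback");
  if (!workflow) {
    throw new Error(`Could not load workflow for provider ${mechanismProperties.PROVIDER_NAME}`);
  }
  return workflow;
}

console.log(pickWorkflow({}).name); // callback workflow
console.log(pickWorkflow({ PROVIDER_NAME: "aws" }).name); // aws service workflow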

node_modules/mongodb/lib/cmap/auth/mongodb_oidc.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"mongodb_oidc.js","sourceRoot":"","sources":["../../../src/cmap/auth/mongodb_oidc.ts"],"names":[],"mappings":";;;AAEA,uCAAsF;AAGtF,mDAA4D;AAE5D,8EAAyE;AACzE,wEAAoE;AAEpE,0CAA0C;AAC1C,MAAM,yBAAyB,GAAG,uCAAuC,CAAC;AAuE1E,gBAAgB;AACH,QAAA,cAAc,GAAgC,IAAI,GAAG,EAAE,CAAC;AACrE,sBAAc,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,oCAAgB,EAAE,CAAC,CAAC;AACvD,sBAAc,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,yCAAkB,EAAE,CAAC,CAAC;AAEpD;;;GAGG;AACH,MAAa,WAAY,SAAQ,4BAAY;IAC3C;;OAEG;IACH;QACE,KAAK,EAAE,CAAC;IACV,CAAC;IAED;;OAEG;IACM,KAAK,CAAC,IAAI,CAAC,WAAwB;QAC1C,MAAM,EAAE,UAAU,EAAE,gBAAgB,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC;QAC/D,MAAM,WAAW,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,WAAW,CAAC,WAAW,CAAC,CAAC;QAC1C,MAAM,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,WAAW,EAAE,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IAC9E,CAAC;IAED;;OAEG;IACM,KAAK,CAAC,OAAO,CACpB,YAA+B,EAC/B,WAAwB;QAExB,MAAM,WAAW,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,WAAW,CAAC,WAAW,CAAC,CAAC;QAC1C,MAAM,MAAM,GAAG,MAAM,QAAQ,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAC3D,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,MAAM,EAAE,CAAC;IACxC,CAAC;CACF;AA9BD,kCA8BC;AAED;;GAEG;AACH,SAAS,cAAc,CAAC,WAAwB;IAC9C,MAAM,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;IACpC,IAAI,CAAC,WAAW,EAAE;QAChB,MAAM,IAAI,oCAA4B,CAAC,yBAAyB,CAAC,CAAC;KACnE;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,WAAW,CAAC,WAA6B;IAChD,MAAM,YAAY,GAAG,WAAW,CAAC,mBAAmB,CAAC,aAAa,CAAC;IACnE,MAAM,QAAQ,GAAG,sBAAc,CAAC,GAAG,CAAC,YAAY,IAAI,UAAU,CAAC,CAAC;IAChE,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,iCAAyB,CACjC,wCAAwC,WAAW,CAAC,mBAAmB,CAAC,aAAa,EAAE,CACxF,CAAC;KACH;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC"}

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/aws_service_workflow.js generated vendored Normal file

@@ -0,0 +1,30 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AwsServiceWorkflow = void 0;
const fs = require("fs");
const error_1 = require("../../../error");
const service_workflow_1 = require("./service_workflow");
/** Error for when the token is missing in the environment. */
const TOKEN_MISSING_ERROR = 'AWS_WEB_IDENTITY_TOKEN_FILE must be set in the environment.';
/**
* Device workflow implementation for AWS.
*
* @internal
*/
class AwsServiceWorkflow extends service_workflow_1.ServiceWorkflow {
constructor() {
super();
}
/**
* Get the token from the environment.
*/
async getToken() {
const tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE;
if (!tokenFile) {
throw new error_1.MongoAWSError(TOKEN_MISSING_ERROR);
}
return fs.promises.readFile(tokenFile, 'utf8');
}
}
exports.AwsServiceWorkflow = AwsServiceWorkflow;
//# sourceMappingURL=aws_service_workflow.js.map

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/aws_service_workflow.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"aws_service_workflow.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/aws_service_workflow.ts"],"names":[],"mappings":";;;AAAA,yBAAyB;AAEzB,0CAA+C;AAC/C,yDAAqD;AAErD,8DAA8D;AAC9D,MAAM,mBAAmB,GAAG,6DAA6D,CAAC;AAE1F;;;;GAIG;AACH,MAAa,kBAAmB,SAAQ,kCAAe;IACrD;QACE,KAAK,EAAE,CAAC;IACV,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAQ;QACZ,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC;QAC1D,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,qBAAa,CAAC,mBAAmB,CAAC,CAAC;SAC9C;QACD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACjD,CAAC;CACF;AAfD,gDAeC"}

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/cache.js generated vendored Normal file

@@ -0,0 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = void 0;
/**
* Base class for OIDC caches.
*/
class Cache {
/**
* Create a new cache.
*/
constructor() {
this.entries = new Map();
}
/**
* Clear the cache.
*/
clear() {
this.entries.clear();
}
/**
* Create a cache key from the address and username.
*/
cacheKey(address, username, callbackHash) {
return JSON.stringify([address, username, callbackHash]);
}
}
exports.Cache = Cache;
//# sourceMappingURL=cache.js.map
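
Cache keys are JSON-encoded [address, username, callbackHash] triples, so entries are shared only when all three parts match. A one-line illustration with made-up values:

"use strict";

// Same key scheme as Cache.cacheKey above.
const cacheKey = (address, username, callbackHash) => JSON.stringify([address, username, callbackHash]);

console.log(cacheKey("db0.example.com:27017", "app_user", "1-2"));
// ["db0.example.com:27017","app_user","1-2"]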

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/cache.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/cache.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,MAAsB,KAAK;IAGzB;;OAEG;IACH;QACE,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAa,CAAC;IACtC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;IACvB,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QAC9D,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAvBD,sBAuBC"}

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_lock_cache.js generated vendored Normal file

@@ -0,0 +1,83 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CallbackLockCache = void 0;
const error_1 = require("../../../error");
const cache_1 = require("./cache");
/** Error message for when request callback is missing. */
const REQUEST_CALLBACK_REQUIRED_ERROR = 'Auth mechanism property REQUEST_TOKEN_CALLBACK is required.';
/* Counter for function "hashes".*/
let FN_HASH_COUNTER = 0;
/* Placeholder callback used when no refresh function is provided */
const NO_FUNCTION = async () => ({ accessToken: 'test' });
/* The map of function hashes */
const FN_HASHES = new WeakMap();
/* Put the no function hash in the map. */
FN_HASHES.set(NO_FUNCTION, FN_HASH_COUNTER);
/**
* A cache of request and refresh callbacks per server/user.
*/
class CallbackLockCache extends cache_1.Cache {
/**
* Get the callbacks for the connection and credentials. If an entry does not
* exist a new one will get set.
*/
getCallbacks(connection, credentials) {
const requestCallback = credentials.mechanismProperties.REQUEST_TOKEN_CALLBACK;
const refreshCallback = credentials.mechanismProperties.REFRESH_TOKEN_CALLBACK;
if (!requestCallback) {
throw new error_1.MongoInvalidArgumentError(REQUEST_CALLBACK_REQUIRED_ERROR);
}
const callbackHash = hashFunctions(requestCallback, refreshCallback);
const key = this.cacheKey(connection.address, credentials.username, callbackHash);
const entry = this.entries.get(key);
if (entry) {
return entry;
}
return this.setCallbacks(key, callbackHash, requestCallback, refreshCallback);
}
/**
* Set locked callbacks on for connection and credentials.
*/
setCallbacks(key, callbackHash, requestCallback, refreshCallback) {
const entry = {
requestCallback: withLock(requestCallback),
refreshCallback: refreshCallback ? withLock(refreshCallback) : undefined,
callbackHash: callbackHash
};
this.entries.set(key, entry);
return entry;
}
}
exports.CallbackLockCache = CallbackLockCache;
/**
* Ensure only one invocation of the callback runs at a time.
*/
function withLock(callback) {
let lock = Promise.resolve();
return async (info, context) => {
await lock;
lock = lock.then(() => callback(info, context));
return lock;
};
}
/**
* Get the hash string for the request and refresh functions.
*/
function hashFunctions(requestFn, refreshFn) {
let requestHash = FN_HASHES.get(requestFn);
let refreshHash = FN_HASHES.get(refreshFn ?? NO_FUNCTION);
if (requestHash == null) {
// Create a new one for the function and put it in the map.
FN_HASH_COUNTER++;
requestHash = FN_HASH_COUNTER;
FN_HASHES.set(requestFn, FN_HASH_COUNTER);
}
if (refreshHash == null && refreshFn) {
// Create a new one for the function and put it in the map.
FN_HASH_COUNTER++;
refreshHash = FN_HASH_COUNTER;
FN_HASHES.set(refreshFn, FN_HASH_COUNTER);
}
return `${requestHash}-${refreshHash}`;
}
//# sourceMappingURL=callback_lock_cache.js.map
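
withLock() chains every invocation onto a shared promise, so concurrent calls to the same request or refresh callback run strictly one at a time, in call order. A standalone demonstration of the same pattern with a made-up callback:

"use strict";

// Same serialization technique as withLock above.
function withLock(callback) {
  let lock = Promise.resolve();
  return async (...args) => {
    await lock;
    lock = lock.then(() => callback(...args));
    return lock;
  };
}

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));
const order = [];
const locked = withLock(async label => {
  order.push(`start ${label}`);
  await sleep(10);
  order.push(`end ${label}`);
});

Promise.all([locked("a"), locked("b")]).then(() => {
  console.log(order); // [ 'start a', 'end a', 'start b', 'end b' ]
});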

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_lock_cache.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"callback_lock_cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/callback_lock_cache.ts"],"names":[],"mappings":";;;AAAA,0CAA2D;AAU3D,mCAAgC;AAEhC,0DAA0D;AAC1D,MAAM,+BAA+B,GACnC,6DAA6D,CAAC;AAChE,mCAAmC;AACnC,IAAI,eAAe,GAAG,CAAC,CAAC;AACxB,kCAAkC;AAClC,MAAM,WAAW,GAAwB,KAAK,IAAI,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;AAC/E,gCAAgC;AAChC,MAAM,SAAS,GAAG,IAAI,OAAO,EAAqD,CAAC;AACnF,0CAA0C;AAC1C,SAAS,CAAC,GAAG,CAAC,WAAW,EAAE,eAAe,CAAC,CAAC;AAW5C;;GAEG;AACH,MAAa,iBAAkB,SAAQ,aAAqB;IAC1D;;;OAGG;IACH,YAAY,CAAC,UAAsB,EAAE,WAA6B;QAChE,MAAM,eAAe,GAAG,WAAW,CAAC,mBAAmB,CAAC,sBAAsB,CAAC;QAC/E,MAAM,eAAe,GAAG,WAAW,CAAC,mBAAmB,CAAC,sBAAsB,CAAC;QAC/E,IAAI,CAAC,eAAe,EAAE;YACpB,MAAM,IAAI,iCAAyB,CAAC,+BAA+B,CAAC,CAAC;SACtE;QACD,MAAM,YAAY,GAAG,aAAa,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;QACrE,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;QAClF,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACpC,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAC;SACd;QACD,OAAO,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,YAAY,EAAE,eAAe,EAAE,eAAe,CAAC,CAAC;IAChF,CAAC;IAED;;OAEG;IACK,YAAY,CAClB,GAAW,EACX,YAAoB,EACpB,eAAoC,EACpC,eAAqC;QAErC,MAAM,KAAK,GAAG;YACZ,eAAe,EAAE,QAAQ,CAAC,eAAe,CAAC;YAC1C,eAAe,EAAE,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,SAAS;YACxE,YAAY,EAAE,YAAY;SAC3B,CAAC;QACF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC7B,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AArCD,8CAqCC;AAED;;GAEG;AACH,SAAS,QAAQ,CAAC,QAAmD;IACnE,IAAI,IAAI,GAAiB,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3C,OAAO,KAAK,EAAE,IAAmB,EAAE,OAA4B,EAA8B,EAAE;QAC7F,MAAM,IAAI,CAAC;QACX,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAA8B,EAAE,SAA+B;IACpF,IAAI,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;IAC3C,IAAI,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,IAAI,WAAW,CAAC,CAAC;IAC1D,IAAI,WAAW,IAAI,IAAI,EAAE;QACvB,2DAA2D;QAC3D,eAAe,EAAE,CAAC;QAClB,WAAW,GAAG,eAAe,CAAC;QAC9B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;KAC3C;IACD,IAAI,WAAW,IAAI,IAAI,IAAI,SAAS,EAAE;QACpC,2DAA2D;QAC3D,eAAe,EAAE,CAAC;QAClB,WAAW,GAAG,eAAe,CAAC;QAC9B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;KAC3C;IACD,OAAO,GAAG,WAAW,IAAI,WAAW,EAAE,CAAC;AACzC,CAAC"}

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_workflow.js generated vendored Normal file

@@ -0,0 +1,204 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CallbackWorkflow = void 0;
const bson_1 = require("bson");
const error_1 = require("../../../error");
const utils_1 = require("../../../utils");
const providers_1 = require("../providers");
const callback_lock_cache_1 = require("./callback_lock_cache");
const token_entry_cache_1 = require("./token_entry_cache");
/** The current version of OIDC implementation. */
const OIDC_VERSION = 0;
/** 5 minutes in seconds */
const TIMEOUT_S = 300;
/** Properties allowed on results of callbacks. */
const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken'];
/** Error message when the callback result is invalid. */
const CALLBACK_RESULT_ERROR = 'User provided OIDC callbacks must return a valid object with an accessToken.';
/**
* OIDC implementation of a callback based workflow.
* @internal
*/
class CallbackWorkflow {
/**
* Instantiate the workflow
*/
constructor() {
this.cache = new token_entry_cache_1.TokenEntryCache();
this.callbackCache = new callback_lock_cache_1.CallbackLockCache();
}
/**
* Get the document to add for speculative authentication. This also needs
* to add a db field from the credentials source.
*/
async speculativeAuth(credentials) {
const document = startCommandDocument(credentials);
document.db = credentials.source;
return { speculativeAuthenticate: document };
}
/**
* Execute the OIDC callback workflow.
*/
async execute(connection, credentials, reauthenticating, response) {
// Get the callbacks with locks from the callback lock cache.
const { requestCallback, refreshCallback, callbackHash } = this.callbackCache.getCallbacks(connection, credentials);
// Look for an existing entry in the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
if (entry) {
// Reauthentication cannot use a token from the cache since the server has
// stated it is invalid by the request for reauthentication.
if (entry.isValid() && !reauthenticating) {
// Presence of a valid cache entry means we can skip to the finishing step.
result = await this.finishAuthentication(connection, credentials, entry.tokenResult, response?.speculativeAuthenticate?.conversationId);
}
else {
// Presence of an expired cache entry means we must fetch a new one and
// then execute the final step.
const tokenResult = await this.fetchAccessToken(connection, credentials, entry.serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback);
try {
result = await this.finishAuthentication(connection, credentials, tokenResult, reauthenticating ? undefined : response?.speculativeAuthenticate?.conversationId);
}
catch (error) {
// If we are reauthenticating and this errors with reauthentication
// required, we need to do the entire process over again and clear
// the cache entry.
if (reauthenticating &&
error instanceof error_1.MongoError &&
error.code === error_1.MONGODB_ERROR_CODES.Reauthenticate) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
result = await this.execute(connection, credentials, reauthenticating);
}
else {
throw error;
}
}
}
}
else {
// No entry in the cache requires us to do all authentication steps
// from start to finish, including getting a fresh token for the cache.
const startDocument = await this.startAuthentication(connection, credentials, reauthenticating, response);
const conversationId = startDocument.conversationId;
const serverResult = bson_1.BSON.deserialize(startDocument.payload.buffer);
const tokenResult = await this.fetchAccessToken(connection, credentials, serverResult, reauthenticating, callbackHash, requestCallback, refreshCallback);
result = await this.finishAuthentication(connection, credentials, tokenResult, conversationId);
}
return result;
}
/**
* Starts the callback authentication process. If there is a speculative
* authentication document from the initial handshake, then we will use that
* value to get the issuer, otherwise we will send the saslStart command.
*/
async startAuthentication(connection, credentials, reauthenticating, response) {
let result;
if (!reauthenticating && response?.speculativeAuthenticate) {
result = response.speculativeAuthenticate;
}
else {
result = await connection.commandAsync((0, utils_1.ns)(credentials.source), startCommandDocument(credentials), undefined);
}
return result;
}
/**
* Finishes the callback authentication process.
*/
async finishAuthentication(connection, credentials, tokenResult, conversationId) {
const result = await connection.commandAsync((0, utils_1.ns)(credentials.source), finishCommandDocument(tokenResult.accessToken, conversationId), undefined);
return result;
}
/**
* Fetches an access token using either the request or refresh callbacks and
* puts it in the cache.
*/
async fetchAccessToken(connection, credentials, serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback) {
// Get the token from the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
const context = { timeoutSeconds: TIMEOUT_S, version: OIDC_VERSION };
// Check if there's a token in the cache.
if (entry) {
// If the cache entry is valid, return the token result.
if (entry.isValid() && !reauthenticating) {
return entry.tokenResult;
}
// If the cache entry is not valid, first attempt to use the refresh
// callback to get a new token. If no refresh callback exists, then fall
// back to the request callback.
if (refreshCallback) {
context.refreshToken = entry.tokenResult.refreshToken;
result = await refreshCallback(serverInfo, context);
}
else {
result = await requestCallback(serverInfo, context);
}
}
else {
// With no token in the cache we use the request callback.
result = await requestCallback(serverInfo, context);
}
// Validate that the result returned by the callback is acceptable. If it is not
// we must clear the token result from the cache.
if (isCallbackResultInvalid(result)) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
throw new error_1.MongoMissingCredentialsError(CALLBACK_RESULT_ERROR);
}
// Cleanup the cache.
this.cache.deleteExpiredEntries();
// Put the new entry into the cache.
this.cache.addEntry(connection.address, credentials.username || '', callbackHash, result, serverInfo);
return result;
}
}
exports.CallbackWorkflow = CallbackWorkflow;
/**
* Generate the finishing command document for authentication. Will be a
* saslStart or saslContinue depending on the presence of a conversation id.
*/
function finishCommandDocument(token, conversationId) {
if (conversationId != null && typeof conversationId === 'number') {
return {
saslContinue: 1,
conversationId: conversationId,
payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
};
}
// saslContinue requires a conversationId in the command to be valid so in this
// case the server allows "step two" to actually be a saslStart with the token
// as the jwt since the use of the cached value has no correlating conversation
// on the particular connection.
return {
saslStart: 1,
mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
};
}
/**
* Determines if a result returned from a request or refresh callback
* function is invalid. This means the result is nullish, does not contain
* the required accessToken field, or contains properties other than
* accessToken, expiresInSeconds, and refreshToken.
*/
function isCallbackResultInvalid(tokenResult) {
if (tokenResult == null || typeof tokenResult !== 'object')
return true;
if (!('accessToken' in tokenResult))
return true;
return !Object.getOwnPropertyNames(tokenResult).every(prop => RESULT_PROPERTIES.includes(prop));
}
/**
* Generate the saslStart command document.
*/
function startCommandDocument(credentials) {
const payload = {};
if (credentials.username) {
payload.n = credentials.username;
}
return {
saslStart: 1,
autoAuthorize: 1,
mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
payload: new bson_1.Binary(bson_1.BSON.serialize(payload))
};
}
//# sourceMappingURL=callback_workflow.js.map
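
A request or refresh callback must resolve to an object whose only properties come from accessToken (required), expiresInSeconds, and refreshToken; anything else is treated as invalid and the cached entry is evicted. A standalone version of that validation with made-up results:

"use strict";

// Mirrors isCallbackResultInvalid above.
const RESULT_PROPERTIES = ["accessToken", "expiresInSeconds", "refreshToken"];

function isCallbackResultInvalid(tokenResult) {
  if (tokenResult == null || typeof tokenResult !== "object") return true;
  if (!("accessToken" in tokenResult)) return true;
  return !Object.getOwnPropertyNames(tokenResult).every(prop => RESULT_PROPERTIES.includes(prop));
}

console.log(isCallbackResultInvalid({ accessToken: "eyJ...", expiresInSeconds: 300 })); // false (valid)
console.log(isCallbackResultInvalid({ accessToken: "eyJ...", scope: "openid" })); // true (extra field)
console.log(isCallbackResultInvalid({ expiresInSeconds: 300 })); // true (missing accessToken)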

File diff suppressed because one or more lines are too long

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/service_workflow.js generated vendored Normal file

@@ -0,0 +1,43 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.commandDocument = exports.ServiceWorkflow = void 0;
const bson_1 = require("bson");
const utils_1 = require("../../../utils");
const providers_1 = require("../providers");
/**
* Common behaviour for OIDC device workflows.
* @internal
*/
class ServiceWorkflow {
/**
* Execute the workflow. Looks for AWS_WEB_IDENTITY_TOKEN_FILE in the environment
* and then attempts to read the token from that path.
*/
async execute(connection, credentials) {
const token = await this.getToken();
const command = commandDocument(token);
return connection.commandAsync((0, utils_1.ns)(credentials.source), command, undefined);
}
/**
* Get the document to add for speculative authentication.
*/
async speculativeAuth(credentials) {
const token = await this.getToken();
const document = commandDocument(token);
document.db = credentials.source;
return { speculativeAuthenticate: document };
}
}
exports.ServiceWorkflow = ServiceWorkflow;
/**
* Create the saslStart command document.
*/
function commandDocument(token) {
return {
saslStart: 1,
mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
payload: bson_1.BSON.serialize({ jwt: token })
};
}
exports.commandDocument = commandDocument;
//# sourceMappingURL=service_workflow.js.map

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/service_workflow.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"service_workflow.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/service_workflow.ts"],"names":[],"mappings":";;;AAAA,+BAA2C;AAE3C,0CAAoC;AAIpC,4CAA6C;AAE7C;;;GAGG;AACH,MAAsB,eAAe;IACnC;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAC,UAAsB,EAAE,WAA6B;QACjE,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAC;QACpC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,CAAC,CAAC;QACvC,OAAO,UAAU,CAAC,YAAY,CAAC,IAAA,UAAE,EAAC,WAAW,CAAC,MAAM,CAAC,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC;IAC7E,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,eAAe,CAAC,WAA6B;QACjD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAC;QACpC,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,CAAC,CAAC;QACxC,QAAQ,CAAC,EAAE,GAAG,WAAW,CAAC,MAAM,CAAC;QACjC,OAAO,EAAE,uBAAuB,EAAE,QAAQ,EAAE,CAAC;IAC/C,CAAC;CAMF;AAzBD,0CAyBC;AAED;;GAEG;AACH,SAAgB,eAAe,CAAC,KAAa;IAC3C,OAAO;QACL,SAAS,EAAE,CAAC;QACZ,SAAS,EAAE,yBAAa,CAAC,YAAY;QACrC,OAAO,EAAE,WAAI,CAAC,SAAS,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;KACxC,CAAC;AACJ,CAAC;AAND,0CAMC"}

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/token_entry_cache.js generated vendored Normal file

@@ -0,0 +1,71 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenEntryCache = exports.TokenEntry = void 0;
const cache_1 = require("./cache");
/* 5 minutes in milliseconds */
const EXPIRATION_BUFFER_MS = 300000;
/* Default expiration is "now" when no expiration is provided */
const DEFAULT_EXPIRATION_SECS = 0;
/** @internal */
class TokenEntry {
/**
* Instantiate the entry.
*/
constructor(tokenResult, serverInfo, expiration) {
this.tokenResult = tokenResult;
this.serverInfo = serverInfo;
this.expiration = expiration;
}
/**
* The entry is still valid if it expires more than
* 5 minutes from now (the expiration buffer).
*/
isValid() {
return this.expiration - Date.now() > EXPIRATION_BUFFER_MS;
}
}
exports.TokenEntry = TokenEntry;
/**
* Cache of OIDC token entries.
* @internal
*/
class TokenEntryCache extends cache_1.Cache {
/**
* Set an entry in the token cache.
*/
addEntry(address, username, callbackHash, tokenResult, serverInfo) {
const entry = new TokenEntry(tokenResult, serverInfo, expirationTime(tokenResult.expiresInSeconds));
this.entries.set(this.cacheKey(address, username, callbackHash), entry);
return entry;
}
/**
* Delete an entry from the cache.
*/
deleteEntry(address, username, callbackHash) {
this.entries.delete(this.cacheKey(address, username, callbackHash));
}
/**
* Get an entry from the cache.
*/
getEntry(address, username, callbackHash) {
return this.entries.get(this.cacheKey(address, username, callbackHash));
}
/**
* Delete all expired entries from the cache.
*/
deleteExpiredEntries() {
for (const [key, entry] of this.entries) {
if (!entry.isValid()) {
this.entries.delete(key);
}
}
}
}
exports.TokenEntryCache = TokenEntryCache;
/**
* Get an expiration time in milliseconds past epoch. Defaults to immediate.
*/
function expirationTime(expiresInSeconds) {
return Date.now() + (expiresInSeconds ?? DEFAULT_EXPIRATION_SECS) * 1000;
}
//# sourceMappingURL=token_entry_cache.js.map
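
An entry counts as valid only while more than the five-minute buffer remains before its expiration, and a callback result without expiresInSeconds expires immediately. A quick standalone check of that arithmetic:

"use strict";

const EXPIRATION_BUFFER_MS = 300000; // 5 minutes, as above

const expirationTime = expiresInSeconds => Date.now() + (expiresInSeconds ?? 0) * 1000;
const isValid = expiration => expiration - Date.now() > EXPIRATION_BUFFER_MS;

console.log(isValid(expirationTime(600))); // true: ~10 minutes left, beyond the buffer
console.log(isValid(expirationTime(120))); // false: only ~2 minutes left
console.log(isValid(expirationTime(undefined))); // false: treated as already expired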

node_modules/mongodb/lib/cmap/auth/mongodb_oidc/token_entry_cache.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"token_entry_cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/token_entry_cache.ts"],"names":[],"mappings":";;;AACA,mCAAgC;AAEhC,+BAA+B;AAC/B,MAAM,oBAAoB,GAAG,MAAM,CAAC;AACpC,+DAA+D;AAC/D,MAAM,uBAAuB,GAAG,CAAC,CAAC;AAClC,gBAAgB;AAChB,MAAa,UAAU;IAKrB;;OAEG;IACH,YAAY,WAA8B,EAAE,UAAyB,EAAE,UAAkB;QACvF,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,oBAAoB,CAAC;IAC7D,CAAC;CACF;AArBD,gCAqBC;AAED;;;GAGG;AACH,MAAa,eAAgB,SAAQ,aAAiB;IACpD;;OAEG;IACH,QAAQ,CACN,OAAe,EACf,QAAgB,EAChB,YAAoB,EACpB,WAA8B,EAC9B,UAAyB;QAEzB,MAAM,KAAK,GAAG,IAAI,UAAU,CAC1B,WAAW,EACX,UAAU,EACV,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAC7C,CAAC;QACF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QAC9D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IAC1E,CAAC;IAED;;OAEG;IACH,oBAAoB;QAClB,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE;YACvC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,EAAE;gBACpB,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aAC1B;SACF;IACH,CAAC;CACF;AA5CD,0CA4CC;AAED;;GAEG;AACH,SAAS,cAAc,CAAC,gBAAyB;IAC/C,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,gBAAgB,IAAI,uBAAuB,CAAC,GAAG,IAAI,CAAC;AAC3E,CAAC"}

node_modules/mongodb/lib/cmap/auth/plain.js generated vendored Normal file

@@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Plain = void 0;
const bson_1 = require("../../bson");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
class Plain extends auth_provider_1.AuthProvider {
async auth(authContext) {
const { connection, credentials } = authContext;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
const { username, password } = credentials;
const payload = new bson_1.Binary(Buffer.from(`\x00${username}\x00${password}`));
const command = {
saslStart: 1,
mechanism: 'PLAIN',
payload: payload,
autoAuthorize: 1
};
await connection.commandAsync((0, utils_1.ns)('$external.$cmd'), command, undefined);
}
}
exports.Plain = Plain;
//# sourceMappingURL=plain.js.map
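
The PLAIN mechanism sends the bytes \x00username\x00password as the saslStart payload (wrapped in a BSON Binary). The raw layout can be inspected with plain Buffer, using made-up credentials:

"use strict";

// Same byte layout as the Plain provider above.
const username = "app_user";
const password = "secret";
const payload = Buffer.from(`\x00${username}\x00${password}`);

console.log(payload.toString("hex")); // 006170705f7573657200736563726574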

node_modules/mongodb/lib/cmap/auth/plain.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"plain.js","sourceRoot":"","sources":["../../../src/cmap/auth/plain.ts"],"names":[],"mappings":";;;AAAA,qCAAoC;AACpC,uCAA2D;AAC3D,uCAAiC;AACjC,mDAA4D;AAE5D,MAAa,KAAM,SAAQ,4BAAY;IAC5B,KAAK,CAAC,IAAI,CAAC,WAAwB;QAC1C,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;QAChD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,oCAA4B,CAAC,uCAAuC,CAAC,CAAC;SACjF;QAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC;QAE3C,MAAM,OAAO,GAAG,IAAI,aAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,QAAQ,OAAO,QAAQ,EAAE,CAAC,CAAC,CAAC;QAC1E,MAAM,OAAO,GAAG;YACd,SAAS,EAAE,CAAC;YACZ,SAAS,EAAE,OAAO;YAClB,OAAO,EAAE,OAAO;YAChB,aAAa,EAAE,CAAC;SACjB,CAAC;QAEF,MAAM,UAAU,CAAC,YAAY,CAAC,IAAA,UAAE,EAAC,gBAAgB,CAAC,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC;IAC1E,CAAC;CACF;AAnBD,sBAmBC"}

node_modules/mongodb/lib/cmap/auth/providers.js generated vendored Normal file

@@ -0,0 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AUTH_MECHS_AUTH_SRC_EXTERNAL = exports.AuthMechanism = void 0;
/** @public */
exports.AuthMechanism = Object.freeze({
MONGODB_AWS: 'MONGODB-AWS',
MONGODB_CR: 'MONGODB-CR',
MONGODB_DEFAULT: 'DEFAULT',
MONGODB_GSSAPI: 'GSSAPI',
MONGODB_PLAIN: 'PLAIN',
MONGODB_SCRAM_SHA1: 'SCRAM-SHA-1',
MONGODB_SCRAM_SHA256: 'SCRAM-SHA-256',
MONGODB_X509: 'MONGODB-X509',
/** @experimental */
MONGODB_OIDC: 'MONGODB-OIDC'
});
/** @internal */
exports.AUTH_MECHS_AUTH_SRC_EXTERNAL = new Set([
exports.AuthMechanism.MONGODB_GSSAPI,
exports.AuthMechanism.MONGODB_AWS,
exports.AuthMechanism.MONGODB_OIDC,
exports.AuthMechanism.MONGODB_X509
]);
//# sourceMappingURL=providers.js.map
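A minimal usage sketch of the exports above, assuming a deep require into the vendored path; resolveAuthSource is a hypothetical helper, not driver API. It shows what AUTH_MECHS_AUTH_SRC_EXTERNAL is for: the listed mechanisms always authenticate against the reserved $external database.

// Sketch only; resolveAuthSource is illustrative, not part of the driver.
const { AuthMechanism, AUTH_MECHS_AUTH_SRC_EXTERNAL } = require('mongodb/lib/cmap/auth/providers');

function resolveAuthSource(mechanism, dbName) {
  // GSSAPI, MONGODB-AWS, MONGODB-OIDC and MONGODB-X509 ignore the connection's
  // database and authenticate against $external; other mechanisms use dbName.
  return AUTH_MECHS_AUTH_SRC_EXTERNAL.has(mechanism) ? '$external' : dbName;
}

console.log(resolveAuthSource(AuthMechanism.MONGODB_X509, 'app'));         // $external
console.log(resolveAuthSource(AuthMechanism.MONGODB_SCRAM_SHA256, 'app')); // app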

1
node_modules/mongodb/lib/cmap/auth/providers.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"providers.js","sourceRoot":"","sources":["../../../src/cmap/auth/providers.ts"],"names":[],"mappings":";;;AAAA,cAAc;AACD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC;IACzC,WAAW,EAAE,aAAa;IAC1B,UAAU,EAAE,YAAY;IACxB,eAAe,EAAE,SAAS;IAC1B,cAAc,EAAE,QAAQ;IACxB,aAAa,EAAE,OAAO;IACtB,kBAAkB,EAAE,aAAa;IACjC,oBAAoB,EAAE,eAAe;IACrC,YAAY,EAAE,cAAc;IAC5B,oBAAoB;IACpB,YAAY,EAAE,cAAc;CACpB,CAAC,CAAC;AAKZ,gBAAgB;AACH,QAAA,4BAA4B,GAAG,IAAI,GAAG,CAAgB;IACjE,qBAAa,CAAC,cAAc;IAC5B,qBAAa,CAAC,WAAW;IACzB,qBAAa,CAAC,YAAY;IAC1B,qBAAa,CAAC,YAAY;CAC3B,CAAC,CAAC"}

260
node_modules/mongodb/lib/cmap/auth/scram.js generated vendored Normal file

@@ -0,0 +1,260 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ScramSHA256 = exports.ScramSHA1 = void 0;
const crypto = require("crypto");
const util_1 = require("util");
const bson_1 = require("../../bson");
const deps_1 = require("../../deps");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
const providers_1 = require("./providers");
class ScramSHA extends auth_provider_1.AuthProvider {
constructor(cryptoMethod) {
super();
this.cryptoMethod = cryptoMethod || 'sha1';
this.randomBytesAsync = (0, util_1.promisify)(crypto.randomBytes);
}
async prepare(handshakeDoc, authContext) {
const cryptoMethod = this.cryptoMethod;
const credentials = authContext.credentials;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
if (cryptoMethod === 'sha256' && deps_1.saslprep == null) {
(0, utils_1.emitWarning)('Warning: no saslprep library specified. Passwords will not be sanitized');
}
const nonce = await this.randomBytesAsync(24);
// store the nonce for later use
authContext.nonce = nonce;
const request = {
...handshakeDoc,
speculativeAuthenticate: {
...makeFirstMessage(cryptoMethod, credentials, nonce),
db: credentials.source
}
};
return request;
}
async auth(authContext) {
const { reauthenticating, response } = authContext;
if (response?.speculativeAuthenticate && !reauthenticating) {
return continueScramConversation(this.cryptoMethod, response.speculativeAuthenticate, authContext);
}
return executeScram(this.cryptoMethod, authContext);
}
}
function cleanUsername(username) {
return username.replace('=', '=3D').replace(',', '=2C');
}
function clientFirstMessageBare(username, nonce) {
// NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8.
// Since the username is not sasl-prep-d, we need to do this here.
return Buffer.concat([
Buffer.from('n=', 'utf8'),
Buffer.from(username, 'utf8'),
Buffer.from(',r=', 'utf8'),
Buffer.from(nonce.toString('base64'), 'utf8')
]);
}
function makeFirstMessage(cryptoMethod, credentials, nonce) {
const username = cleanUsername(credentials.username);
const mechanism = cryptoMethod === 'sha1' ? providers_1.AuthMechanism.MONGODB_SCRAM_SHA1 : providers_1.AuthMechanism.MONGODB_SCRAM_SHA256;
// NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8.
// Since the username is not sasl-prep-d, we need to do this here.
return {
saslStart: 1,
mechanism,
payload: new bson_1.Binary(Buffer.concat([Buffer.from('n,,', 'utf8'), clientFirstMessageBare(username, nonce)])),
autoAuthorize: 1,
options: { skipEmptyExchange: true }
};
}
async function executeScram(cryptoMethod, authContext) {
const { connection, credentials } = authContext;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
if (!authContext.nonce) {
throw new error_1.MongoInvalidArgumentError('AuthContext must contain a valid nonce property');
}
const nonce = authContext.nonce;
const db = credentials.source;
const saslStartCmd = makeFirstMessage(cryptoMethod, credentials, nonce);
const response = await connection.commandAsync((0, utils_1.ns)(`${db}.$cmd`), saslStartCmd, undefined);
await continueScramConversation(cryptoMethod, response, authContext);
}
async function continueScramConversation(cryptoMethod, response, authContext) {
const connection = authContext.connection;
const credentials = authContext.credentials;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
if (!authContext.nonce) {
throw new error_1.MongoInvalidArgumentError('Unable to continue SCRAM without valid nonce');
}
const nonce = authContext.nonce;
const db = credentials.source;
const username = cleanUsername(credentials.username);
const password = credentials.password;
let processedPassword;
if (cryptoMethod === 'sha256') {
processedPassword = 'kModuleError' in deps_1.saslprep ? password : (0, deps_1.saslprep)(password);
}
else {
processedPassword = passwordDigest(username, password);
}
const payload = Buffer.isBuffer(response.payload)
? new bson_1.Binary(response.payload)
: response.payload;
const dict = parsePayload(payload.value());
const iterations = parseInt(dict.i, 10);
if (iterations && iterations < 4096) {
// TODO(NODE-3483)
throw new error_1.MongoRuntimeError(`Server returned an invalid iteration count ${iterations}`);
}
const salt = dict.s;
const rnonce = dict.r;
if (rnonce.startsWith('nonce')) {
// TODO(NODE-3483)
throw new error_1.MongoRuntimeError(`Server returned an invalid nonce: ${rnonce}`);
}
// Set up start of proof
const withoutProof = `c=biws,r=${rnonce}`;
const saltedPassword = HI(processedPassword, Buffer.from(salt, 'base64'), iterations, cryptoMethod);
const clientKey = HMAC(cryptoMethod, saltedPassword, 'Client Key');
const serverKey = HMAC(cryptoMethod, saltedPassword, 'Server Key');
const storedKey = H(cryptoMethod, clientKey);
const authMessage = [clientFirstMessageBare(username, nonce), payload.value(), withoutProof].join(',');
const clientSignature = HMAC(cryptoMethod, storedKey, authMessage);
const clientProof = `p=${xor(clientKey, clientSignature)}`;
const clientFinal = [withoutProof, clientProof].join(',');
const serverSignature = HMAC(cryptoMethod, serverKey, authMessage);
const saslContinueCmd = {
saslContinue: 1,
conversationId: response.conversationId,
payload: new bson_1.Binary(Buffer.from(clientFinal))
};
const r = await connection.commandAsync((0, utils_1.ns)(`${db}.$cmd`), saslContinueCmd, undefined);
const parsedResponse = parsePayload(r.payload.value());
if (!compareDigest(Buffer.from(parsedResponse.v, 'base64'), serverSignature)) {
throw new error_1.MongoRuntimeError('Server returned an invalid signature');
}
if (r.done !== false) {
// If the server sends r.done === true we can save one RTT
return;
}
const retrySaslContinueCmd = {
saslContinue: 1,
conversationId: r.conversationId,
payload: Buffer.alloc(0)
};
await connection.commandAsync((0, utils_1.ns)(`${db}.$cmd`), retrySaslContinueCmd, undefined);
}
function parsePayload(payload) {
const dict = {};
const parts = payload.split(',');
for (let i = 0; i < parts.length; i++) {
const valueParts = parts[i].split('=');
dict[valueParts[0]] = valueParts[1];
}
return dict;
}
function passwordDigest(username, password) {
if (typeof username !== 'string') {
throw new error_1.MongoInvalidArgumentError('Username must be a string');
}
if (typeof password !== 'string') {
throw new error_1.MongoInvalidArgumentError('Password must be a string');
}
if (password.length === 0) {
throw new error_1.MongoInvalidArgumentError('Password cannot be empty');
}
let md5;
try {
md5 = crypto.createHash('md5');
}
catch (err) {
if (crypto.getFips()) {
// This error is (slightly) more helpful than what comes from OpenSSL directly, e.g.
// 'Error: error:060800C8:digital envelope routines:EVP_DigestInit_ex:disabled for FIPS'
throw new Error('Auth mechanism SCRAM-SHA-1 is not supported in FIPS mode');
}
throw err;
}
md5.update(`${username}:mongo:${password}`, 'utf8');
return md5.digest('hex');
}
// XOR two buffers
function xor(a, b) {
if (!Buffer.isBuffer(a)) {
a = Buffer.from(a);
}
if (!Buffer.isBuffer(b)) {
b = Buffer.from(b);
}
const length = Math.max(a.length, b.length);
const res = [];
for (let i = 0; i < length; i += 1) {
res.push(a[i] ^ b[i]);
}
return Buffer.from(res).toString('base64');
}
function H(method, text) {
return crypto.createHash(method).update(text).digest();
}
function HMAC(method, key, text) {
return crypto.createHmac(method, key).update(text).digest();
}
let _hiCache = {};
let _hiCacheCount = 0;
function _hiCachePurge() {
_hiCache = {};
_hiCacheCount = 0;
}
const hiLengthMap = {
sha256: 32,
sha1: 20
};
function HI(data, salt, iterations, cryptoMethod) {
// omit the work if already generated
const key = [data, salt.toString('base64'), iterations].join('_');
if (_hiCache[key] != null) {
return _hiCache[key];
}
// generate the salt
const saltedData = crypto.pbkdf2Sync(data, salt, iterations, hiLengthMap[cryptoMethod], cryptoMethod);
// cache a copy to speed up the next lookup, but prevent unbounded cache growth
if (_hiCacheCount >= 200) {
_hiCachePurge();
}
_hiCache[key] = saltedData;
_hiCacheCount += 1;
return saltedData;
}
function compareDigest(lhs, rhs) {
if (lhs.length !== rhs.length) {
return false;
}
if (typeof crypto.timingSafeEqual === 'function') {
return crypto.timingSafeEqual(lhs, rhs);
}
let result = 0;
for (let i = 0; i < lhs.length; i++) {
result |= lhs[i] ^ rhs[i];
}
return result === 0;
}
class ScramSHA1 extends ScramSHA {
constructor() {
super('sha1');
}
}
exports.ScramSHA1 = ScramSHA1;
class ScramSHA256 extends ScramSHA {
constructor() {
super('sha256');
}
}
exports.ScramSHA256 = ScramSHA256;
//# sourceMappingURL=scram.js.map
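A minimal sketch of the RFC 5802 client-proof derivation that executeScram and continueScramConversation perform above, written directly against Node's crypto module; every input value below is a made-up example, and the real AuthMessage is the comma-joined client-first-bare, server-first and final-without-proof strings.

// Sketch only: SaltedPassword -> ClientKey -> StoredKey -> ClientSignature -> ClientProof.
const crypto = require('crypto');

const processedPassword = 'saslprepped-password';                 // hypothetical
const salt = Buffer.from('c2FsdHNhbHRzYWx0c2FsdA==', 'base64');   // hypothetical server salt
const iterations = 4096;                                          // smallest count the code above accepts
const authMessage = 'n=user,r=cnonce,placeholder-server-first,c=biws,r=cnonce-snonce'; // placeholder

const saltedPassword = crypto.pbkdf2Sync(processedPassword, salt, iterations, 32, 'sha256');
const clientKey = crypto.createHmac('sha256', saltedPassword).update('Client Key').digest();
const storedKey = crypto.createHash('sha256').update(clientKey).digest();
const clientSignature = crypto.createHmac('sha256', storedKey).update(authMessage).digest();

// ClientProof = ClientKey XOR ClientSignature, sent back base64-encoded as "p=..."
const clientProof = Buffer.alloc(clientKey.length);
for (let i = 0; i < clientKey.length; i++) {
  clientProof[i] = clientKey[i] ^ clientSignature[i];
}
console.log(`p=${clientProof.toString('base64')}`);

The server proves itself with the mirror-image ServerKey/ServerSignature, which is what compareDigest checks in constant time above.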

1
node_modules/mongodb/lib/cmap/auth/scram.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

36
node_modules/mongodb/lib/cmap/auth/x509.js generated vendored Normal file

@@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.X509 = void 0;
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const auth_provider_1 = require("./auth_provider");
class X509 extends auth_provider_1.AuthProvider {
async prepare(handshakeDoc, authContext) {
const { credentials } = authContext;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
return { ...handshakeDoc, speculativeAuthenticate: x509AuthenticateCommand(credentials) };
}
async auth(authContext) {
const connection = authContext.connection;
const credentials = authContext.credentials;
if (!credentials) {
throw new error_1.MongoMissingCredentialsError('AuthContext must provide credentials.');
}
const response = authContext.response;
if (response?.speculativeAuthenticate) {
return;
}
await connection.commandAsync((0, utils_1.ns)('$external.$cmd'), x509AuthenticateCommand(credentials), undefined);
}
}
exports.X509 = X509;
function x509AuthenticateCommand(credentials) {
const command = { authenticate: 1, mechanism: 'MONGODB-X509' };
if (credentials.username) {
command.user = credentials.username;
}
return command;
}
//# sourceMappingURL=x509.js.map
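A minimal sketch of the document shapes the X509 provider above produces; the username is a hypothetical certificate subject and the handshake fields are abbreviated.

// Sketch only: what x509AuthenticateCommand returns and how prepare() embeds it.
const authenticateCmd = {
  authenticate: 1,
  mechanism: 'MONGODB-X509',
  user: 'CN=client,OU=drivers,O=Example Corp' // only set when credentials.username is present
};

// prepare() spreads the same command into the initial handshake as
// speculativeAuthenticate, letting the server finish auth in the first round trip;
// auth() then skips its $external.$cmd command whenever that speculative response exists.
const handshake = {
  hello: 1, // abbreviated; the real handshake document carries more fields
  speculativeAuthenticate: authenticateCmd
};
console.log(JSON.stringify(handshake, null, 2));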

1
node_modules/mongodb/lib/cmap/auth/x509.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"x509.js","sourceRoot":"","sources":["../../../src/cmap/auth/x509.ts"],"names":[],"mappings":";;;AACA,uCAA2D;AAC3D,uCAAiC;AAEjC,mDAA4D;AAG5D,MAAa,IAAK,SAAQ,4BAAY;IAC3B,KAAK,CAAC,OAAO,CACpB,YAA+B,EAC/B,WAAwB;QAExB,MAAM,EAAE,WAAW,EAAE,GAAG,WAAW,CAAC;QACpC,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,oCAA4B,CAAC,uCAAuC,CAAC,CAAC;SACjF;QACD,OAAO,EAAE,GAAG,YAAY,EAAE,uBAAuB,EAAE,uBAAuB,CAAC,WAAW,CAAC,EAAE,CAAC;IAC5F,CAAC;IAEQ,KAAK,CAAC,IAAI,CAAC,WAAwB;QAC1C,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,CAAC;QAC1C,MAAM,WAAW,GAAG,WAAW,CAAC,WAAW,CAAC;QAC5C,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,oCAA4B,CAAC,uCAAuC,CAAC,CAAC;SACjF;QACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC;QAEtC,IAAI,QAAQ,EAAE,uBAAuB,EAAE;YACrC,OAAO;SACR;QAED,MAAM,UAAU,CAAC,YAAY,CAC3B,IAAA,UAAE,EAAC,gBAAgB,CAAC,EACpB,uBAAuB,CAAC,WAAW,CAAC,EACpC,SAAS,CACV,CAAC;IACJ,CAAC;CACF;AA9BD,oBA8BC;AAED,SAAS,uBAAuB,CAAC,WAA6B;IAC5D,MAAM,OAAO,GAAa,EAAE,YAAY,EAAE,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC;IACzE,IAAI,WAAW,CAAC,QAAQ,EAAE;QACxB,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC;KACrC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC"}