First commit
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/aws_service_workflow.js (generated, vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AwsServiceWorkflow = void 0;
const fs = require("fs");
const error_1 = require("../../../error");
const service_workflow_1 = require("./service_workflow");
/** Error for when the token is missing in the environment. */
const TOKEN_MISSING_ERROR = 'AWS_WEB_IDENTITY_TOKEN_FILE must be set in the environment.';
/**
 * Device workflow implementation for AWS.
 *
 * @internal
 */
class AwsServiceWorkflow extends service_workflow_1.ServiceWorkflow {
    constructor() {
        super();
    }
    /**
     * Get the token from the environment.
     */
    async getToken() {
        const tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE;
        if (!tokenFile) {
            throw new error_1.MongoAWSError(TOKEN_MISSING_ERROR);
        }
        return fs.promises.readFile(tokenFile, 'utf8');
    }
}
exports.AwsServiceWorkflow = AwsServiceWorkflow;
//# sourceMappingURL=aws_service_workflow.js.map
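A minimal standalone sketch of the token lookup above, for illustration only: it repeats the environment check and file read with Node's fs.promises instead of calling the vendored class, and the token path in the usage comment is hypothetical.

const fs = require("fs");

// Read the OIDC token from the path named by AWS_WEB_IDENTITY_TOKEN_FILE,
// or fail fast when the variable is unset.
async function readAwsWebIdentityToken() {
    const tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE;
    if (!tokenFile) {
        throw new Error('AWS_WEB_IDENTITY_TOKEN_FILE must be set in the environment.');
    }
    return fs.promises.readFile(tokenFile, 'utf8');
}

// Example usage with a hypothetical token path:
// process.env.AWS_WEB_IDENTITY_TOKEN_FILE = '/var/run/secrets/tokens/oidc-token';
// readAwsWebIdentityToken().then(jwt => console.log(jwt.length));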
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/aws_service_workflow.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"aws_service_workflow.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/aws_service_workflow.ts"],"names":[],"mappings":";;;AAAA,yBAAyB;AAEzB,0CAA+C;AAC/C,yDAAqD;AAErD,8DAA8D;AAC9D,MAAM,mBAAmB,GAAG,6DAA6D,CAAC;AAE1F;;;;GAIG;AACH,MAAa,kBAAmB,SAAQ,kCAAe;IACrD;QACE,KAAK,EAAE,CAAC;IACV,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAQ;QACZ,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC;QAC1D,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,qBAAa,CAAC,mBAAmB,CAAC,CAAC;SAC9C;QACD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACjD,CAAC;CACF;AAfD,gDAeC"}
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/cache.js (generated, vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = void 0;
/**
 * Base class for OIDC caches.
 */
class Cache {
    /**
     * Create a new cache.
     */
    constructor() {
        this.entries = new Map();
    }
    /**
     * Clear the cache.
     */
    clear() {
        this.entries.clear();
    }
    /**
     * Create a cache key from the address and username.
     */
    cacheKey(address, username, callbackHash) {
        return JSON.stringify([address, username, callbackHash]);
    }
}
exports.Cache = Cache;
//# sourceMappingURL=cache.js.map
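A small sketch of the keying scheme used by Cache.cacheKey above: serializing the address, username and callback hash into one JSON array string gives a Map a stable structural key. The address, user and hash values below are illustrative.

const entries = new Map();
const cacheKey = (address, username, callbackHash) =>
    JSON.stringify([address, username, callbackHash]);

// Identical inputs produce the identical string key, so lookups hit the same entry.
entries.set(cacheKey('localhost:27017', 'app-user', '1-2'), { token: 'abc' });
console.log(entries.get(cacheKey('localhost:27017', 'app-user', '1-2'))); // { token: 'abc' }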
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/cache.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/cache.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,MAAsB,KAAK;IAGzB;;OAEG;IACH;QACE,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAa,CAAC;IACtC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;IACvB,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QAC9D,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAvBD,sBAuBC"}
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_lock_cache.js (generated, vendored, new file, 83 lines)
@@ -0,0 +1,83 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CallbackLockCache = void 0;
const error_1 = require("../../../error");
const cache_1 = require("./cache");
/** Error message for when request callback is missing. */
const REQUEST_CALLBACK_REQUIRED_ERROR = 'Auth mechanism property REQUEST_TOKEN_CALLBACK is required.';
/* Counter for function "hashes".*/
let FN_HASH_COUNTER = 0;
/* No function present function */
const NO_FUNCTION = async () => ({ accessToken: 'test' });
/* The map of function hashes */
const FN_HASHES = new WeakMap();
/* Put the no function hash in the map. */
FN_HASHES.set(NO_FUNCTION, FN_HASH_COUNTER);
/**
 * A cache of request and refresh callbacks per server/user.
 */
class CallbackLockCache extends cache_1.Cache {
    /**
     * Get the callbacks for the connection and credentials. If an entry does not
     * exist a new one will get set.
     */
    getCallbacks(connection, credentials) {
        const requestCallback = credentials.mechanismProperties.REQUEST_TOKEN_CALLBACK;
        const refreshCallback = credentials.mechanismProperties.REFRESH_TOKEN_CALLBACK;
        if (!requestCallback) {
            throw new error_1.MongoInvalidArgumentError(REQUEST_CALLBACK_REQUIRED_ERROR);
        }
        const callbackHash = hashFunctions(requestCallback, refreshCallback);
        const key = this.cacheKey(connection.address, credentials.username, callbackHash);
        const entry = this.entries.get(key);
        if (entry) {
            return entry;
        }
        return this.setCallbacks(key, callbackHash, requestCallback, refreshCallback);
    }
    /**
     * Set locked callbacks on for connection and credentials.
     */
    setCallbacks(key, callbackHash, requestCallback, refreshCallback) {
        const entry = {
            requestCallback: withLock(requestCallback),
            refreshCallback: refreshCallback ? withLock(refreshCallback) : undefined,
            callbackHash: callbackHash
        };
        this.entries.set(key, entry);
        return entry;
    }
}
exports.CallbackLockCache = CallbackLockCache;
/**
 * Ensure the callback is only executed one at a time.
 */
function withLock(callback) {
    let lock = Promise.resolve();
    return async (info, context) => {
        await lock;
        lock = lock.then(() => callback(info, context));
        return lock;
    };
}
/**
 * Get the hash string for the request and refresh functions.
 */
function hashFunctions(requestFn, refreshFn) {
    let requestHash = FN_HASHES.get(requestFn);
    let refreshHash = FN_HASHES.get(refreshFn ?? NO_FUNCTION);
    if (requestHash == null) {
        // Create a new one for the function and put it in the map.
        FN_HASH_COUNTER++;
        requestHash = FN_HASH_COUNTER;
        FN_HASHES.set(requestFn, FN_HASH_COUNTER);
    }
    if (refreshHash == null && refreshFn) {
        // Create a new one for the function and put it in the map.
        FN_HASH_COUNTER++;
        refreshHash = FN_HASH_COUNTER;
        FN_HASHES.set(refreshFn, FN_HASH_COUNTER);
    }
    return `${requestHash}-${refreshHash}`;
}
//# sourceMappingURL=callback_lock_cache.js.map
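A standalone sketch of the withLock pattern above, keeping only the promise-chaining and dropping the OIDC-specific arguments: each invocation awaits the previous promise before queueing its own call, so a callback never runs concurrently with itself. The sleep helper and labels are illustrative.

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

function withLock(callback) {
    let lock = Promise.resolve();
    return async (...args) => {
        await lock;                                 // wait for the previous invocation
        lock = lock.then(() => callback(...args));  // queue this invocation behind it
        return lock;
    };
}

const locked = withLock(async label => {
    await sleep(50);
    console.log('finished', label);
    return label;
});

// Both calls start immediately, but the callback bodies run one after the other.
Promise.all([locked('first'), locked('second')]).then(results => console.log(results));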
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_lock_cache.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"callback_lock_cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/callback_lock_cache.ts"],"names":[],"mappings":";;;AAAA,0CAA2D;AAU3D,mCAAgC;AAEhC,0DAA0D;AAC1D,MAAM,+BAA+B,GACnC,6DAA6D,CAAC;AAChE,mCAAmC;AACnC,IAAI,eAAe,GAAG,CAAC,CAAC;AACxB,kCAAkC;AAClC,MAAM,WAAW,GAAwB,KAAK,IAAI,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;AAC/E,gCAAgC;AAChC,MAAM,SAAS,GAAG,IAAI,OAAO,EAAqD,CAAC;AACnF,0CAA0C;AAC1C,SAAS,CAAC,GAAG,CAAC,WAAW,EAAE,eAAe,CAAC,CAAC;AAW5C;;GAEG;AACH,MAAa,iBAAkB,SAAQ,aAAqB;IAC1D;;;OAGG;IACH,YAAY,CAAC,UAAsB,EAAE,WAA6B;QAChE,MAAM,eAAe,GAAG,WAAW,CAAC,mBAAmB,CAAC,sBAAsB,CAAC;QAC/E,MAAM,eAAe,GAAG,WAAW,CAAC,mBAAmB,CAAC,sBAAsB,CAAC;QAC/E,IAAI,CAAC,eAAe,EAAE;YACpB,MAAM,IAAI,iCAAyB,CAAC,+BAA+B,CAAC,CAAC;SACtE;QACD,MAAM,YAAY,GAAG,aAAa,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;QACrE,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;QAClF,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACpC,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAC;SACd;QACD,OAAO,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,YAAY,EAAE,eAAe,EAAE,eAAe,CAAC,CAAC;IAChF,CAAC;IAED;;OAEG;IACK,YAAY,CAClB,GAAW,EACX,YAAoB,EACpB,eAAoC,EACpC,eAAqC;QAErC,MAAM,KAAK,GAAG;YACZ,eAAe,EAAE,QAAQ,CAAC,eAAe,CAAC;YAC1C,eAAe,EAAE,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,SAAS;YACxE,YAAY,EAAE,YAAY;SAC3B,CAAC;QACF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC7B,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AArCD,8CAqCC;AAED;;GAEG;AACH,SAAS,QAAQ,CAAC,QAAmD;IACnE,IAAI,IAAI,GAAiB,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3C,OAAO,KAAK,EAAE,IAAmB,EAAE,OAA4B,EAA8B,EAAE;QAC7F,MAAM,IAAI,CAAC;QACX,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAA8B,EAAE,SAA+B;IACpF,IAAI,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;IAC3C,IAAI,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,IAAI,WAAW,CAAC,CAAC;IAC1D,IAAI,WAAW,IAAI,IAAI,EAAE;QACvB,2DAA2D;QAC3D,eAAe,EAAE,CAAC;QAClB,WAAW,GAAG,eAAe,CAAC;QAC9B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;KAC3C;IACD,IAAI,WAAW,IAAI,IAAI,IAAI,SAAS,EAAE;QACpC,2DAA2D;QAC3D,eAAe,EAAE,CAAC;QAClB,WAAW,GAAG,eAAe,CAAC;QAC9B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;KAC3C;IACD,OAAO,GAAG,WAAW,IAAI,WAAW,EAAE,CAAC;AACzC,CAAC"}
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_workflow.js (generated, vendored, new file, 204 lines)
@@ -0,0 +1,204 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CallbackWorkflow = void 0;
const bson_1 = require("bson");
const error_1 = require("../../../error");
const utils_1 = require("../../../utils");
const providers_1 = require("../providers");
const callback_lock_cache_1 = require("./callback_lock_cache");
const token_entry_cache_1 = require("./token_entry_cache");
/** The current version of OIDC implementation. */
const OIDC_VERSION = 0;
/** 5 minutes in seconds */
const TIMEOUT_S = 300;
/** Properties allowed on results of callbacks. */
const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken'];
/** Error message when the callback result is invalid. */
const CALLBACK_RESULT_ERROR = 'User provided OIDC callbacks must return a valid object with an accessToken.';
/**
 * OIDC implementation of a callback based workflow.
 * @internal
 */
class CallbackWorkflow {
    /**
     * Instantiate the workflow
     */
    constructor() {
        this.cache = new token_entry_cache_1.TokenEntryCache();
        this.callbackCache = new callback_lock_cache_1.CallbackLockCache();
    }
    /**
     * Get the document to add for speculative authentication. This also needs
     * to add a db field from the credentials source.
     */
    async speculativeAuth(credentials) {
        const document = startCommandDocument(credentials);
        document.db = credentials.source;
        return { speculativeAuthenticate: document };
    }
    /**
     * Execute the OIDC callback workflow.
     */
    async execute(connection, credentials, reauthenticating, response) {
        // Get the callbacks with locks from the callback lock cache.
        const { requestCallback, refreshCallback, callbackHash } = this.callbackCache.getCallbacks(connection, credentials);
        // Look for an existing entry in the cache.
        const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
        let result;
        if (entry) {
            // Reauthentication cannot use a token from the cache since the server has
            // stated it is invalid by the request for reauthentication.
            if (entry.isValid() && !reauthenticating) {
                // Presence of a valid cache entry means we can skip to the finishing step.
                result = await this.finishAuthentication(connection, credentials, entry.tokenResult, response?.speculativeAuthenticate?.conversationId);
            }
            else {
                // Presence of an expired cache entry means we must fetch a new one and
                // then execute the final step.
                const tokenResult = await this.fetchAccessToken(connection, credentials, entry.serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback);
                try {
                    result = await this.finishAuthentication(connection, credentials, tokenResult, reauthenticating ? undefined : response?.speculativeAuthenticate?.conversationId);
                }
                catch (error) {
                    // If we are reauthenticating and this errors with reauthentication
                    // required, we need to do the entire process over again and clear
                    // the cache entry.
                    if (reauthenticating &&
                        error instanceof error_1.MongoError &&
                        error.code === error_1.MONGODB_ERROR_CODES.Reauthenticate) {
                        this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
                        result = await this.execute(connection, credentials, reauthenticating);
                    }
                    else {
                        throw error;
                    }
                }
            }
        }
        else {
            // No entry in the cache requires us to do all authentication steps
            // from start to finish, including getting a fresh token for the cache.
            const startDocument = await this.startAuthentication(connection, credentials, reauthenticating, response);
            const conversationId = startDocument.conversationId;
            const serverResult = bson_1.BSON.deserialize(startDocument.payload.buffer);
            const tokenResult = await this.fetchAccessToken(connection, credentials, serverResult, reauthenticating, callbackHash, requestCallback, refreshCallback);
            result = await this.finishAuthentication(connection, credentials, tokenResult, conversationId);
        }
        return result;
    }
    /**
     * Starts the callback authentication process. If there is a speculative
     * authentication document from the initial handshake, then we will use that
     * value to get the issuer, otherwise we will send the saslStart command.
     */
    async startAuthentication(connection, credentials, reauthenticating, response) {
        let result;
        if (!reauthenticating && response?.speculativeAuthenticate) {
            result = response.speculativeAuthenticate;
        }
        else {
            result = await connection.commandAsync((0, utils_1.ns)(credentials.source), startCommandDocument(credentials), undefined);
        }
        return result;
    }
    /**
     * Finishes the callback authentication process.
     */
    async finishAuthentication(connection, credentials, tokenResult, conversationId) {
        const result = await connection.commandAsync((0, utils_1.ns)(credentials.source), finishCommandDocument(tokenResult.accessToken, conversationId), undefined);
        return result;
    }
    /**
     * Fetches an access token using either the request or refresh callbacks and
     * puts it in the cache.
     */
    async fetchAccessToken(connection, credentials, serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback) {
        // Get the token from the cache.
        const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
        let result;
        const context = { timeoutSeconds: TIMEOUT_S, version: OIDC_VERSION };
        // Check if there's a token in the cache.
        if (entry) {
            // If the cache entry is valid, return the token result.
            if (entry.isValid() && !reauthenticating) {
                return entry.tokenResult;
            }
            // If the cache entry is not valid, remove it from the cache and first attempt
            // to use the refresh callback to get a new token. If no refresh callback
            // exists, then fallback to the request callback.
            if (refreshCallback) {
                context.refreshToken = entry.tokenResult.refreshToken;
                result = await refreshCallback(serverInfo, context);
            }
            else {
                result = await requestCallback(serverInfo, context);
            }
        }
        else {
            // With no token in the cache we use the request callback.
            result = await requestCallback(serverInfo, context);
        }
        // Validate that the result returned by the callback is acceptable. If it is not
        // we must clear the token result from the cache.
        if (isCallbackResultInvalid(result)) {
            this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
            throw new error_1.MongoMissingCredentialsError(CALLBACK_RESULT_ERROR);
        }
        // Cleanup the cache.
        this.cache.deleteExpiredEntries();
        // Put the new entry into the cache.
        this.cache.addEntry(connection.address, credentials.username || '', callbackHash, result, serverInfo);
        return result;
    }
}
exports.CallbackWorkflow = CallbackWorkflow;
/**
 * Generate the finishing command document for authentication. Will be a
 * saslStart or saslContinue depending on the presence of a conversation id.
 */
function finishCommandDocument(token, conversationId) {
    if (conversationId != null && typeof conversationId === 'number') {
        return {
            saslContinue: 1,
            conversationId: conversationId,
            payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
        };
    }
    // saslContinue requires a conversationId in the command to be valid so in this
    // case the server allows "step two" to actually be a saslStart with the token
    // as the jwt since the use of the cached value has no correlating conversating
    // on the particular connection.
    return {
        saslStart: 1,
        mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
        payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
    };
}
/**
 * Determines if a result returned from a request or refresh callback
 * function is invalid. This means the result is nullish, doesn't contain
 * the accessToken required field, and does not contain extra fields.
 */
function isCallbackResultInvalid(tokenResult) {
    if (tokenResult == null || typeof tokenResult !== 'object')
        return true;
    if (!('accessToken' in tokenResult))
        return true;
    return !Object.getOwnPropertyNames(tokenResult).every(prop => RESULT_PROPERTIES.includes(prop));
}
/**
 * Generate the saslStart command document.
 */
function startCommandDocument(credentials) {
    const payload = {};
    if (credentials.username) {
        payload.n = credentials.username;
    }
    return {
        saslStart: 1,
        autoAuthorize: 1,
        mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
        payload: new bson_1.Binary(bson_1.BSON.serialize(payload))
    };
}
//# sourceMappingURL=callback_workflow.js.map
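A hedged sketch of the two command shapes built by startCommandDocument and finishCommandDocument above, assuming the bson package bundled alongside this vendored driver is resolvable; the token string and conversationId are made up. A saslStart carries a BSON-serialized { jwt } payload, while a saslContinue additionally echoes the conversationId returned by the server.

const { BSON, Binary } = require("bson");

// First step: saslStart with the JWT wrapped in a BSON Binary payload.
function sketchStart(token) {
    return { saslStart: 1, mechanism: 'MONGODB-OIDC', payload: new Binary(BSON.serialize({ jwt: token })) };
}

// Second step: saslContinue referencing the server's conversation id.
function sketchContinue(token, conversationId) {
    return { saslContinue: 1, conversationId, payload: new Binary(BSON.serialize({ jwt: token })) };
}

const start = sketchStart('header.payload.signature');
console.log(BSON.deserialize(start.payload.buffer)); // { jwt: 'header.payload.signature' }
console.log(sketchContinue('header.payload.signature', 1).saslContinue); // 1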
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/callback_workflow.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/service_workflow.js (generated, vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.commandDocument = exports.ServiceWorkflow = void 0;
const bson_1 = require("bson");
const utils_1 = require("../../../utils");
const providers_1 = require("../providers");
/**
 * Common behaviour for OIDC device workflows.
 * @internal
 */
class ServiceWorkflow {
    /**
     * Execute the workflow. Looks for AWS_WEB_IDENTITY_TOKEN_FILE in the environment
     * and then attempts to read the token from that path.
     */
    async execute(connection, credentials) {
        const token = await this.getToken();
        const command = commandDocument(token);
        return connection.commandAsync((0, utils_1.ns)(credentials.source), command, undefined);
    }
    /**
     * Get the document to add for speculative authentication.
     */
    async speculativeAuth(credentials) {
        const token = await this.getToken();
        const document = commandDocument(token);
        document.db = credentials.source;
        return { speculativeAuthenticate: document };
    }
}
exports.ServiceWorkflow = ServiceWorkflow;
/**
 * Create the saslStart command document.
 */
function commandDocument(token) {
    return {
        saslStart: 1,
        mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
        payload: bson_1.BSON.serialize({ jwt: token })
    };
}
exports.commandDocument = commandDocument;
//# sourceMappingURL=service_workflow.js.map
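A small sketch of the speculativeAuth shape above, for illustration: the same saslStart document gains a db field set to the credential source so it can ride along with the handshake. The '$external' source value used below is an assumption for the example, not taken from this diff.

const { BSON } = require("bson");

function speculativeDocument(token, source) {
    // Same saslStart document as commandDocument, plus the db routing field.
    const document = { saslStart: 1, mechanism: 'MONGODB-OIDC', payload: BSON.serialize({ jwt: token }) };
    document.db = source;
    return { speculativeAuthenticate: document };
}

console.log(speculativeDocument('example.jwt.token', '$external').speculativeAuthenticate.db); // '$external'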
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/service_workflow.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"service_workflow.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/service_workflow.ts"],"names":[],"mappings":";;;AAAA,+BAA2C;AAE3C,0CAAoC;AAIpC,4CAA6C;AAE7C;;;GAGG;AACH,MAAsB,eAAe;IACnC;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAC,UAAsB,EAAE,WAA6B;QACjE,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAC;QACpC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,CAAC,CAAC;QACvC,OAAO,UAAU,CAAC,YAAY,CAAC,IAAA,UAAE,EAAC,WAAW,CAAC,MAAM,CAAC,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC;IAC7E,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,eAAe,CAAC,WAA6B;QACjD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAC;QACpC,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,CAAC,CAAC;QACxC,QAAQ,CAAC,EAAE,GAAG,WAAW,CAAC,MAAM,CAAC;QACjC,OAAO,EAAE,uBAAuB,EAAE,QAAQ,EAAE,CAAC;IAC/C,CAAC;CAMF;AAzBD,0CAyBC;AAED;;GAEG;AACH,SAAgB,eAAe,CAAC,KAAa;IAC3C,OAAO;QACL,SAAS,EAAE,CAAC;QACZ,SAAS,EAAE,yBAAa,CAAC,YAAY;QACrC,OAAO,EAAE,WAAI,CAAC,SAAS,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;KACxC,CAAC;AACJ,CAAC;AAND,0CAMC"}
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/token_entry_cache.js (generated, vendored, new file, 71 lines)
@@ -0,0 +1,71 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenEntryCache = exports.TokenEntry = void 0;
const cache_1 = require("./cache");
/* 5 minutes in milliseconds */
const EXPIRATION_BUFFER_MS = 300000;
/* Default expiration is now for when no expiration provided */
const DEFAULT_EXPIRATION_SECS = 0;
/** @internal */
class TokenEntry {
    /**
     * Instantiate the entry.
     */
    constructor(tokenResult, serverInfo, expiration) {
        this.tokenResult = tokenResult;
        this.serverInfo = serverInfo;
        this.expiration = expiration;
    }
    /**
     * The entry is still valid if the expiration is more than
     * 5 minutes from the expiration time.
     */
    isValid() {
        return this.expiration - Date.now() > EXPIRATION_BUFFER_MS;
    }
}
exports.TokenEntry = TokenEntry;
/**
 * Cache of OIDC token entries.
 * @internal
 */
class TokenEntryCache extends cache_1.Cache {
    /**
     * Set an entry in the token cache.
     */
    addEntry(address, username, callbackHash, tokenResult, serverInfo) {
        const entry = new TokenEntry(tokenResult, serverInfo, expirationTime(tokenResult.expiresInSeconds));
        this.entries.set(this.cacheKey(address, username, callbackHash), entry);
        return entry;
    }
    /**
     * Delete an entry from the cache.
     */
    deleteEntry(address, username, callbackHash) {
        this.entries.delete(this.cacheKey(address, username, callbackHash));
    }
    /**
     * Get an entry from the cache.
     */
    getEntry(address, username, callbackHash) {
        return this.entries.get(this.cacheKey(address, username, callbackHash));
    }
    /**
     * Delete all expired entries from the cache.
     */
    deleteExpiredEntries() {
        for (const [key, entry] of this.entries) {
            if (!entry.isValid()) {
                this.entries.delete(key);
            }
        }
    }
}
exports.TokenEntryCache = TokenEntryCache;
/**
 * Get an expiration time in milliseconds past epoch. Defaults to immediate.
 */
function expirationTime(expiresInSeconds) {
    return Date.now() + (expiresInSeconds ?? DEFAULT_EXPIRATION_SECS) * 1000;
}
//# sourceMappingURL=token_entry_cache.js.map
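A standalone sketch of the expiry arithmetic above: an entry counts as valid only while more than the five-minute buffer remains before its expiration timestamp, and a missing expiresInSeconds defaults to an already-expired entry. The example durations are illustrative.

const EXPIRATION_BUFFER_MS = 300000;
const expirationTime = expiresInSeconds => Date.now() + (expiresInSeconds ?? 0) * 1000;
const isValid = expiration => expiration - Date.now() > EXPIRATION_BUFFER_MS;

console.log(isValid(expirationTime(3600)));      // true: an hour out, well past the buffer
console.log(isValid(expirationTime(60)));        // false: one minute left, inside the buffer
console.log(isValid(expirationTime(undefined))); // false: no expiry means immediate expiration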
node_modules/mongodb/lib/cmap/auth/mongodb_oidc/token_entry_cache.js.map (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"token_entry_cache.js","sourceRoot":"","sources":["../../../../src/cmap/auth/mongodb_oidc/token_entry_cache.ts"],"names":[],"mappings":";;;AACA,mCAAgC;AAEhC,+BAA+B;AAC/B,MAAM,oBAAoB,GAAG,MAAM,CAAC;AACpC,+DAA+D;AAC/D,MAAM,uBAAuB,GAAG,CAAC,CAAC;AAClC,gBAAgB;AAChB,MAAa,UAAU;IAKrB;;OAEG;IACH,YAAY,WAA8B,EAAE,UAAyB,EAAE,UAAkB;QACvF,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,oBAAoB,CAAC;IAC7D,CAAC;CACF;AArBD,gCAqBC;AAED;;;GAGG;AACH,MAAa,eAAgB,SAAQ,aAAiB;IACpD;;OAEG;IACH,QAAQ,CACN,OAAe,EACf,QAAgB,EAChB,YAAoB,EACpB,WAA8B,EAC9B,UAAyB;QAEzB,MAAM,KAAK,GAAG,IAAI,UAAU,CAC1B,WAAW,EACX,UAAU,EACV,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAC7C,CAAC;QACF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,QAAgB,EAAE,YAAoB;QAC9D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;IAC1E,CAAC;IAED;;OAEG;IACH,oBAAoB;QAClB,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE;YACvC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,EAAE;gBACpB,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aAC1B;SACF;IACH,CAAC;CACF;AA5CD,0CA4CC;AAED;;GAEG;AACH,SAAS,cAAc,CAAC,gBAAyB;IAC/C,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,gBAAgB,IAAI,uBAAuB,CAAC,GAAG,IAAI,CAAC;AAC3E,CAAC"}