Created
December 4, 2025 07:59
-
-
Save serkan-ozal/6e389f1830e70e82596319a763650279 to your computer and use it in GitHub Desktop.
AWS Lambda Nodejs Runtime Interface Client (Extracted from AWS Lambda Environment) Nodejs v24
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| // dist/logging/verbose-log.js | |
// Name of the environment variable that controls runtime verbose logging (0-3).
var EnvVarName = "AWS_LAMBDA_RUNTIME_VERBOSE";
// Prefix tag prepended to every verbose log line.
var Tag = "RUNTIME";
// Resolved verbosity level, clamped to [0, 3]. Defaults to 0 (silent) when
// the env var is unset, empty, or not a number.
var Verbosity = (() => {
  if (!process.env[EnvVarName]) {
    return 0;
  }
  try {
    // Explicit radix 10. A non-numeric value yields NaN, which the original
    // clamp let through (NaN < 0 and NaN > 3 are both false) — treat it as 0.
    const verbosity = parseInt(process.env[EnvVarName], 10);
    if (Number.isNaN(verbosity)) {
      return 0;
    }
    return verbosity < 0 ? 0 : verbosity > 3 ? 3 : verbosity;
  } catch {
    return 0;
  }
})();
/**
 * Creates a leveled verbose logger for the given category.
 * Each method logs only when the module-level Verbosity meets its threshold
 * (verbose=1, vverbose=2, vvverbose=3). Function arguments are evaluated
 * lazily: they are invoked and their return value is logged instead.
 */
function logger(category) {
  const makeLevel = (threshold) => (...args) => {
    if (Verbosity >= threshold) {
      // Resolve lazy (function) arguments only when actually logging.
      const resolved = args.map((arg) => (typeof arg === "function" ? arg() : arg));
      console.log(Tag, category, ...resolved);
    }
  };
  return {
    verbose: makeLevel(1),
    vverbose: makeLevel(2),
    vvverbose: makeLevel(3)
  };
}
| // node_modules/@aws/lambda-invoke-store/dist-es/invoke-store.js | |
// Symbol keys for the Lambda-managed context fields; these are protected
// from user writes by the invoke stores below.
var PROTECTED_KEYS = {
  REQUEST_ID: Symbol.for("_AWS_LAMBDA_REQUEST_ID"),
  X_RAY_TRACE_ID: Symbol.for("_AWS_LAMBDA_X_RAY_TRACE_ID"),
  TENANT_ID: Symbol.for("_AWS_LAMBDA_TENANT_ID")
};
// Opt-out flag: when set to "true" or "1", the runtime does not expose the
// global `awslambda` object.
var NO_GLOBAL_AWS_LAMBDA = ["true", "1"].includes(process.env?.AWS_LAMBDA_NODEJS_NO_GLOBAL_AWSLAMBDA ?? "");
if (!NO_GLOBAL_AWS_LAMBDA) {
  // Preserve an existing global if another copy already installed one.
  globalThis.awslambda = globalThis.awslambda || {};
}
/**
 * Shared behavior for invoke-context stores. Subclasses supply
 * get/set/run/getContext/hasContext; this base adds typed accessors for
 * the Lambda-managed (protected) fields.
 */
var InvokeStoreBase = class {
  static PROTECTED_KEYS = PROTECTED_KEYS;
  // True when `key` is one of the Lambda-managed symbol keys.
  isProtectedKey(key) {
    for (const protectedKey of Object.values(PROTECTED_KEYS)) {
      if (protectedKey === key) {
        return true;
      }
    }
    return false;
  }
  // Request id of the current invocation, or "-" when none is active.
  getRequestId() {
    const requestId = this.get(PROTECTED_KEYS.REQUEST_ID);
    return requestId ?? "-";
  }
  getXRayTraceId() {
    return this.get(PROTECTED_KEYS.X_RAY_TRACE_ID);
  }
  getTenantId() {
    return this.get(PROTECTED_KEYS.TENANT_ID);
  }
};
/**
 * Invoke store for single-concurrency mode: only one invocation runs at a
 * time, so the context lives in a plain instance field.
 */
var InvokeStoreSingle = class extends InvokeStoreBase {
  currentContext;
  getContext() {
    return this.currentContext;
  }
  hasContext() {
    return typeof this.currentContext !== "undefined";
  }
  get(key) {
    return this.currentContext?.[key];
  }
  set(key, value) {
    // Lambda-managed fields are read-only for user code.
    if (this.isProtectedKey(key)) {
      throw new Error(`Cannot modify protected Lambda context field: ${String(key)}`);
    }
    if (!this.currentContext) {
      this.currentContext = {};
    }
    this.currentContext[key] = value;
  }
  // Runs `fn` with `context` installed, clearing it afterwards even on throw.
  run(context, fn) {
    this.currentContext = context;
    try {
      return fn();
    } finally {
      this.currentContext = void 0;
    }
  }
};
/**
 * Invoke store for multi-concurrency mode: contexts are isolated per async
 * execution path via AsyncLocalStorage.
 */
var InvokeStoreMulti = class _InvokeStoreMulti extends InvokeStoreBase {
  als;
  // Async factory: AsyncLocalStorage is loaded lazily via dynamic import.
  static async create() {
    const { AsyncLocalStorage } = await import("node:async_hooks");
    const instance = new _InvokeStoreMulti();
    instance.als = new AsyncLocalStorage();
    return instance;
  }
  getContext() {
    return this.als.getStore();
  }
  hasContext() {
    return typeof this.als.getStore() !== "undefined";
  }
  get(key) {
    const store = this.als.getStore();
    return store?.[key];
  }
  set(key, value) {
    // Lambda-managed fields are read-only for user code.
    if (this.isProtectedKey(key)) {
      throw new Error(`Cannot modify protected Lambda context field: ${String(key)}`);
    }
    const store = this.als.getStore();
    if (!store) {
      throw new Error("No context available");
    }
    store[key] = value;
  }
  run(context, fn) {
    return this.als.run(context, fn);
  }
};
// Namespace exposing a lazily-created, process-wide InvokeStore singleton.
// The instance is shared through globalThis.awslambda unless opted out, so
// multiple runtime copies in one process agree on a single store.
var InvokeStore;
(function(InvokeStore2) {
  // Memoized promise for the singleton; null until first requested.
  let instance = null;
  async function getInstanceAsync() {
    if (!instance) {
      instance = (async () => {
        // Multi-concurrency mode needs async-context isolation.
        const isMulti = "AWS_LAMBDA_MAX_CONCURRENCY" in process.env;
        const newInstance = isMulti ? await InvokeStoreMulti.create() : new InvokeStoreSingle();
        if (!NO_GLOBAL_AWS_LAMBDA && globalThis.awslambda?.InvokeStore) {
          // Another runtime copy already installed a store; reuse it.
          return globalThis.awslambda.InvokeStore;
        } else if (!NO_GLOBAL_AWS_LAMBDA && globalThis.awslambda) {
          // Publish our instance for other copies to reuse.
          globalThis.awslambda.InvokeStore = newInstance;
          return newInstance;
        } else {
          // Global sharing disabled: keep the instance module-private.
          return newInstance;
        }
      })();
    }
    return instance;
  }
  InvokeStore2.getInstanceAsync = getInstanceAsync;
  // Test-only reset hook, exposed only in benchmark mode.
  InvokeStore2._testing = process.env.AWS_LAMBDA_BENCHMARK_MODE === "1" ? {
    reset: () => {
      instance = null;
      if (globalThis.awslambda?.InvokeStore) {
        delete globalThis.awslambda.InvokeStore;
      }
      globalThis.awslambda = {};
    }
  } : void 0;
})(InvokeStore || (InvokeStore = {}));
| // dist/utils/errors.js | |
// Typed runtime errors. Each carries a "Runtime.*" name, which RAPID uses
// as the function error type. The `name` is declared as an instance class
// field; classes whose constructor only forwarded the message rely on the
// inherited Error constructor.

// Failed to load the native runtime client binding.
var NativeClientLoadingError = class extends Error {
  name = "Runtime.NativeClientLoading";
};
// The configured handler could not be found on the loaded module.
var HandlerNotFoundError = class extends Error {
  name = "Runtime.HandlerNotFound";
};
// The handler string is not of the expected "module.handler" shape.
var MalformedHandlerNameError = class extends Error {
  name = "Runtime.MalformedHandlerName";
};
// Importing the user's module failed (e.g. module not found).
var ImportModuleError = class extends Error {
  name = "Runtime.ImportModuleError";
  constructor(originalError) {
    super(String(originalError));
  }
};
// The user's module contained a syntax error; the original stack is kept.
var UserCodeSyntaxError = class extends Error {
  name = "Runtime.UserCodeSyntaxError";
  constructor(originalError) {
    super(String(originalError));
    this.stack = originalError.stack;
  }
};
// A streaming API was used at an invalid time (e.g. content-type after write).
var InvalidStreamingOperation = class extends Error {
  name = "Runtime.InvalidStreamingOperation";
};
// The handler was marked for a streaming mode other than "response".
var MalformedStreamingHandler = class extends Error {
  name = "Runtime.MalformedStreamingHandler";
};
// The handler result could not be JSON-serialized.
var JSONStringifyError = class extends Error {
  name = "Runtime.JSONStringifyError";
  constructor(message = "Unable to stringify response body") {
    super(message);
  }
};
// A violation of the runtime <-> RAPID protocol (bad headers, env, etc.).
var PlatformError = class extends Error {
  name = "Runtime.PlatformError";
};
// Callback-style handlers are rejected on Node.js 22+ Managed Instances.
var CallbackHandlerDeprecatedError = class extends Error {
  name = "Runtime.CallbackHandlerDeprecated";
};
| // dist/utils/network.js | |
/**
 * Parses a "hostname:port" pair (e.g. "127.0.0.1:9001").
 * @param {string} hostnamePort
 * @returns {{hostname: string, port: number}}
 * @throws {PlatformError} when the shape, hostname, or port is invalid
 */
function parseHostPort(hostnamePort) {
  const parts = hostnamePort.split(":");
  if (parts.length !== 2) {
    throw new PlatformError(`Invalid hostnamePort: ${hostnamePort}`);
  }
  const [hostname, portString] = parts;
  if (!hostname || hostname.trim().length === 0) {
    throw new PlatformError(`Invalid hostnamePort: ${hostnamePort}`);
  }
  if (!isValidPort(portString)) {
    throw new PlatformError(`Invalid hostnamePort: ${hostnamePort}`);
  }
  return { hostname, port: Number(portString) };
}
/**
 * True when `portString` is the canonical decimal form of an integer in
 * [0, 65535]. The round-trip `toString` comparison rejects leading zeros,
 * signs, whitespace, and the empty string (Number("") === 0).
 */
function isValidPort(portString) {
  const port = Number(portString);
  // Number.isInteger already implies finiteness; the original's separate
  // Number.isFinite check was redundant.
  return Number.isInteger(port) && port >= 0 && port <= 65535 && port.toString() === portString;
}
| // dist/context/constants.js | |
// Invoke headers RAPID must send with every invocation.
var REQUIRED_INVOKE_HEADERS = {
  FUNCTION_ARN: "lambda-runtime-invoked-function-arn",
  REQUEST_ID: "lambda-runtime-aws-request-id",
  DEADLINE_MS: "lambda-runtime-deadline-ms"
};
// Invoke headers that may be absent depending on the invocation.
var OPTIONAL_INVOKE_HEADERS = {
  CLIENT_CONTEXT: "lambda-runtime-client-context",
  COGNITO_IDENTITY: "lambda-runtime-cognito-identity",
  X_RAY_TRACE_ID: "lambda-runtime-trace-id",
  TENANT_ID: "lambda-runtime-aws-tenant-id"
};
// Union of all known invocation headers.
var HEADERS = {
  ...REQUIRED_INVOKE_HEADERS,
  ...OPTIONAL_INVOKE_HEADERS
};
// Environment variables the runtime requires to build an invocation context.
var REQUIRED_ENV_VARS = [
  "AWS_LAMBDA_FUNCTION_NAME",
  "AWS_LAMBDA_FUNCTION_VERSION",
  "AWS_LAMBDA_FUNCTION_MEMORY_SIZE",
  "AWS_LAMBDA_LOG_GROUP_NAME",
  "AWS_LAMBDA_LOG_STREAM_NAME"
];
// User-facing messages emitted when a callback-style handler is rejected.
var CALLBACK_ERROR_NODEJS22 = "ERROR: AWS Lambda does not support callback-based function handlers when using Node.js 22 with Managed Instances. To use Managed Instances, modify this function to use a supported handler signature. For more information see https://docs.aws.amazon.com/lambda/latest/dg/nodejs-handler.html.";
var CALLBACK_ERROR_NODEJS24_ABOVE = "ERROR: AWS Lambda has removed support for callback-based function handlers starting with Node.js 24. You need to modify this function to use a supported handler signature to use Node.js 24 or later. For more information see https://docs.aws.amazon.com/lambda/latest/dg/nodejs-handler.html.";
| // dist/context/context-builder.js | |
// Builds the invocation context object passed to the user handler from
// RAPID invoke headers plus static environment data.
var ContextBuilder = class {
  /**
   * Validates the environment and headers, then assembles the context.
   * Throws PlatformError on missing env vars or required headers.
   */
  static build(headers) {
    this.validateEnvironment();
    const invokeHeaders = this.validateAndNormalizeHeaders(headers);
    const headerData = this.getHeaderData(invokeHeaders);
    const environmentData = this.getEnvironmentData();
    // Single-concurrency mode only: expose the trace id to the X-Ray SDK.
    moveXRayHeaderToEnv(invokeHeaders);
    return Object.assign(headerData, environmentData);
  }
  // Static, per-sandbox fields sourced from the environment.
  static getEnvironmentData() {
    return {
      functionName: process.env.AWS_LAMBDA_FUNCTION_NAME,
      functionVersion: process.env.AWS_LAMBDA_FUNCTION_VERSION,
      memoryLimitInMB: process.env.AWS_LAMBDA_FUNCTION_MEMORY_SIZE,
      logGroupName: process.env.AWS_LAMBDA_LOG_GROUP_NAME,
      logStreamName: process.env.AWS_LAMBDA_LOG_STREAM_NAME
    };
  }
  // Per-invocation fields sourced from the (normalized) invoke headers.
  static getHeaderData(invokeHeaders) {
    const deadline = this.parseDeadline(invokeHeaders);
    return {
      clientContext: this.parseJsonHeader(invokeHeaders[OPTIONAL_INVOKE_HEADERS.CLIENT_CONTEXT], OPTIONAL_INVOKE_HEADERS.CLIENT_CONTEXT),
      identity: this.parseJsonHeader(invokeHeaders[OPTIONAL_INVOKE_HEADERS.COGNITO_IDENTITY], OPTIONAL_INVOKE_HEADERS.COGNITO_IDENTITY),
      invokedFunctionArn: invokeHeaders[REQUIRED_INVOKE_HEADERS.FUNCTION_ARN],
      awsRequestId: invokeHeaders[REQUIRED_INVOKE_HEADERS.REQUEST_ID],
      tenantId: invokeHeaders[OPTIONAL_INVOKE_HEADERS.TENANT_ID],
      xRayTraceId: invokeHeaders[OPTIONAL_INVOKE_HEADERS.X_RAY_TRACE_ID],
      // Closes over the absolute deadline; remaining time is computed per call.
      getRemainingTimeInMillis: function() {
        return deadline - Date.now();
      }
    };
  }
  // Epoch-millisecond deadline for this invocation.
  static parseDeadline(invokeHeaders) {
    const deadline = parseInt(invokeHeaders[REQUIRED_INVOKE_HEADERS.DEADLINE_MS], 10);
    if (isNaN(deadline)) {
      throw new PlatformError("Invalid deadline timestamp");
    }
    return deadline;
  }
  static validateEnvironment() {
    const missing = REQUIRED_ENV_VARS.filter((name) => !process.env[name]);
    if (missing.length > 0) {
      throw new PlatformError(`Missing required environment variables: ${missing.join(", ")}`);
    }
  }
  // Lower-cases incoming header names, verifies the required ones exist,
  // then re-keys the known headers by their canonical names.
  static validateAndNormalizeHeaders(headers) {
    const normalizedHeaders = this.normalizeHeaders(headers);
    const missingHeaders = this.checkForMissingHeaders(normalizedHeaders);
    if (missingHeaders.length > 0) {
      throw new PlatformError(`Missing required headers: ${missingHeaders.join(", ")}`);
    }
    return this.addKnownHeaders(normalizedHeaders);
  }
  // HTTP header names are case-insensitive; compare in lowercase.
  static normalizeHeaders(headers) {
    return Object.fromEntries(Object.entries(headers).map(([k, v]) => [k.toLowerCase(), v]));
  }
  static checkForMissingHeaders(normalizedHeaders) {
    return Object.values(REQUIRED_INVOKE_HEADERS).filter((requiredKey) => !(requiredKey.toLowerCase() in normalizedHeaders));
  }
  // Copies only the known headers into the result; unknown ones are dropped.
  static addKnownHeaders(normalizedHeaders) {
    const result = {};
    for (const headerKey of Object.values(REQUIRED_INVOKE_HEADERS)) {
      const lowerKey = headerKey.toLowerCase();
      result[headerKey] = normalizedHeaders[lowerKey];
    }
    for (const headerKey of Object.values(OPTIONAL_INVOKE_HEADERS)) {
      const lowerKey = headerKey.toLowerCase();
      if (lowerKey in normalizedHeaders) {
        result[headerKey] = normalizedHeaders[lowerKey];
      }
    }
    return result;
  }
  // Parses an optional JSON-valued header; undefined when the header is absent.
  static parseJsonHeader(headerValue, headerName) {
    if (!headerValue)
      return void 0;
    try {
      return JSON.parse(headerValue);
    } catch (error) {
      throw new PlatformError(`Failed to parse ${headerName} as JSON: ${error.message}`);
    }
  }
};
| // dist/utils/env.js | |
// True when the alternative (HTTP-based) RAPID client is forced via env.
function shouldUseAlternativeClient() {
  const flag = process.env["AWS_LAMBDA_NODEJS_USE_ALTERNATIVE_CLIENT_1"];
  return flag === "true";
}
// Resolves the log format from AWS_LAMBDA_LOG_FORMAT; defaults to TEXT.
function determineLogFormat() {
  const format = process.env["AWS_LAMBDA_LOG_FORMAT"]?.toUpperCase();
  if (format === LOG_FORMAT.JSON) {
    return LOG_FORMAT.JSON;
  }
  return LOG_FORMAT.TEXT;
}
// Resolves the minimum log level from AWS_LAMBDA_LOG_LEVEL; defaults to TRACE
// when the variable is unset or names an unknown level.
function determineLogLevel() {
  const envLevel = process.env["AWS_LAMBDA_LOG_LEVEL"]?.toUpperCase();
  if (envLevel && envLevel in LOG_LEVEL) {
    return LOG_LEVEL[envLevel];
  }
  return LOG_LEVEL.TRACE;
}
/**
 * Reads and removes _LAMBDA_TELEMETRY_LOG_FD from the environment, so it is
 * consumed exactly once and never leaks to user code.
 * @returns {number|undefined} the telemetry file descriptor, or undefined
 *          when the variable is unset, empty, or not a non-negative integer
 */
function consumeTelemetryFd() {
  const raw = process.env["_LAMBDA_TELEMETRY_LOG_FD"];
  delete process.env["_LAMBDA_TELEMETRY_LOG_FD"];
  // Guard against the empty string: Number("") === 0, so an empty variable
  // would otherwise be mistaken for fd 0 (stdin).
  if (!raw) {
    return void 0;
  }
  const fd = Number(raw);
  return Number.isInteger(fd) && fd >= 0 ? fd : void 0;
}
// True when the runtime is configured for concurrent invocations.
// (process.env values are always strings, so presence implies defined.)
function isMultiConcurrentMode() {
  return "AWS_LAMBDA_MAX_CONCURRENCY" in process.env;
}
// Propagates the X-Ray trace id header into _X_AMZN_TRACE_ID for the X-Ray
// SDK. Only done in single-concurrency mode, where env state belongs to a
// single invocation at a time; the variable is cleared when the header is
// absent so stale trace ids never leak across invocations.
function moveXRayHeaderToEnv(headers) {
  if (isMultiConcurrentMode()) {
    return;
  }
  const traceId = headers[HEADERS.X_RAY_TRACE_ID];
  if (traceId) {
    process.env["_X_AMZN_TRACE_ID"] = traceId;
  } else {
    delete process.env["_X_AMZN_TRACE_ID"];
  }
}
| // dist/utils/serialize.js | |
/**
 * JSON-serializes a handler result, mapping undefined to null so the
 * response body is always valid JSON.
 * @throws {JSONStringifyError} when the value cannot be stringified
 *         (circular references, BigInt, ...)
 */
function serializeToJSON(value) {
  const body = value === void 0 ? null : value;
  try {
    return JSON.stringify(body);
  } catch {
    throw new JSONStringifyError("Unable to stringify response body");
  }
}
| // dist/utils/xray.js | |
/**
 * Formats an error into the JSON document X-Ray expects for its error cause.
 * ASCII DEL (\x7F) is percent-encoded because RAPID uses it as a field
 * delimiter. Returns "" when the error cannot be formatted (best effort).
 */
function formatXRayError(error) {
  try {
    const exception = {
      type: error.name?.replaceAll("\x7F", "%7F"),
      message: error.message?.replaceAll("\x7F", "%7F"),
      stack: parseStackTrace(error.stack)
    };
    const formatted = {
      working_directory: process.cwd(),
      exceptions: [exception],
      // Unique list of source paths referenced by the stack frames.
      paths: exception.stack.map((entry) => entry.path).filter((value, index, self) => self.indexOf(value) === index)
    };
    return JSON.stringify(formatted);
  } catch {
    return "";
  }
}
/**
 * Parses a V8 stack trace string into {path, line, label} frame entries.
 * The first line (the error message) is dropped; frames without a function
 * name are labelled "anonymous".
 */
function parseStackTrace(stack) {
  if (!stack)
    return [];
  const lines = stack.replaceAll("\x7F", "%7F").split("\n");
  lines.shift();
  return lines.map((line) => {
    // Strip parentheses and the leading "at " token, leaving either
    // "label path:line:col" or just "path:line:col".
    const trimmed = line.trim().replace(/[()]/g, "").replace(/^[^\s]*\s/, "");
    const lastSpaceIndex = trimmed.lastIndexOf(" ");
    const label = lastSpaceIndex >= 0 ? trimmed.slice(0, lastSpaceIndex) : "anonymous";
    const pathParts = (lastSpaceIndex >= 0 ? trimmed.slice(lastSpaceIndex + 1) : trimmed).split(":");
    return {
      path: pathParts[0],
      // Explicit radix 10 (the original omitted it).
      line: parseInt(pathParts[1], 10),
      label
    };
  });
}
| // dist/utils/error.js | |
/**
 * Converts an arbitrary thrown value into the RAPID error shape
 * ({errorType, errorMessage, trace}), percent-encoding ASCII DEL.
 * Never throws: falls back to a generic "handled" record when the error's
 * own accessors misbehave.
 */
function formatError(error) {
  try {
    if (!(error instanceof Error)) {
      // Non-Error values: report their type and string form, no trace.
      return {
        errorType: typeof error,
        errorMessage: String(error),
        trace: []
      };
    }
    const sanitize = (text) => text?.replaceAll("\x7F", "%7F");
    return {
      errorType: sanitize(error.name),
      errorMessage: sanitize(error.message),
      trace: sanitize(error.stack)?.split("\n") || []
    };
  } catch {
    return {
      errorType: "handled",
      errorMessage: "callback called with Error argument, but there was a problem while retrieving one or more of its message, name, and stack",
      trace: []
    };
  }
}
// Normalizes any thrown value to an Error instance; Errors pass through
// unchanged, everything else is wrapped with its string form as message.
function intoError(err) {
  return err instanceof Error ? err : new Error(String(err));
}
/**
 * Serializes an error for the telemetry log line, prefixed with RAPID's
 * field delimiter. Falls back to the basic formatError shape when the
 * enriched serialization throws (e.g. circular properties).
 */
function toFormatted(error) {
  try {
    const body = JSON.stringify(error, (_k, v) => withEnumerableProperties(v));
    return FORMAT.FIELD_DELIMITER + body;
  } catch {
    return FORMAT.FIELD_DELIMITER + JSON.stringify(formatError(error));
  }
}
/**
 * Makes an Error's interesting fields (name, message, code, stack, and any
 * enumerable own properties) visible to JSON.stringify; non-Error values
 * pass through untouched.
 */
function withEnumerableProperties(error) {
  if (!(error instanceof Error)) {
    return error;
  }
  const ret = {
    errorType: error.name,
    errorMessage: error.message,
    code: error.code,
    ...error
  };
  if (typeof error.stack === "string") {
    // Split the stack for readable multi-line structured output.
    ret.stack = error.stack.split(FORMAT.LINE_DELIMITER);
  }
  return ret;
}
| // dist/utils/cjs-require.js | |
import { createRequire } from "module";
// CommonJS-style require usable from this ES module; used for node
// built-ins and for loading user CJS modules.
var cjsRequire = createRequire(import.meta.url);
| // dist/utils/handler-path.js | |
var path = cjsRequire("node:path");
// Matches "moduleName.handlerName": the module name has no dots; everything
// after the first dot is the (possibly nested) handler property path.
var FUNCTION_EXPR = /^([^.]*)\.(.*)$/;
/**
 * Splits a full handler string (e.g. "src/app.handler") into
 * moduleRoot ("src"), moduleName ("app") and handlerName ("handler").
 * @throws {MalformedHandlerNameError} when no "module.handler" part exists
 */
function parseHandlerString(fullHandlerString) {
  const handlerString = path.basename(fullHandlerString);
  const moduleRoot = fullHandlerString.substring(0, fullHandlerString.indexOf(handlerString));
  const match = handlerString.match(FUNCTION_EXPR);
  if (!match || match.length !== 3) {
    throw new MalformedHandlerNameError("Bad handler");
  }
  return {
    moduleRoot: moduleRoot.replace(/\/$/, ""),
    // Remove trailing slash
    moduleName: match[1],
    handlerName: match[2]
  };
}
| // dist/function/user-function-loader.js | |
/**
 * Loads the user's handler function and derives its metadata
 * (streaming mode, high-water mark, declared argument count).
 */
var UserFunctionLoader = class {
  // Symbols set by awslambda.streamifyResponse on streaming handlers.
  static HANDLER_STREAMING = Symbol.for("aws.lambda.runtime.handler.streaming");
  static HANDLER_HIGHWATERMARK = Symbol.for("aws.lambda.runtime.handler.streaming.highWaterMark");
  static STREAM_RESPONSE = "response";
  static RELATIVE_PATH_SUBSTRING = "..";
  /**
   * Resolves and imports the handler named by `handlerString` under
   * `appRoot`, returning the function plus its metadata.
   */
  static async load(appRoot, handlerString) {
    this.validateHandlerString(handlerString);
    const { moduleRoot, moduleName, handlerName } = parseHandlerString(handlerString);
    const module = await loadModule({ appRoot, moduleRoot, moduleName });
    const handler = resolveHandler(module, handlerName, handlerString);
    return { handler, metadata: this.getHandlerMetadata(handler) };
  }
  static getHandlerMetadata(handler) {
    return {
      streaming: this.isHandlerStreaming(handler),
      highWaterMark: this.getHighWaterMark(handler),
      argsNum: handler.length
    };
  }
  // True when the handler was marked for response streaming; any other
  // non-empty marker value is rejected.
  static isHandlerStreaming(handler) {
    const marker = handler[this.HANDLER_STREAMING];
    if (!marker) {
      return false;
    }
    if (marker !== this.STREAM_RESPONSE) {
      throw new MalformedStreamingHandler("Only response streaming is supported.");
    }
    return true;
  }
  // Numeric high-water mark, or undefined when absent or non-numeric.
  static getHighWaterMark(handler) {
    const raw = handler[this.HANDLER_HIGHWATERMARK];
    if (!raw) {
      return void 0;
    }
    const hwm = Number(raw);
    return Number.isNaN(hwm) ? void 0 : hwm;
  }
  // Handler strings may not traverse outside the application root.
  static validateHandlerString(handlerString) {
    if (handlerString.includes(this.RELATIVE_PATH_SUBSTRING)) {
      throw new MalformedHandlerNameError(`'${handlerString}' is not a valid handler name. Use absolute paths when specifying root directories in handler names.`);
    }
  }
};
| // dist/function/dynamic-imports.js | |
var { existsSync } = cjsRequire("node:fs");
// Imports `file + extension` as an ES module if that path exists on disk;
// returns undefined otherwise so the caller can try the next extension.
async function tryAwaitImport(file, extension) {
  const path3 = `${file}${extension || ""}`;
  return existsSync(path3) ? await import(path3) : void 0;
}
| // dist/function/module-loader.js | |
var path2 = cjsRequire("node:path");
/**
 * Loads the user's module: first as an ES module — trying the resolved path
 * as-is, then with .js/.mjs/.cjs extensions — then falling back to
 * CommonJS resolution relative to the app root (covers node_modules too).
 * @throws {UserCodeSyntaxError} when the module contains a syntax error
 * @throws {ImportModuleError} when the module cannot be found
 */
async function loadModule(options) {
  const fullPathWithoutExtension = path2.resolve(options.appRoot, options.moduleRoot, options.moduleName);
  const extensionLookupOrder = ["", ".js", ".mjs", ".cjs"];
  try {
    for (const extension of extensionLookupOrder) {
      const module = await tryAwaitImport(fullPathWithoutExtension, extension);
      if (module)
        return module;
    }
    // Not found as an ESM file: fall back to node's CJS resolver.
    const resolvedPath = cjsRequire.resolve(options.moduleName, {
      paths: [options.appRoot, path2.join(options.appRoot, options.moduleRoot)]
    });
    return cjsRequire(resolvedPath);
  } catch (err) {
    if (err instanceof SyntaxError) {
      throw new UserCodeSyntaxError(err);
    } else if (err instanceof Error && err.code === "MODULE_NOT_FOUND") {
      throw new ImportModuleError(err);
    } else {
      // Unknown failure: propagate unchanged.
      throw err;
    }
  }
}
| // dist/function/handler-resolver.js | |
/**
 * Finds the handler function on the loaded module, following a dotted
 * property path (e.g. "nested.fn") and falling back to the module's
 * default export when the top-level lookup misses.
 * @throws {HandlerNotFoundError} when the handler is missing or not callable
 */
function resolveHandler(module, handlerName, fullHandlerString) {
  let handler = findIn(handlerName, module);
  if (!handler && typeof module === "object" && module !== null && "default" in module) {
    handler = findIn(handlerName, module.default);
  }
  if (!handler) {
    throw new HandlerNotFoundError(`${fullHandlerString} is undefined or not exported`);
  }
  if (!isUserHandler(handler)) {
    throw new HandlerNotFoundError(`${fullHandlerString} is not a function`);
  }
  return handler;
}
// Walks a dotted property path through nested objects; undefined on any miss.
function findIn(handlerName, module) {
  let current = module;
  for (const key of handlerName.split(".")) {
    current = current && typeof current === "object" ? current[key] : void 0;
  }
  return current;
}
// Handlers must be callable.
function isUserHandler(fn) {
  return typeof fn === "function";
}
| // dist/function/callback-deprecation.js | |
// A handler declaring three or more parameters uses the legacy callback
// signature (event, context, callback) — unless it is a streaming handler,
// whose extra parameter is the response stream.
var shouldErrorOnCallbackFunction = (metadata) => {
  const argsNum = metadata?.argsNum ?? 0;
  return argsNum >= 3 && !metadata?.streaming;
};
var isNodejs22Runtime = () => {
  return process.env.AWS_EXECUTION_ENV === "AWS_Lambda_nodejs22.x";
};
// Rejects callback-style handlers with the message for the current runtime
// generation (deprecated on 22, removed on 24+).
function errorOnDeprecatedCallback(metadata) {
  if (!shouldErrorOnCallbackFunction(metadata)) {
    return;
  }
  const errorMessage = isNodejs22Runtime() ? CALLBACK_ERROR_NODEJS22 : CALLBACK_ERROR_NODEJS24_ABOVE;
  throw new CallbackHandlerDeprecatedError(errorMessage);
}
| // dist/stream/constants.js | |
// Header telling RAPID the response body is streamed.
var HEADER_RESPONSE_MODE = "Lambda-Runtime-Function-Response-Mode";
var VALUE_STREAMING = "streaming";
// HTTP trailers used to report a mid-stream failure to RAPID.
var TRAILER_NAME_ERROR_TYPE = "Lambda-Runtime-Function-Error-Type";
var TRAILER_NAME_ERROR_BODY = "Lambda-Runtime-Function-Error-Body";
var HEADER_CONTENT_TYPE = "Content-Type";
var DEFAULT_CONTENT_TYPE = "application/octet-stream";
var HEADER_TRANSFER_ENCODING = "Transfer-Encoding";
var CHUNKED_TRANSFER_ENCODING = "chunked";
// Response-stream lifecycle states; content-type locks after the first write.
var STATUS_READY = "ready";
var STATUS_WRITE_CALLED = "write_called";
| // dist/stream/response-stream.js | |
var { createConnection } = cjsRequire("node:net");
// Verbose logger scoped to the streaming subsystem.
var log = logger("STREAM");
// Per-request fail callbacks, keyed weakly so request objects can be GC'd.
var failProps = /* @__PURE__ */ new WeakMap();
// Registers the fail callback for a response-stream request object.
function addFailWeakProp(req, fn) {
  failProps.set(req, fn);
}
// Invokes (and awaits) the registered fail callback for `req`, if any.
// Returns true when a callback existed and was called.
async function tryCallFail(req, err) {
  const fn = failProps.get(req);
  if (typeof fn !== "function") {
    return false;
  }
  await fn(err);
  return true;
}
// Writable-stream methods exposed on the response-stream facade (each bound
// to the underlying request), plus the runtime's setContentType extension.
var WRITABLE_METHODS = [
  "cork",
  "destroy",
  "end",
  "uncork",
  "write",
  "addListener",
  "on",
  "once",
  "prependListener",
  "prependOnceListener",
  "off",
  "removeListener",
  "removeAllListeners",
  "setMaxListeners",
  "getMaxListeners",
  "listeners",
  "rawListeners",
  "listenerCount",
  "eventNames",
  "emit",
  "setContentType"
];
// Properties forwarded through the facade with both getter and setter.
var PROPS_READ_WRITE = ["destroyed", "_onBeforeFirstWrite"];
// Properties forwarded read-only.
var PROPS_READ_ONLY = [
  "writableFinished",
  "writableObjectMode",
  "writableEnded",
  "writableNeedDrain",
  "writableHighWaterMark",
  "writableCorked",
  "writableLength",
  "writable"
];
/**
 * Builds a plain facade over the underlying HTTP request that exposes only
 * the Writable-stream surface handlers may use: listed methods are bound to
 * the inner request, and stream-state properties are forwarded via accessors.
 */
function toWritableResponseStream(inner) {
  const facade = {};
  WRITABLE_METHODS.forEach((method) => {
    facade[method] = inner[method].bind(inner);
  });
  for (const prop of PROPS_READ_WRITE) {
    Object.defineProperty(facade, prop, {
      get: () => inner[prop],
      set: (value) => {
        inner[prop] = value;
      }
    });
  }
  for (const prop of PROPS_READ_ONLY) {
    Object.defineProperty(facade, prop, { get: () => inner[prop] });
  }
  return facade;
}
/**
 * Opens the chunked HTTP request used to stream a handler response to RAPID.
 * Returns the writable facade handlers write to, plus promises that settle
 * when RAPID's response headers arrive and when its response body completes.
 */
function createResponseStream(options) {
  let status = STATUS_READY;
  const headers = makeResponseStreamHeaders(options);
  const headersDone = createDeferred();
  const responseDone = createDeferred();
  const agent = makePatchedAgent(options);
  const req = options.httpOptions.http.request({
    http: options.httpOptions.http,
    method: options.httpOptions.method,
    hostname: options.httpOptions.hostname,
    port: options.httpOptions.port,
    path: options.httpOptions.path,
    headers,
    agent
  }, (res) => {
    headersDone.resolve({
      statusCode: res.statusCode,
      statusMessage: res.statusMessage,
      headers: res.headers
    });
    hookResponseListners(res, responseDone, req);
  });
  req.on("error", (err) => {
    // A transport error fails both promises and tears the request down.
    headersDone.reject(err);
    responseDone.reject(err);
    req.destroy(err);
  });
  // Content-type may only change before the first byte is written.
  req.setContentType = (contentType) => {
    if (status !== STATUS_READY) {
      throw new InvalidStreamingOperation("Cannot set content-type, too late.");
    }
    req.setHeader("Content-Type", contentType);
  };
  const origWrite = req.write.bind(req);
  req.write = (chunk, encoding, callback) => {
    log.vvverbose(
      "ResponseStream::write",
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      chunk.length,
      "callback:",
      typeof callback
    );
    let data = chunk;
    // Non-string, non-binary payloads are JSON-encoded before writing.
    if (typeof chunk !== "string" && !Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array)) {
      data = JSON.stringify(chunk);
    }
    // Lets HttpResponseStream prepend its metadata prelude before the
    // first user byte goes out.
    if (status === STATUS_READY && typeof req._onBeforeFirstWrite === "function") {
      req._onBeforeFirstWrite((ch) => origWrite(ch));
    }
    const ret = origWrite(data, encoding, callback);
    log.vvverbose("ResponseStream::origWrite", ret);
    if (status === STATUS_READY) {
      status = STATUS_WRITE_CALLED;
    }
    return ret;
  };
  const request2 = toWritableResponseStream(req);
  hookWeakFailProps(request2, req);
  return {
    request: request2,
    headersDone: headersDone.promise,
    responseDone: responseDone.promise
  };
}
// Wires the fail path for a streaming response: on failure, the formatted
// error is reported via HTTP trailers and the request is ended gracefully.
function hookWeakFailProps(request2, req) {
  const fail = async (err) => {
    log.verbose("ResponseStream::fail err:", err);
    const error = formatError(err);
    req.addTrailers({
      [TRAILER_NAME_ERROR_TYPE]: error.errorType,
      [TRAILER_NAME_ERROR_BODY]: Buffer.from(JSON.stringify(error)).toString("base64")
    });
    // Wait for the request to flush and finish before resolving.
    await new Promise((resolve) => req.end(resolve));
  };
  addFailWeakProp(request2, fail);
}
/**
 * Attaches listeners to RAPID's response: buffers body chunks, settles
 * `responseDone` with the complete buffer on end, and rejects + destroys
 * the request on abort.
 * Note: the original ended with `return buf`, which was always undefined at
 * return time (chunks arrive asynchronously) and was never used by the
 * caller — the misleading dead return was removed.
 */
function hookResponseListners(res, responseDone, req) {
  let buf;
  res.on("data", (chunk) => {
    buf = buf === void 0 ? chunk : Buffer.concat([buf, chunk]);
  });
  res.on("aborted", (err) => {
    responseDone.reject(err);
    req.destroy(err);
  });
  res.on("end", () => {
    log.vvverbose("rapid response", buf ? buf.toString() : "buf undefined");
    responseDone.resolve(buf);
  });
}
// Patches the agent so newly created sockets honor the configured
// highWaterMark (controls streaming backpressure granularity).
function makePatchedAgent(options) {
  const { agent, highWaterMark } = options.httpOptions;
  agent.createConnection = (opts, connectionListener) => {
    const connectionOptions = { ...opts, highWaterMark };
    return createConnection(connectionOptions, connectionListener);
  };
  return agent;
}
// Builds the fixed header set for a chunked streaming response to RAPID,
// declaring the error trailers up front and defaulting the content type.
function makeResponseStreamHeaders(options) {
  const contentType = options.contentType ?? DEFAULT_CONTENT_TYPE;
  return {
    [HEADER_RESPONSE_MODE]: VALUE_STREAMING,
    Trailer: [TRAILER_NAME_ERROR_TYPE, TRAILER_NAME_ERROR_BODY],
    [HEADER_CONTENT_TYPE]: contentType,
    [HEADER_TRANSFER_ENCODING]: CHUNKED_TRANSFER_ENCODING
  };
}
// A promise with its resolve/reject handles exposed alongside it.
function createDeferred() {
  const deferred = {};
  deferred.promise = new Promise((resolve, reject) => {
    deferred.resolve = resolve;
    deferred.reject = reject;
  });
  return deferred;
}
| // dist/stream/http-response-stream.js | |
// MIME type signalling that the stream begins with a JSON metadata prelude.
var METADATA_PRELUDE_CONTENT_TYPE = "application/vnd.awslambda.http-integration-response";
// Number of NUL bytes separating the prelude from the payload.
var DELIMITER_LEN = 8;
/**
 * Wraps a writable response stream so that, on the very first write, it:
 * 1. sets the Content-Type header to the special integration MIME,
 * 2. writes the JSON-stringified prelude, and
 * 3. writes exactly 8 NUL bytes as a delimiter.
 *
 * @param underlyingStream - the streaming response sink
 * @param prelude - any JSON-serializable metadata object
 * @returns the same stream, now wired to prepend the metadata
 */
var HttpResponseStream = class {
  static from(underlyingStream, prelude) {
    underlyingStream.setContentType(METADATA_PRELUDE_CONTENT_TYPE);
    const metadataPrelude = JSON.stringify(prelude);
    underlyingStream._onBeforeFirstWrite = (write) => {
      // Metadata first, then the NUL delimiter; user data follows.
      write(metadataPrelude);
      write(new Uint8Array(DELIMITER_LEN));
    };
    return underlyingStream;
  }
};
| // dist/utils/globals.js | |
/**
 * Installs the public `awslambda` global (streamifyResponse +
 * HttpResponseStream), unless disabled via
 * AWS_LAMBDA_NODEJS_NO_GLOBAL_AWSLAMBDA ("1" or "true").
 */
function setupGlobals() {
  const NoGlobalAwsLambda = process.env["AWS_LAMBDA_NODEJS_NO_GLOBAL_AWSLAMBDA"] === "1" || process.env["AWS_LAMBDA_NODEJS_NO_GLOBAL_AWSLAMBDA"] === "true";
  if (NoGlobalAwsLambda) {
    return;
  }
  globalThis.awslambda = {
    ...globalThis.awslambda,
    // Marks a handler for response streaming; the optional highWaterMark
    // tunes response-stream buffering.
    streamifyResponse: (handler, options) => {
      const typedHandler = handler;
      typedHandler[UserFunctionLoader.HANDLER_STREAMING] = UserFunctionLoader.STREAM_RESPONSE;
      if (typeof options?.highWaterMark === "number") {
        // Explicit radix 10 (the original parseInt omitted it).
        typedHandler[UserFunctionLoader.HANDLER_HIGHWATERMARK] = parseInt(String(options.highWaterMark), 10);
      }
      return handler;
    },
    HttpResponseStream
  };
}
| // dist/client/rapid-client.js | |
var { Agent, request } = cjsRequire("node:http");
// Client for the RAPID (runtime API) endpoint. Invocation polling and result
// posting normally go through the native client binding; an HTTP-based
// alternative client can be forced via env (see shouldUseAlternativeClient).
var RAPIDClient = class _RAPIDClient {
  static ERROR_TYPE_HEADER = "Lambda-Runtime-Function-Error-Type";
  hostname;
  port;
  // Keep-alive agent used for HTTP calls to RAPID.
  agent;
  httpClient;
  nativeClient;
  useAlternativeClient;
  isMultiConcurrent;
  retryOptions;
  // Async factory: loads the native client binding unless one is injected
  // (dependency injection for tests).
  static async create(hostnamePort, deps = {}, isMultiConcurrent = false, retryOptions = DEFAULT_RETRY_OPTIONS) {
    const httpModule = deps.httpModule ?? { Agent, request };
    const nativeClient = deps.nativeClient ?? await this.loadNativeClient();
    return new _RAPIDClient(hostnamePort, httpModule, nativeClient, isMultiConcurrent, retryOptions);
  }
  // `hostnamePort` is the "host:port" of the RAPID endpoint (from
  // AWS_LAMBDA_RUNTIME_API); parseHostPort throws PlatformError if invalid.
  constructor(hostnamePort, httpClient, nativeClient, isMultiConcurrent = false, retryOptions = DEFAULT_RETRY_OPTIONS) {
    const { hostname, port } = parseHostPort(hostnamePort);
    this.hostname = hostname;
    this.port = port;
    this.httpClient = httpClient;
    this.nativeClient = nativeClient;
    // Single keep-alive socket: the runtime talks to one local endpoint.
    this.agent = new this.httpClient.Agent({
      keepAlive: true,
      maxSockets: 1
    });
    this.useAlternativeClient = shouldUseAlternativeClient();
    this.isMultiConcurrent = isMultiConcurrent;
    this.retryOptions = retryOptions;
  }
| async nextInvocation() { | |
| try { | |
| return await this.nextInvocationOnce(); | |
| } catch (error) { | |
| if (!this.isMultiConcurrent) { | |
| throw error; | |
| } | |
| return this.retryAfterInitialFailure(error); | |
| } | |
| } | |
| async nextInvocationOnce() { | |
| if (this.useAlternativeClient) { | |
| return this.nextInvocationHttp(); | |
| } | |
| return this.nativeClient.next(); | |
| } | |
| postInvocationResponse(response, id) { | |
| const bodyString = serializeToJSON(response); | |
| try { | |
| this.nativeClient.done(encodeURIComponent(id), bodyString); | |
| } catch (error) { | |
| if (!this.isMultiConcurrent) { | |
| throw error; | |
| } | |
| structuredConsole.logError(`Failed to post invocation response for ${id}`, error); | |
| } | |
| } | |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any | |
| postInvocationError(error, id) { | |
| try { | |
| const errorResponse = formatError(error); | |
| const bodyString = serializeToJSON(errorResponse); | |
| const xrayString = formatXRayError(error); | |
| this.nativeClient.error(encodeURIComponent(id), bodyString, xrayString); | |
| } catch (error2) { | |
| if (!this.isMultiConcurrent) { | |
| throw error2; | |
| } | |
| structuredConsole.logError(`Failed to post invocation error for ${id}`, error2); | |
| } | |
| } | |
| async postInitError(error) { | |
| const response = formatError(error); | |
| try { | |
| await this.post("/2018-06-01/runtime/init/error", response, { | |
| [_RAPIDClient.ERROR_TYPE_HEADER]: response.errorType | |
| }); | |
| } catch (error2) { | |
| structuredConsole.logError(`Failed to post init error`, error2); | |
| throw error2; | |
| } | |
| } | |
| getStreamForInvocationResponse(id, options) { | |
| const { request: request2, responseDone } = createResponseStream({ | |
| httpOptions: { | |
| agent: this.agent, | |
| http: this.httpClient, | |
| hostname: this.hostname, | |
| method: "POST", | |
| port: this.port, | |
| path: "/2018-06-01/runtime/invocation/" + encodeURIComponent(id) + "/response", | |
| highWaterMark: options?.highWaterMark | |
| } | |
| }); | |
| return { | |
| request: request2, | |
| responseDone | |
| }; | |
| } | |
| static async loadNativeClient() { | |
| try { | |
| return cjsRequire("./rapid-client.node"); | |
| } catch (error) { | |
| throw new NativeClientLoadingError(`Failed to load native client: ${error}`); | |
| } | |
| } | |
| async nextInvocationHttp() { | |
| return new Promise((resolve, reject) => { | |
| const options = { | |
| hostname: this.hostname, | |
| port: this.port, | |
| path: "/2018-06-01/runtime/invocation/next", | |
| method: "GET", | |
| agent: this.agent | |
| }; | |
| const request2 = this.httpClient.request(options, (response) => { | |
| let data = ""; | |
| response.setEncoding("utf-8").on("data", (chunk) => { | |
| data += chunk; | |
| }).on("end", () => { | |
| resolve({ | |
| bodyJson: data, | |
| headers: response.headers | |
| }); | |
| }); | |
| }); | |
| request2.on("error", reject).end(); | |
| }); | |
| } | |
| async post(path3, body, headers = {}) { | |
| const bodyString = serializeToJSON(body); | |
| const options = { | |
| hostname: this.hostname, | |
| port: this.port, | |
| path: path3, | |
| method: "POST", | |
| headers: { | |
| "Content-Type": "application/json", | |
| "Content-Length": Buffer.from(bodyString).length, | |
| ...headers | |
| }, | |
| agent: this.agent | |
| }; | |
| await new Promise((resolve, reject) => { | |
| const request2 = this.httpClient.request(options, (response) => { | |
| response.on("end", resolve).on("error", reject).on("data", () => { | |
| }); | |
| }); | |
| request2.on("error", reject); | |
| request2.end(bodyString, "utf-8"); | |
| }); | |
| } | |
| async retryAfterInitialFailure(initialError) { | |
| let attempts = 1; | |
| let lastError = initialError; | |
| while (attempts <= this.retryOptions.maxRetries) { | |
| const backoffMs = calculateBackoffDelay(attempts - 1, this.retryOptions); | |
| structuredConsole.logError(`Failed to get next invocation (attempt ${attempts}/${this.retryOptions.maxRetries + 1}). Retrying in ${backoffMs}ms...`, lastError); | |
| await new Promise((resolve) => setTimeout(resolve, backoffMs)); | |
| try { | |
| return await this.nextInvocationOnce(); | |
| } catch (error) { | |
| lastError = error; | |
| attempts++; | |
| if (attempts > this.retryOptions.maxRetries) { | |
| structuredConsole.logError(`Failed to get next invocation after ${attempts} attempts. Giving up.`, lastError); | |
| throw lastError; | |
| } | |
| } | |
| } | |
| throw lastError; | |
| } | |
| }; | |
// dist/runtime/lifecycle-manager.js
var log2 = logger("STREAM");
/**
 * Mediates between invoke processors and the RAPID client: fetches the next
 * invocation, reports success/failure, and wires up response streams.
 */
var LifecycleManager = class _LifecycleManager {
  client;
  static create(rapidClient) {
    return new _LifecycleManager(rapidClient);
  }
  constructor(client) {
    this.client = client;
  }
  /** Yields one macrotask turn so pending console output can flush first. */
  async yieldToEventLoop() {
    await new Promise((resolve) => setImmediate(resolve));
  }
  /** Reports a buffered-handler failure for `requestId` to RAPID. */
  async fail(requestId, error) {
    await this.yieldToEventLoop();
    structuredConsole.logError("Invoke Error", error);
    this.client.postInvocationError(error, requestId);
  }
  /** Reports a streaming-handler failure by failing the open response stream. */
  async failResponseStream(responseStream, error) {
    await this.yieldToEventLoop();
    log2.verbose("Runtime::handleOnceStreaming::finally stream destroyed");
    structuredConsole.logError("Invoke Error", error);
    tryCallFail(responseStream, error);
  }
  /** Reports a successful buffered-handler result for `requestId` to RAPID. */
  async succeed(requestId, result) {
    await this.yieldToEventLoop();
    this.client.postInvocationResponse(result, requestId);
  }
  /** Polls for the next invocation and materializes its context and event. */
  async next() {
    const invocationRequest = await this.client.nextInvocation();
    return {
      context: ContextBuilder.build(invocationRequest.headers),
      event: JSON.parse(invocationRequest.bodyJson)
    };
  }
  /** Opens the streamed-response channel for `requestId`. */
  setupResponseStream(requestId, options) {
    const { request: request2, responseDone } = this.client.getStreamForInvocationResponse(requestId, options);
    log2.vverbose("StreamingContextBuilder::createStream", "stream created");
    return {
      rapidResponse: responseDone,
      responseStream: request2
    };
  }
};
// dist/runtime/buffered-invoke-processor.js
/**
 * Invoke processor for buffered (non-streaming) handlers: awaits the handler
 * and reports its result — or its thrown error — via the lifecycle manager.
 */
var BufferedInvokeProcessor = class {
  handler;
  lifecycle;
  constructor(handler, lifecycle) {
    this.handler = handler;
    this.lifecycle = lifecycle;
  }
  async processInvoke(context, event) {
    const requestId = context.awsRequestId;
    try {
      const response = await this.handler(event, context);
      await this.lifecycle.succeed(requestId, response);
    } catch (error) {
      await this.lifecycle.fail(requestId, error);
    }
  }
};
// dist/runtime/streaming-invoke-processor.js
var log3 = logger("RUNTIME");
/**
 * Invoke processor for response-streaming handlers: opens the response stream
 * before invoking the handler and waits for the stream to finish.
 */
var StreamingInvokeProcessor = class {
  handler;
  lifecycle;
  handlerMetadata;
  constructor(handler, lifecycle, handlerMetadata) {
    this.handler = handler;
    this.lifecycle = lifecycle;
    this.handlerMetadata = handlerMetadata;
  }
  async processInvoke(context, event) {
    const streamSetup = this.lifecycle.setupResponseStream(context.awsRequestId, {
      highWaterMark: this.handlerMetadata.highWaterMark
    });
    const { rapidResponse, responseStream } = streamSetup;
    try {
      log3.verbose("StreamingInvokeProcessor::processInvoke", "invoking handler");
      const handlerResult = this.handler(event, responseStream, context);
      await this.waitForStreamClosure(handlerResult, rapidResponse, responseStream);
    } catch (err) {
      await this.lifecycle.failResponseStream(responseStream, err);
    }
  }
  /**
   * Awaits the handler promise and the RAPID acknowledgement, then verifies
   * the response stream was actually finished by the handler.
   */
  async waitForStreamClosure(handlerResult, rapidResponse, responseStream) {
    log3.verbose("StreamingInvokeProcessor::waitForStreamClosure", "handler returned");
    if (!this.isPromise(handlerResult)) {
      log3.verbose("StreamingInvokeProcessor::waitForStreamClosure", "Runtime got non-promise response");
      throw new Error("Streaming does not support non-async handlers.");
    }
    const result = await handlerResult;
    if (typeof result !== "undefined") {
      console.warn("Streaming handlers ignore return values.");
    }
    log3.verbose("StreamingInvokeProcessor::waitForStreamClosure", "result is awaited.");
    const rapidRes = await rapidResponse;
    log3.vverbose("StreamingInvokeProcessor::waitForStreamClosure", "RAPID response", rapidRes);
    if (!responseStream.writableFinished) {
      throw new Error("Response stream is not finished.");
    }
  }
  /** Thenable check: non-null object exposing a callable `then`. */
  isPromise(value) {
    if (value === null || typeof value !== "object") {
      return false;
    }
    return typeof value.then === "function";
  }
};
// dist/runtime/runtime.js
/**
 * Top-level invoke loop. Chooses a buffered or streaming processor from the
 * handler metadata and runs either the single-concurrent (serial) loop or the
 * multi-concurrent (fire-per-invoke) loop.
 */
var Runtime = class _Runtime {
  handler;
  handlerMetadata;
  isMultiConcurrent;
  lifecycle;
  static create({ rapidClient, handler, handlerMetadata = {}, isMultiConcurrent = false }) {
    return new _Runtime(handler, handlerMetadata, isMultiConcurrent, LifecycleManager.create(rapidClient));
  }
  constructor(handler, handlerMetadata, isMultiConcurrent, lifecycle) {
    this.handler = handler;
    this.handlerMetadata = handlerMetadata;
    this.isMultiConcurrent = isMultiConcurrent;
    this.lifecycle = lifecycle;
  }
  /** Runs forever (each process* loop never returns normally). */
  async start() {
    const processor = this.createProcessor();
    return this.isMultiConcurrent
      ? this.processMultiConcurrent(processor)
      : this.processSingleConcurrent(processor);
  }
  createProcessor() {
    return this.handlerMetadata.streaming
      ? new StreamingInvokeProcessor(this.handler, this.lifecycle, this.handlerMetadata)
      : new BufferedInvokeProcessor(this.handler, this.lifecycle);
  }
  /** Serial loop: each invoke completes before the next poll. */
  async processSingleConcurrent(processor) {
    for (;;) {
      const { context, event } = await this.lifecycle.next();
      await this.dispatch(processor, context, event);
    }
  }
  /** Concurrent loop: invokes run in the background while polling continues. */
  async processMultiConcurrent(processor) {
    for (;;) {
      const { context, event } = await this.lifecycle.next();
      setImmediate(() => this.dispatch(processor, context, event));
    }
  }
  /** Runs one invoke inside its AsyncLocalStorage-backed invoke context. */
  dispatch(processor, context, event) {
    return this.runWithInvokeContext(context.awsRequestId, context.xRayTraceId, () => processor.processInvoke(context, event));
  }
  async runWithInvokeContext(requestId, xRayTraceId, fn) {
    const invokeStore = await InvokeStore.getInstanceAsync();
    const contextValues = {
      [InvokeStoreBase.PROTECTED_KEYS.REQUEST_ID]: requestId,
      [InvokeStoreBase.PROTECTED_KEYS.X_RAY_TRACE_ID]: xRayTraceId
    };
    return invokeStore.run(contextValues, fn);
  }
};
// dist/utils/runtime-setup.js
/**
 * Bootstraps the runtime: installs globals, patches console, validates the
 * required environment, loads the user handler, and builds the Runtime.
 * Init failures are reported to RAPID before being rethrown.
 */
async function createRuntime(rapidClientOptions = {}) {
  setupGlobals();
  await LogPatch.patchConsole();
  const isMultiConcurrent = isMultiConcurrentMode();
  const {
    AWS_LAMBDA_RUNTIME_API: runtimeApi,
    _HANDLER: handlerString,
    LAMBDA_TASK_ROOT: taskRoot
  } = process.env;
  if (!runtimeApi) {
    throw new PlatformError("AWS_LAMBDA_RUNTIME_API environment variable is not set");
  }
  if (!handlerString) {
    throw new PlatformError("_HANDLER environment variable is not set");
  }
  if (!taskRoot) {
    throw new PlatformError("LAMBDA_TASK_ROOT environment variable is not set");
  }
  const rapidClient = await RAPIDClient.create(runtimeApi, rapidClientOptions, isMultiConcurrent);
  try {
    const { handler, metadata: handlerMetadata } = await UserFunctionLoader.load(taskRoot, handlerString);
    errorOnDeprecatedCallback(handlerMetadata);
    return Runtime.create({
      rapidClient,
      handler,
      handlerMetadata,
      isMultiConcurrent
    });
  } catch (error) {
    structuredConsole.logError("Init Error", error);
    await rapidClient.postInitError(error);
    throw error;
  }
}
// dist/utils/retry.js
var DEFAULT_RETRY_OPTIONS = {
  initialDelayMs: 100,
  maxDelayMs: 9e4,
  // 90 second max delay
  maxRetries: 19
  // Total 20 attempts, pushes the last retry to just over 15 minute mark
};
/**
 * Exponential backoff: initialDelayMs doubled per attempt (0-based), capped
 * at maxDelayMs.
 */
function calculateBackoffDelay(attempt, options = DEFAULT_RETRY_OPTIONS) {
  const exponentialDelay = options.initialDelayMs * 2 ** attempt;
  return Math.min(exponentialDelay, options.maxDelayMs);
}
// dist/utils/socket.js
var { createConnection: createConnection2 } = cjsRequire("node:net");
/**
 * Connects to the telemetry FD provider socket (when configured via
 * _LAMBDA_TELEMETRY_LOG_FD_PROVIDER_SOCKET) and resolves with the
 * connection's file descriptor; without that env var, falls back to stdout (fd 1).
 */
async function acquireSocketFd() {
  const socketPath = process.env._LAMBDA_TELEMETRY_LOG_FD_PROVIDER_SOCKET;
  if (!socketPath) {
    return 1;
  }
  return new Promise((resolve, reject) => {
    try {
      const socket = createConnection2(socketPath);
      socket.once("error", (err) => {
        reject(new PlatformError(`Failed to connect to telemetry socket: ${err.message}`));
      });
      socket.once("connect", () => {
        // NOTE(review): relies on the Node-internal `_handle`; fd is presumably
        // only exposed for POSIX socket handles — verify on runtime upgrades.
        const handle = socket._handle;
        if (handle && typeof handle.fd === "number") {
          resolve(handle.fd);
          return;
        }
        reject(new PlatformError("Socket file descriptor not available"));
      });
    } catch (error) {
      const reason = error instanceof Error ? error.message : String(error);
      reject(new PlatformError(`Failed to connect to telemetry socket: ${reason}`));
    }
  });
}
// dist/utils/worker.js
var { readFileSync } = cjsRequire("node:fs");
var os = cjsRequire("node:os");
var verboseLog = logger("WorkerCount");
var DEFAULT_CPU_COUNT = 1;
/**
 * Derives the usable CPU count from the cgroup v2 cpu.max file
 * ("<quota> <period>" or "max <period>"): ceil(quota/period) when limited,
 * os.cpus().length when unlimited, DEFAULT_CPU_COUNT when unreadable/invalid.
 */
function getAvailableCpus() {
  try {
    const content = readFileSync("/sys/fs/cgroup/cpu.max", "utf8");
    const [quotaField, periodField] = content.trim().split(/\s+/);
    if (quotaField === "max") {
      const detected = Math.max(1, os.cpus().length);
      verboseLog.vvverbose(`cpu.max reports unlimited quota ("max"), using detected cores: ${detected}`);
      return detected;
    }
    const quota = parseInt(quotaField, 10);
    const period = parseInt(periodField, 10);
    if (quota > 0 && period > 0) {
      const cpuCount = Math.ceil(quota / period);
      verboseLog.vvverbose(`Using cpu.max quota/period: ${quota}/${period} = ${cpuCount} CPUs`);
      return cpuCount;
    }
    verboseLog.vvverbose(`cpu.max quota/period invalid: quota=${quota}, period=${period}`);
  } catch {
    verboseLog.vvverbose("cpu.max file not accessible, falling back");
  }
  verboseLog.vvverbose(`Could not read taking minimum 1 vCPU: ${DEFAULT_CPU_COUNT} CPUs`);
  return DEFAULT_CPU_COUNT;
}
/**
 * Number of worker threads to spawn: AWS_LAMBDA_NODEJS_WORKER_COUNT when it
 * parses to a positive integer, otherwise 8 workers per available CPU.
 */
function getWorkerCount() {
  const envValue = process.env.AWS_LAMBDA_NODEJS_WORKER_COUNT;
  if (envValue) {
    const workerCount = parseInt(envValue, 10);
    // Guard against NaN / zero / negative env values (e.g. WORKER_COUNT="abc"),
    // which would otherwise be handed straight to the worker spawner loop.
    if (Number.isInteger(workerCount) && workerCount > 0) {
      verboseLog.vvverbose(`Using AWS_LAMBDA_NODEJS_WORKER_COUNT: ${envValue} = ${workerCount} workers`);
      return workerCount;
    }
    verboseLog.vvverbose(`Ignoring invalid AWS_LAMBDA_NODEJS_WORKER_COUNT: ${envValue}`);
  }
  const detectedCpus = getAvailableCpus();
  const calculatedWorkers = 8 * detectedCpus;
  verboseLog.vvverbose(`No env var set, using 8 * ${detectedCpus} = ${calculatedWorkers} workers`);
  return calculatedWorkers;
}
// dist/logging/constants.js
// Output formats selectable for runtime logging.
var LOG_FORMAT = {
  JSON: "JSON",
  TEXT: "TEXT"
};
// Log levels: `priority` orders levels for min-level filtering (see
// BaseLogger.shouldLog); `tlvMask` is OR-ed into the telemetry frame type
// word for JSON frames (see TelemetryLogger.writeFrame).
var LOG_LEVEL = {
  TRACE: { name: "TRACE", priority: 1, tlvMask: 4 },
  DEBUG: { name: "DEBUG", priority: 2, tlvMask: 8 },
  INFO: { name: "INFO", priority: 3, tlvMask: 12 },
  WARN: { name: "WARN", priority: 4, tlvMask: 16 },
  ERROR: { name: "ERROR", priority: 5, tlvMask: 20 },
  FATAL: { name: "FATAL", priority: 6, tlvMask: 24 }
};
// Telemetry FD framing layout (big-endian; see TelemetryLogger.writeFrame).
// FRAME_TYPE_TEXT is 0xa55a0003 and FRAME_TYPE_JSON is 0xa55a0002 in decimal.
var TELEMETRY = {
  FRAME_HEADER_SIZE: 16,
  // 4 + 4 + 8 bytes
  TYPE_OFFSET: 0,
  LENGTH_OFFSET: 4,
  TIMESTAMP_OFFSET: 8,
  FRAME_TYPE_TEXT: 2774138883,
  FRAME_TYPE_JSON: 2774138882
};
// Delimiters used when rendering log lines; embedded newlines are rewritten
// to CARRIAGE_RETURN so each record stays on one physical line.
var FORMAT = {
  FIELD_DELIMITER: " ",
  LINE_DELIMITER: "\n",
  CARRIAGE_RETURN: "\r"
};
// Field names emitted in JSON-format log records (see formatJsonMessage).
var JSON_LOG_FIELDS = {
  TIMESTAMP: "timestamp",
  LEVEL: "level",
  REQUEST_ID: "requestId",
  TENANT_ID: "tenantId",
  MESSAGE: "message",
  ERROR_TYPE: "errorType",
  ERROR_MESSAGE: "errorMessage",
  STACK_TRACE: "stackTrace"
};
// dist/logging/base-logger.js
/**
 * Shared logger state: format/min-level options plus the invoke store used by
 * subclasses to resolve per-request metadata (request id, tenant id).
 */
var BaseLogger = class {
  options;
  invokeStoreParam;
  invokeStore;
  constructor(options, invokeStoreParam) {
    this.options = options;
    // The store is kept under both names for subclass compatibility.
    this.invokeStoreParam = invokeStoreParam;
    this.invokeStore = invokeStoreParam;
  }
  /** A record is emitted only when its level reaches the configured minimum. */
  shouldLog(level) {
    const { minLevel } = this.options;
    return level.priority >= minLevel.priority;
  }
};
// dist/logging/formatter.js
var { format } = cjsRequire("node:util");
/** Renders one TEXT-format log line: "<timestamp> <requestId> <LEVEL> <message>". */
function formatTextMessage(timestamp, requestId, level, message, ...params) {
  const rendered = format(message, ...params);
  return [timestamp, requestId, level.name, rendered].join(FORMAT.FIELD_DELIMITER);
}
/**
 * Renders one JSON-format log record. With no extra params the raw message is
 * embedded (Errors serialized by jsonErrorReplacer, circular values downgraded
 * via util.format); otherwise util.format renders the message and the first
 * Error param is reflected into errorType/errorMessage/stackTrace fields.
 */
function formatJsonMessage(timestamp, requestId, tenantId, level, message, ...params) {
  const record = {
    [JSON_LOG_FIELDS.TIMESTAMP]: timestamp,
    [JSON_LOG_FIELDS.LEVEL]: level.name,
    [JSON_LOG_FIELDS.REQUEST_ID]: requestId
  };
  if (tenantId) {
    record[JSON_LOG_FIELDS.TENANT_ID] = tenantId;
  }
  if (params.length === 0) {
    record.message = message;
    try {
      return JSON.stringify(record, jsonErrorReplacer);
    } catch {
      // e.g. circular structures: stringify the formatted representation instead.
      record.message = format(record.message);
      return JSON.stringify(record);
    }
  }
  record.message = format(message, ...params);
  const firstError = params.find((param) => param instanceof Error);
  if (firstError) {
    record[JSON_LOG_FIELDS.ERROR_TYPE] = firstError?.constructor?.name ?? "UnknownError";
    record[JSON_LOG_FIELDS.ERROR_MESSAGE] = firstError.message;
    record[JSON_LOG_FIELDS.STACK_TRACE] = typeof firstError.stack === "string" ? firstError.stack.split("\n") : [];
  }
  return JSON.stringify(record);
}
/**
 * JSON.stringify replacer that expands Error values into a serializable
 * { errorType, errorMessage, stackTrace, ...ownEnumerableProps } shape;
 * non-Error values pass through untouched.
 */
var jsonErrorReplacer = (_, value) => {
  if (!(value instanceof Error)) {
    return value;
  }
  return Object.assign({
    errorType: value?.constructor?.name ?? "UnknownError",
    errorMessage: value.message,
    stackTrace: typeof value.stack === "string" ? value.stack.split("\n") : value.stack
  }, value);
};
// dist/logging/socket-logger.js
var fs = cjsRequire("node:fs");
/**
 * Logger for multi-concurrent mode: writes one JSON record per line to the
 * file descriptor obtained from the telemetry FD provider socket. Embedded
 * newlines are rewritten to carriage returns to keep each record on one line.
 */
var SocketLogger = class extends BaseLogger {
  fd;
  invokeStore;
  constructor(fd, options, invokeStore) {
    super(options, invokeStore);
    this.fd = fd;
    this.invokeStore = invokeStore;
  }
  log(level, message, ...params) {
    if (!this.shouldLog(level)) {
      return;
    }
    const timestamp = new Date().toISOString();
    const requestId = this.invokeStore.getRequestId();
    const tenantId = this.invokeStore.getTenantId() || "";
    const record = formatJsonMessage(timestamp, requestId, tenantId, level, message, ...params);
    const line = record.replace(/\n/g, FORMAT.CARRIAGE_RETURN) + FORMAT.LINE_DELIMITER;
    fs.writeSync(this.fd, line);
  }
};
// dist/logging/stdout-logger.js
/**
 * Default logger: writes unframed log lines straight to stdout in either
 * TEXT or JSON format, one record per line.
 */
var StdoutLogger = class extends BaseLogger {
  log(level, message, ...params) {
    if (!this.shouldLog(level)) {
      return;
    }
    const timestamp = new Date().toISOString();
    const requestId = this.invokeStore.getRequestId();
    const tenantId = this.invokeStore.getTenantId() || "";
    if (this.options.format === LOG_FORMAT.JSON) {
      this.logJsonMessage(timestamp, requestId, tenantId, level, message, ...params);
      return;
    }
    this.logTextMessge(timestamp, requestId, level, message, ...params);
  }
  // NOTE(review): method name spelling ("Messge") kept as-is for interface compatibility.
  logTextMessge(timestamp, requestId, level, message, ...params) {
    const rendered = formatTextMessage(timestamp, requestId, level, message, ...params);
    this.writeLine(rendered);
  }
  logJsonMessage(timestamp, requestId, tenantId, level, message, ...params) {
    const rendered = formatJsonMessage(timestamp, requestId, tenantId, level, message, ...params);
    this.writeLine(rendered);
  }
  /** Collapses embedded newlines and appends the record delimiter. */
  writeLine(rendered) {
    const line = rendered.replace(/\n/g, FORMAT.CARRIAGE_RETURN);
    process.stdout.write(line + FORMAT.LINE_DELIMITER);
  }
};
// dist/logging/telemetry-logger.js
var fs2 = cjsRequire("node:fs");
/**
 * Logger that writes framed log records to the platform telemetry file
 * descriptor (see writeFrame for the frame layout). Reuses a single header
 * buffer across calls — safe because writes are synchronous.
 */
var TelemetryLogger = class extends BaseLogger {
  fd;
  invokeStore;
  // Reusable 16-byte frame-header scratch buffer.
  buffer;
  constructor(fd, options, invokeStore) {
    super(options, invokeStore);
    this.fd = fd;
    this.invokeStore = invokeStore;
    this.buffer = Buffer.alloc(TELEMETRY.FRAME_HEADER_SIZE);
  }
  // Dispatches to the JSON or TEXT framing path based on configured format.
  log(level, message, ...params) {
    if (!this.shouldLog(level))
      return;
    const now = /* @__PURE__ */ new Date();
    const requestId = this.invokeStore.getRequestId();
    const tenantId = this.invokeStore.getTenantId() || "";
    if (this.options.format === LOG_FORMAT.JSON) {
      this.logJsonMessge(now, requestId, tenantId, level, message, ...params);
    } else {
      this.logTextMessge(now, requestId, level, message, ...params);
    }
  }
  // TEXT frames carry a trailing newline inside the message payload.
  logTextMessge(now, requestId, level, message, ...params) {
    const line = formatTextMessage(now.toISOString(), requestId, level, message, ...params) + FORMAT.LINE_DELIMITER;
    this.writeFrame(level, now, line, TELEMETRY.FRAME_TYPE_TEXT);
  }
  // JSON frames carry the record as-is; no trailing newline.
  logJsonMessge(now, requestId, tenantId, level, message, ...params) {
    const line = formatJsonMessage(now.toISOString(), requestId, tenantId, level, message, ...params);
    this.writeFrame(level, now, line, TELEMETRY.FRAME_TYPE_JSON);
  }
  /**
   * Write logs to filedescriptor.
   * Implements the logging contract between runtimes and the platform.
   * Each entry is framed as:
   * +----------------------+------------------------+---------------------+-----------------------+
   * | Frame Type - 4 bytes | Length (len) - 4 bytes | Timestamp - 8 bytes | Message - 'len' bytes |
   * +----------------------+------------------------+---------------------+-----------------------+
   * The first 4 bytes are the frame type. For text logs this is always 0xa55a0003, while for
   * json logs this is calculated as bitiwise OR of 0xa55a0002 and tlv mask of corresponding message log level.
   * The second 4 bytes are the length of the message.
   * The next 8 bytes are the UNIX timestamp of the message with microseconds precision.
   * The remaining bytes are the message itself. Byte order is big-endian.
   */
  writeFrame(level, now, message, frameType) {
    // >>> 0 keeps the OR result in unsigned 32-bit range for writeUInt32BE.
    this.buffer.writeUInt32BE((frameType | level.tlvMask) >>> 0, TELEMETRY.TYPE_OFFSET);
    const messageBuffer = Buffer.from(message, "utf8");
    // Byte length (not character count) of the UTF-8 payload.
    this.buffer.writeInt32BE(messageBuffer.length, TELEMETRY.LENGTH_OFFSET);
    // Milliseconds-since-epoch scaled to microseconds.
    this.buffer.writeBigInt64BE(BigInt(now.valueOf()) * 1000n, TELEMETRY.TIMESTAMP_OFFSET);
    fs2.writeSync(this.fd, this.buffer);
    fs2.writeSync(this.fd, messageBuffer);
  }
};
// dist/logging/log-patch.js
/**
 * Replaces the global console methods with a level-filtered logger routed to
 * stdout, the telemetry FD, or (in multi-concurrent mode) the telemetry socket.
 * Also exposes `structuredConsole.logError` for runtime-internal error logs.
 */
var LogPatch = class _LogPatch {
  // Shared no-op used for levels below the configured minimum.
  static NopLog = () => {
  };
  static logger;
  static options;
  // Entry point: builds the logger from env-derived options and patches console.
  static async patchConsole() {
    const options = this.createLoggerOptions();
    this.logger = await this.createLogger(options);
    this.patchConsoleMethods(this.logger);
    this.options = options;
  }
  // Error logging facade; a no-op until patchConsole() has run.
  static structuredConsole = {
    logError(msg, err) {
      if (_LogPatch.logger) {
        const errorLogger = _LogPatch.options?.format === LOG_FORMAT.JSON ? _LogPatch.jsonErrorLogger : _LogPatch.textErrorLogger;
        errorLogger(msg, err);
      }
    }
  };
  static createLoggerOptions() {
    return {
      format: determineLogFormat(),
      minLevel: determineLogLevel()
    };
  }
  // Picks the logger backend: socket (multi-concurrent) > telemetry FD > stdout.
  static async createLogger(options) {
    const invokeStore = await InvokeStore.getInstanceAsync();
    if (isMultiConcurrentMode()) {
      const socketFd = await acquireSocketFd();
      return new SocketLogger(socketFd, options, invokeStore);
    }
    const telemetryFd = consumeTelemetryFd();
    if (telemetryFd) {
      return new TelemetryLogger(telemetryFd, options, invokeStore);
    }
    return new StdoutLogger(options, invokeStore);
  }
  // Note: shouldLog is evaluated once per level at patch time, so levels below
  // the minimum become permanent no-ops for the life of the process.
  static patchConsoleMethods(logger2) {
    const createLogFunction = (level) => {
      if (!logger2.shouldLog(level)) {
        return this.NopLog;
      }
      return (message, ...params) => {
        logger2.log(level, message, ...params);
      };
    };
    console.trace = createLogFunction(LOG_LEVEL.TRACE);
    console.debug = createLogFunction(LOG_LEVEL.DEBUG);
    console.info = createLogFunction(LOG_LEVEL.INFO);
    console.warn = createLogFunction(LOG_LEVEL.WARN);
    console.error = createLogFunction(LOG_LEVEL.ERROR);
    // console.fatal is not a standard console method; it is added here.
    console.fatal = createLogFunction(LOG_LEVEL.FATAL);
    console.log = console.info;
  }
  // JSON format: emit the Error object itself (serialized by the JSON logger).
  static jsonErrorLogger = (_, err) => {
    console.error(intoError(err));
  };
  // TEXT format: emit the message plus a formatted rendering of the error.
  static textErrorLogger = (msg, err) => {
    console.error(msg, toFormatted(intoError(err)));
  };
};
var { structuredConsole } = LogPatch;
// dist/worker/worker-manager.js
// Use the node:-prefixed specifier for consistency with the rest of the
// bundle (node:http, node:net, node:worker_threads in ignition.js).
var { Worker } = cjsRequire("node:worker_threads");
var verboseLog2 = logger("WorkerManager");
/**
 * Spawns one worker thread per computed worker slot (see getWorkerCount) and
 * keeps the main thread alive until every worker has exited.
 */
var WorkerManager = class {
  async start() {
    const workerCount = getWorkerCount();
    verboseLog2.verbose(`Starting ${workerCount} worker threads`);
    const workerPromises = [];
    for (let i = 0; i < workerCount; i++) {
      const workerId = i;
      // Each worker re-executes this same module; ignition() routes
      // non-main threads into the runtime loop.
      const worker = new Worker(new URL(import.meta.url), {
        env: process.env,
        execArgv: this.getFilteredExecArgv()
      });
      workerPromises.push(this.waitForWorker(worker, workerId));
    }
    // allSettled: one worker's failure must not tear down its siblings.
    await Promise.allSettled(workerPromises);
    verboseLog2.verbose("All workers have exited");
  }
  /**
   * Resolves (never rejects) when the worker errors or exits; failures are
   * logged so the supervising loop keeps running.
   */
  waitForWorker(worker, workerId) {
    return new Promise((resolve) => {
      worker.on("error", (error) => {
        structuredConsole.logError(`Worker ${workerId} error:`, error);
        resolve();
      });
      worker.on("exit", (code) => {
        if (code !== 0) {
          const error = new Error(`Worker ${workerId} exited with code ${code}`);
          structuredConsole.logError(`Worker ${workerId} exit:`, error);
        }
        resolve();
      });
    });
  }
  /**
   * Drops process-wide V8 memory/GC flags that are invalid (or undesirable)
   * when applied per-worker-thread.
   */
  getFilteredExecArgv() {
    const invalidWorkerFlags = [
      "--expose-gc",
      "--max-semi-space-size",
      "--max-old-space-size"
    ];
    return process.execArgv.filter((arg) => {
      return !invalidWorkerFlags.some((flag) => arg.startsWith(flag));
    });
  }
};
// dist/worker/ignition.js
var { isMainThread } = cjsRequire("node:worker_threads");
var verboseLog3 = logger("Ignition");
/**
 * Entry point. In multi-concurrent mode the main thread only supervises
 * worker threads; every other case (worker thread, or single-concurrent
 * mode) builds and runs the runtime loop.
 */
async function ignition() {
  const superviseWorkers = isMultiConcurrentMode() && isMainThread;
  if (superviseWorkers) {
    verboseLog3.verbose("Running in MultiConcurrent Mode");
    const manager = new WorkerManager();
    await manager.start();
    return;
  }
  verboseLog3.verbose("Running worker thread");
  const runtime = await createRuntime();
  await runtime.start();
}
// dist/index.js
// Surface startup failures explicitly instead of leaving a floating promise
// whose rejection would only crash via the unhandled-rejection path.
ignition().catch((error) => {
  console.error("Runtime ignition failed:", error);
  process.exit(1);
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment