| | const mongoose = require('mongoose'); |
| | const crypto = require('node:crypto'); |
| | const { logger } = require('@librechat/data-schemas'); |
| | const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider'); |
| | const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_all, mcp_delimiter } = |
| | require('librechat-data-provider').Constants; |
| | const { |
| | removeAgentFromAllProjects, |
| | removeAgentIdsFromProject, |
| | addAgentIdsToProject, |
| | getProjectByName, |
| | } = require('./Project'); |
| | const { removeAllPermissions } = require('~/server/services/PermissionService'); |
| | const { getMCPServerTools } = require('~/server/services/Config'); |
| | const { Agent, AclEntry } = require('~/db/models'); |
| | const { getActions } = require('./Action'); |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Creates a new agent document seeded with its first version snapshot.
 * The `author` field is stored on the agent itself but excluded from the
 * version history.
 * @param {Object} agentData - Agent fields to persist.
 * @returns {Promise<Object>} The created agent as a plain object.
 */
const createAgent = async (agentData) => {
  const { author: _author, ...versionData } = agentData;
  const now = new Date();
  const firstVersion = {
    ...versionData,
    createdAt: now,
    updatedAt: now,
  };

  const agentDoc = await Agent.create({
    ...agentData,
    versions: [firstVersion],
    category: agentData.category || 'general',
  });

  return agentDoc.toObject();
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | const getAgent = async (searchParameter) => await Agent.findOne(searchParameter).lean(); |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | const getAgents = async (searchParameter) => await Agent.find(searchParameter).lean(); |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Builds an in-memory (ephemeral) agent from the request body and an optional
 * model spec. Tool selection is the union of the ephemeral agent's flags, the
 * matched model spec's flags, and the tools exposed by any configured MCP
 * servers.
 *
 * Changes vs. prior version: removed dead `addedServers` bookkeeping (a Set
 * can never yield the same server twice while iterating), resolved MCP server
 * tools concurrently, and simplified the redundant artifacts check.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request (reads config, body, user).
 * @param {string} [params.spec] - Optional model spec name to match.
 * @param {string} params.agent_id - Ephemeral agent id.
 * @param {string} params.endpoint - Provider/endpoint name.
 * @param {Object} params.model_parameters - Model params; `model` is split out.
 * @returns {Promise<Object>} The assembled ephemeral agent definition.
 */
const loadEphemeralAgent = async ({ req, spec, agent_id, endpoint, model_parameters: _m }) => {
  const { model, ...model_parameters } = _m;

  // Resolve the model spec by name, if one was requested.
  const modelSpecs = req.config?.modelSpecs?.list;
  let modelSpec = null;
  if (spec != null && spec !== '') {
    modelSpec = modelSpecs?.find((s) => s.name === spec) || null;
  }

  const ephemeralAgent = req.body.ephemeralAgent;
  const userId = req.user?.id;

  // Union of MCP servers from the request and the model spec; Set de-duplicates.
  const mcpServers = new Set(ephemeralAgent?.mcp);
  if (modelSpec?.mcpServers) {
    for (const mcpServer of modelSpec.mcpServers) {
      mcpServers.add(mcpServer);
    }
  }

  const tools = [];
  if (ephemeralAgent?.execute_code === true || modelSpec?.executeCode === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.file_search === true || modelSpec?.fileSearch === true) {
    tools.push(Tools.file_search);
  }
  if (ephemeralAgent?.web_search === true || modelSpec?.webSearch === true) {
    tools.push(Tools.web_search);
  }

  // Resolve every server's tool list concurrently; Promise.all preserves input
  // order, so the resulting tool ordering matches the former sequential loop.
  const serverToolLists = await Promise.all(
    [...mcpServers].map(async (mcpServer) => {
      const serverTools = await getMCPServerTools(userId, mcpServer);
      if (!serverTools) {
        // No tool listing available: fall back to the "all tools" wildcard.
        return [`${mcp_all}${mcp_delimiter}${mcpServer}`];
      }
      return Object.keys(serverTools);
    }),
  );
  for (const serverTools of serverToolLists) {
    tools.push(...serverTools);
  }

  const instructions = req.body.promptPrefix;
  const result = {
    id: agent_id,
    instructions,
    provider: endpoint,
    model_parameters,
    model,
    tools,
  };

  // Only attach artifacts when a truthy value was provided.
  if (ephemeralAgent?.artifacts) {
    result.artifacts = ephemeralAgent.artifacts;
  }
  return result;
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Loads an agent by id, or builds an ephemeral agent when the sentinel
 * ephemeral id is supplied.
 * @param {Object} params
 * @param {Object} params.req - Express request.
 * @param {string} [params.spec] - Optional model spec name.
 * @param {string} params.agent_id - Agent id (or the ephemeral sentinel).
 * @param {string} params.endpoint - Provider/endpoint name.
 * @param {Object} params.model_parameters - Model parameters.
 * @returns {Promise<Object|null>} The agent (with a `version` count), or null.
 */
const loadAgent = async ({ req, spec, agent_id, endpoint, model_parameters }) => {
  if (!agent_id) {
    return null;
  }

  if (agent_id === EPHEMERAL_AGENT_ID) {
    return loadEphemeralAgent({ req, spec, agent_id, endpoint, model_parameters });
  }

  const agent = await getAgent({ id: agent_id });
  if (agent == null) {
    return null;
  }

  // Expose the number of stored versions as the agent's version counter.
  agent.version = agent.versions ? agent.versions.length : 0;
  return agent;
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => { |
| | if (!versions || versions.length === 0) { |
| | return null; |
| | } |
| |
|
| | const excludeFields = [ |
| | '_id', |
| | 'id', |
| | 'createdAt', |
| | 'updatedAt', |
| | 'author', |
| | 'updatedBy', |
| | 'created_at', |
| | 'updated_at', |
| | '__v', |
| | 'versions', |
| | 'actionsHash', |
| | ]; |
| |
|
| | const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData; |
| |
|
| | if (Object.keys(directUpdates).length === 0 && !actionsHash) { |
| | return null; |
| | } |
| |
|
| | const wouldBeVersion = { ...currentData, ...directUpdates }; |
| | const lastVersion = versions[versions.length - 1]; |
| |
|
| | if (actionsHash && lastVersion.actionsHash !== actionsHash) { |
| | return null; |
| | } |
| |
|
| | const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]); |
| |
|
| | const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field)); |
| |
|
| | let isMatch = true; |
| | for (const field of importantFields) { |
| | const wouldBeValue = wouldBeVersion[field]; |
| | const lastVersionValue = lastVersion[field]; |
| |
|
| | |
| | if (!wouldBeValue && !lastVersionValue) { |
| | continue; |
| | } |
| |
|
| | |
| | if (Array.isArray(wouldBeValue) || Array.isArray(lastVersionValue)) { |
| | |
| | let wouldBeArr; |
| | if (Array.isArray(wouldBeValue)) { |
| | wouldBeArr = wouldBeValue; |
| | } else if (wouldBeValue == null) { |
| | wouldBeArr = []; |
| | } else { |
| | wouldBeArr = [wouldBeValue]; |
| | } |
| |
|
| | let lastVersionArr; |
| | if (Array.isArray(lastVersionValue)) { |
| | lastVersionArr = lastVersionValue; |
| | } else if (lastVersionValue == null) { |
| | lastVersionArr = []; |
| | } else { |
| | lastVersionArr = [lastVersionValue]; |
| | } |
| |
|
| | if (wouldBeArr.length !== lastVersionArr.length) { |
| | isMatch = false; |
| | break; |
| | } |
| |
|
| | |
| | if (field === 'projectIds') { |
| | const wouldBeIds = wouldBeArr.map((id) => id.toString()).sort(); |
| | const versionIds = lastVersionArr.map((id) => id.toString()).sort(); |
| |
|
| | if (!wouldBeIds.every((id, i) => id === versionIds[i])) { |
| | isMatch = false; |
| | break; |
| | } |
| | } |
| | |
| | else if ( |
| | wouldBeArr.length > 0 && |
| | typeof wouldBeArr[0] === 'object' && |
| | wouldBeArr[0] !== null |
| | ) { |
| | const sortedWouldBe = [...wouldBeArr].map((item) => JSON.stringify(item)).sort(); |
| | const sortedVersion = [...lastVersionArr].map((item) => JSON.stringify(item)).sort(); |
| |
|
| | if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) { |
| | isMatch = false; |
| | break; |
| | } |
| | } else { |
| | const sortedWouldBe = [...wouldBeArr].sort(); |
| | const sortedVersion = [...lastVersionArr].sort(); |
| |
|
| | if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) { |
| | isMatch = false; |
| | break; |
| | } |
| | } |
| | } |
| | |
| | else if (typeof wouldBeValue === 'object' && wouldBeValue !== null) { |
| | const lastVersionObj = |
| | typeof lastVersionValue === 'object' && lastVersionValue !== null ? lastVersionValue : {}; |
| |
|
| | |
| | const wouldBeKeys = Object.keys(wouldBeValue); |
| | const lastVersionKeys = Object.keys(lastVersionObj); |
| |
|
| | |
| | if (wouldBeKeys.length === 0 && lastVersionKeys.length === 0) { |
| | continue; |
| | } |
| |
|
| | |
| | if (JSON.stringify(wouldBeValue) !== JSON.stringify(lastVersionObj)) { |
| | isMatch = false; |
| | break; |
| | } |
| | } |
| | |
| | else { |
| | |
| | if (wouldBeValue !== lastVersionValue) { |
| | |
| | if ( |
| | typeof wouldBeValue === 'boolean' && |
| | wouldBeValue === false && |
| | lastVersionValue === undefined |
| | ) { |
| | continue; |
| | } |
| | |
| | if ( |
| | typeof wouldBeValue === 'string' && |
| | wouldBeValue === '' && |
| | lastVersionValue === undefined |
| | ) { |
| | continue; |
| | } |
| | isMatch = false; |
| | break; |
| | } |
| | } |
| | } |
| |
|
| | return isMatch ? lastVersion : null; |
| | }; |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Updates an agent and, when the update changes versioned fields, appends a
 * new entry to the agent's `versions` array.
 *
 * Versioning rules:
 * - `skipVersioning` suppresses version creation entirely.
 * - `forceVersion` creates a version even when the update duplicates the
 *   latest stored version.
 * - Otherwise, a duplicate of the latest version (per isDuplicateVersion,
 *   including the current action-metadata hash) returns the current agent
 *   unchanged.
 *
 * @param {Object} searchParameter - MongoDB query identifying the agent.
 * @param {Object} updateData - Update payload; may contain `$push`/`$pull`/
 *   `$addToSet` operators. NOTE: mutated in place when a version is created
 *   (a `versions` entry is merged into `$push`).
 * @param {Object} [options]
 * @param {string|null} [options.updatingUserId] - Recorded as `updatedBy` on the version.
 * @param {boolean} [options.forceVersion] - Always create a version entry.
 * @param {boolean} [options.skipVersioning] - Never create a version entry.
 * @returns {Promise<Object|null>} The updated agent as a plain object, or null
 *   when no agent matches (upsert is disabled).
 */
const updateAgent = async (searchParameter, updateData, options = {}) => {
  const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
  const mongoOptions = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    const {
      __v,
      _id,
      id: __id,
      versions,
      author: _author,
      ...versionData
    } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    let actionsHash = null;

    // Hash the current action metadata so duplicate detection can tell whether
    // linked actions changed since the last version.
    if (currentAgent.actions && currentAgent.actions.length > 0) {
      // Action entries look like `${domain}${actionDelimiter}${action_id}`;
      // extract the id portion for lookup.
      const actionIds = currentAgent.actions
        .map((action) => {
          const parts = action.split(actionDelimiter);
          return parts[1];
        })
        .filter(Boolean);

      if (actionIds.length > 0) {
        try {
          const actions = await getActions(
            {
              action_id: { $in: actionIds },
            },
            true,
          );

          actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
        } catch (error) {
          // Best-effort: a hashing failure only disables action-aware
          // duplicate detection for this update.
          logger.error('Error fetching actions for hash generation:', error);
        }
      }
    }

    // Any direct field change or array operator is version-worthy.
    const shouldCreateVersion =
      !skipVersioning &&
      (forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);

    if (shouldCreateVersion) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
      if (duplicateVersion && !forceVersion) {
        // No effective change: return the current agent without writing.
        const agentObj = currentAgent.toObject();
        agentObj.version = versions.length;
        return agentObj;
      }
    }

    // Snapshot of the agent as it will look after this update.
    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Store the action-metadata hash alongside the version for future comparisons.
    if (actionsHash) {
      versionEntry.actionsHash = actionsHash;
    }

    // Track who performed the update, when known.
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    if (shouldCreateVersion) {
      // Merge the version entry into any caller-supplied $push (mutates updateData).
      updateData.$push = {
        ...($push || {}),
        versions: versionEntry,
      };
    }
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Adds a file to one of an agent's tool resources, initializing the resource's
 * file_ids array on demand and registering the tool on the agent.
 * @param {Object} params
 * @param {Object} [params.req] - Express request (used for the updating user id).
 * @param {string} params.agent_id - Agent id.
 * @param {string} params.tool_resource - Tool resource key (e.g. file_search).
 * @param {string} params.file_id - File id to attach.
 * @returns {Promise<Object>} The updated agent.
 * @throws {Error} When the agent cannot be found.
 */
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  const agent = await getAgent(searchParameter);
  if (!agent) {
    throw new Error('Agent not found for adding resource file');
  }

  const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;

  // Create an empty file_ids array if this tool resource has never held files,
  // so the subsequent $addToSet targets an array.
  await Agent.updateOne(
    { id: agent_id, [fileIdsPath]: { $exists: false } },
    { $set: { [fileIdsPath]: [] } },
  );

  const updatedAgent = await updateAgent(
    searchParameter,
    {
      $addToSet: {
        tools: tool_resource,
        [fileIdsPath]: file_id,
      },
    },
    { updatingUserId: req?.user?.id },
  );

  if (!updatedAgent) {
    throw new Error('Agent not found for adding resource file');
  }
  return updatedAgent;
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Removes files from an agent's tool resources in a single atomic $pull.
 * (Also removes the previously unused `resourcesToCheck` bookkeeping.)
 * @param {Object} params
 * @param {string} params.agent_id - Agent id.
 * @param {Array<{ tool_resource: string, file_id: string }>} params.files - Files to detach.
 * @returns {Promise<Object>} The agent after the files were pulled.
 * @throws {Error} When the agent does not exist, or the pull update fails.
 */
const removeAgentResourceFiles = async ({ agent_id, files }) => {
  const searchParameter = { id: agent_id };

  // Group incoming file ids by their tool resource.
  const filesByResource = files.reduce((acc, { tool_resource, file_id }) => {
    if (!acc[tool_resource]) {
      acc[tool_resource] = [];
    }
    acc[tool_resource].push(file_id);
    return acc;
  }, {});

  // Build one $pull operation covering every affected resource path.
  const pullOps = {};
  for (const [resource, fileIds] of Object.entries(filesByResource)) {
    pullOps[`tool_resources.${resource}.file_ids`] = { $in: fileIds };
  }

  const agentAfterPull = await Agent.findOneAndUpdate(
    searchParameter,
    { $pull: pullOps },
    { new: true },
  ).lean();

  if (!agentAfterPull) {
    // Distinguish "agent missing" from "update failed/raced".
    const agentExists = await getAgent(searchParameter);
    if (!agentExists) {
      throw new Error('Agent not found for removing resource files');
    }
    throw new Error('Failed to update agent during file removal (pull step)');
  }

  return agentAfterPull;
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Deletes an agent and cleans up its project references and ACL permissions.
 * @param {Object} searchParameter - MongoDB query identifying the agent.
 * @returns {Promise<Object|null>} The deleted agent document, or null when none matched.
 */
const deleteAgent = async (searchParameter) => {
  const agent = await Agent.findOneAndDelete(searchParameter);
  if (!agent) {
    return agent;
  }

  await removeAgentFromAllProjects(agent.id);
  await removeAllPermissions({
    resourceType: ResourceType.AGENT,
    resourceId: agent._id,
  });
  return agent;
};
| |
|
| | |
| | |
| | |
| | |
| | |
/**
 * Deletes all agents owned by a user, including their project references and
 * ACL entries. Errors are logged rather than thrown (best-effort cleanup,
 * e.g. during account deletion).
 *
 * Improvement: per-agent project removals are independent, so they now run
 * concurrently instead of one-by-one.
 *
 * @param {string} userId - Author/user id.
 * @returns {Promise<void>}
 */
const deleteUserAgents = async (userId) => {
  try {
    const userAgents = await getAgents({ author: userId });
    if (userAgents.length === 0) {
      return;
    }

    const agentIds = userAgents.map((agent) => agent.id);
    const agentObjectIds = userAgents.map((agent) => agent._id);

    // Project removals are independent per agent; run them in parallel.
    await Promise.all(agentIds.map((agentId) => removeAgentFromAllProjects(agentId)));

    await AclEntry.deleteMany({
      resourceType: ResourceType.AGENT,
      resourceId: { $in: agentObjectIds },
    });

    await Agent.deleteMany({ author: userId });
  } catch (error) {
    logger.error('[deleteUserAgents] General error:', error);
  }
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Lists agents restricted to a set of accessible ids, with optional
 * cursor-based pagination (sorted by updatedAt desc, then _id asc).
 *
 * Fix: `parseInt` is now called with an explicit radix (Number.parseInt(limit, 10)).
 *
 * @param {Object} params
 * @param {Array} [params.accessibleIds] - Agent ObjectIds the caller may see.
 * @param {Object} [params.otherParams] - Additional query constraints.
 * @param {number|string|null} [params.limit] - Page size (clamped to 1-100);
 *   null/undefined disables pagination.
 * @param {string|null} [params.after] - Opaque base64 cursor from a previous page.
 * @returns {Promise<Object>} { object: 'list', data, first_id, last_id, has_more, after }
 */
const getListAgentsByAccess = async ({
  accessibleIds = [],
  otherParams = {},
  limit = null,
  after = null,
}) => {
  const isPaginated = limit !== null && limit !== undefined;
  // Clamp the page size to [1, 100]; fall back to 20 on unparseable input.
  const normalizedLimit = isPaginated
    ? Math.min(Math.max(1, Number.parseInt(limit, 10) || 20), 100)
    : null;

  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };

  // Apply the cursor: strictly older, or the same timestamp with a greater _id.
  if (after) {
    try {
      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
      const { updatedAt, _id } = cursor;

      const cursorCondition = {
        $or: [
          { updatedAt: { $lt: new Date(updatedAt) } },
          { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
        ],
      };

      // Combine with existing constraints via $and to avoid key collisions.
      if (Object.keys(baseQuery).length > 0) {
        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
        Object.keys(baseQuery).forEach((key) => {
          if (key !== '$and') delete baseQuery[key];
        });
      } else {
        Object.assign(baseQuery, cursorCondition);
      }
    } catch (error) {
      // Malformed cursors are ignored; the first page is returned instead.
      logger.warn('Invalid cursor:', error.message);
    }
  }

  let query = Agent.find(baseQuery, {
    id: 1,
    _id: 1,
    name: 1,
    avatar: 1,
    author: 1,
    projectIds: 1,
    description: 1,
    updatedAt: 1,
    category: 1,
    support_contact: 1,
    is_promoted: 1,
  }).sort({ updatedAt: -1, _id: 1 });

  // Fetch one extra document to detect whether another page exists.
  if (isPaginated) {
    query = query.limit(normalizedLimit + 1);
  }

  const agents = await query.lean();

  const hasMore = isPaginated ? agents.length > normalizedLimit : false;
  const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
    if (agent.author) {
      agent.author = agent.author.toString();
    }
    return agent;
  });

  // Encode the next cursor from the last item of the returned page.
  let nextCursor = null;
  if (isPaginated && hasMore && data.length > 0) {
    const lastAgent = agents[normalizedLimit - 1];
    nextCursor = Buffer.from(
      JSON.stringify({
        updatedAt: lastAgent.updatedAt.toISOString(),
        _id: lastAgent._id.toString(),
      }),
    ).toString('base64');
  }

  return {
    object: 'list',
    data,
    first_id: data.length > 0 ? data[0].id : null,
    last_id: data.length > 0 ? data[data.length - 1].id : null,
    has_more: hasMore,
    after: nextCursor,
  };
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Lists agents authored by the given user, plus any agents shared through the
 * global project, using a minimal projection.
 * @param {Object} searchParameter - Query; must include `author`.
 * @returns {Promise<Object>} { data, has_more, first_id, last_id }
 */
const getListAgents = async (searchParameter) => {
  const { author, ...otherParams } = searchParameter;

  let query = { author, ...otherParams };

  // Include globally shared agents regardless of author.
  const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, ['agentIds']);
  if (globalProject && (globalProject.agentIds?.length ?? 0) > 0) {
    const globalQuery = { id: { $in: globalProject.agentIds }, ...otherParams };
    delete globalQuery.author;
    query = { $or: [globalQuery, query] };
  }

  const projection = {
    id: 1,
    _id: 1,
    name: 1,
    avatar: 1,
    author: 1,
    projectIds: 1,
    description: 1,
    isCollaborative: 1,
    category: 1,
  };

  const rawAgents = await Agent.find(query, projection).lean();
  const agents = rawAgents.map((agent) => {
    // Hide other authors' identities; normalize the caller's own id to a string.
    if (agent.author?.toString() !== author) {
      delete agent.author;
    }
    if (agent.author) {
      agent.author = agent.author.toString();
    }
    return agent;
  });

  const firstId = agents.length > 0 ? agents[0].id : null;
  const lastId = agents.length > 0 ? agents[agents.length - 1].id : null;

  return {
    data: agents,
    has_more: agents.length > 0,
    first_id: firstId,
    last_id: lastId,
  };
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Adds and/or removes an agent from projects, keeping the agent document and
 * the project documents in sync; rolls back project changes if the agent
 * update fails (e.g. the caller is not the author).
 *
 * Improvement: per-project add/remove calls are independent, so they now run
 * concurrently instead of sequentially (both on apply and on rollback).
 *
 * @param {Object} params
 * @param {Object} params.user - Requesting user ({ id, role }).
 * @param {string} params.agentId - Agent id.
 * @param {string[]} [params.projectIds] - Projects to add the agent to.
 * @param {string[]} [params.removeProjectIds] - Projects to remove the agent from.
 * @returns {Promise<Object|null>} The updated (or current) agent.
 */
const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds }) => {
  const updateOps = {};

  if (removeProjectIds && removeProjectIds.length > 0) {
    await Promise.all(
      removeProjectIds.map((projectId) => removeAgentIdsFromProject(projectId, [agentId])),
    );
    updateOps.$pull = { projectIds: { $in: removeProjectIds } };
  }

  if (projectIds && projectIds.length > 0) {
    await Promise.all(projectIds.map((projectId) => addAgentIdsToProject(projectId, [agentId])));
    updateOps.$addToSet = { projectIds: { $each: projectIds } };
  }

  // Nothing to change: return the agent as-is.
  if (Object.keys(updateOps).length === 0) {
    return await getAgent({ id: agentId });
  }

  // Admins may modify any agent; other users only their own.
  const updateQuery = { id: agentId, author: user.id };
  if (user.role === SystemRoles.ADMIN) {
    delete updateQuery.author;
  }

  const updatedAgent = await updateAgent(updateQuery, updateOps, {
    updatingUserId: user.id,
    skipVersioning: true,
  });
  if (updatedAgent) {
    return updatedAgent;
  }

  // Agent update failed: roll back the project-side changes.
  if (updateOps.$addToSet) {
    await Promise.all(
      projectIds.map((projectId) => removeAgentIdsFromProject(projectId, [agentId])),
    );
  } else if (updateOps.$pull) {
    await Promise.all(
      removeProjectIds.map((projectId) => addAgentIdsToProject(projectId, [agentId])),
    );
  }

  return await getAgent({ id: agentId });
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Reverts an agent's fields to a previously stored version snapshot.
 * Bookkeeping fields (_id, id, versions, author, updatedBy) are never reverted.
 * @param {Object} searchParameter - MongoDB query identifying the agent.
 * @param {number} versionIndex - Index into the agent's versions array.
 * @returns {Promise<Object>} The updated agent as a plain object.
 * @throws {Error} When the agent or the requested version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  // Copy the stored version, then strip fields that must not be overwritten.
  const updateData = { ...agent.versions[versionIndex] };
  for (const field of ['_id', 'id', 'versions', 'author', 'updatedBy']) {
    delete updateData[field];
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
/**
 * Generates a deterministic SHA-256 hex hash of action metadata, used to
 * detect whether an agent's linked actions changed between versions.
 *
 * Improvement: uses node:crypto's synchronous `createHash` instead of the
 * async `crypto.webcrypto.subtle.digest` plus manual byte-to-hex folding;
 * the hex output is identical.
 *
 * @param {string[]} actionIds - Entries of the form
 *   `${domain}${actionDelimiter}${action_id}`.
 * @param {Array<{ action_id: string, metadata: Object }>} actions - Loaded
 *   action documents providing metadata per action id.
 * @returns {Promise<string>} Hex-encoded SHA-256 hash, or '' when there are no actions.
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // Index metadata by action id for O(1) lookup.
  const actionMap = new Map();
  for (const action of actions) {
    actionMap.set(action.action_id, action.metadata);
  }

  // Sort ids so the hash is independent of stored ordering.
  const metadataString = [...actionIds]
    .sort()
    .map((actionFullId) => {
      // Entries are `${domain}${actionDelimiter}${action_id}`; hash by the id part.
      const parts = actionFullId.split(actionDelimiter);
      const actionId = parts[1];

      const metadata = actionMap.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }

      // Serialize metadata with sorted keys for determinism.
      const metadataStr = Object.keys(metadata)
        .sort()
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${metadataStr}}`;
    })
    .join(';');

  return crypto.createHash('sha256').update(metadataString, 'utf8').digest('hex');
};
| | |
| | |
| | |
| | |
/**
 * Counts agents flagged as promoted.
 * @returns {Promise<number>} Number of agents with `is_promoted: true`.
 */
const countPromotedAgents = async () => Agent.countDocuments({ is_promoted: true });
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
/** Public API of the Agent model module. */
module.exports = {
  getAgent,
  getAgents,
  loadAgent,
  createAgent,
  updateAgent,
  deleteAgent,
  deleteUserAgents,
  getListAgents,
  revertAgentVersion,
  updateAgentProjects,
  addAgentResourceFile,
  getListAgentsByAccess,
  removeAgentResourceFiles,
  generateActionMetadataHash,
  countPromotedAgents,
};
| |
|