From c14ab0c37a029a3a36f8667a103a5a75a4329b87 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Thu, 21 Nov 2024 16:35:17 +0700 Subject: [PATCH 01/20] refactor(backend): Remove un-needed join in `fix_llm_provider_credentials` query (#8728) --- autogpt_platform/backend/backend/data/graph.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/autogpt_platform/backend/backend/data/graph.py b/autogpt_platform/backend/backend/data/graph.py index 4d3108693..cc253af59 100644 --- a/autogpt_platform/backend/backend/data/graph.py +++ b/autogpt_platform/backend/backend/data/graph.py @@ -547,16 +547,14 @@ async def fix_llm_provider_credentials(): broken_nodes = await prisma.get_client().query_raw( """ - SELECT "User".id user_id, + SELECT graph."userId" user_id, node.id node_id, node."constantInput" node_preset_input FROM platform."AgentNode" node LEFT JOIN platform."AgentGraph" graph ON node."agentGraphId" = graph.id - LEFT JOIN platform."User" "User" - ON graph."userId" = "User".id WHERE node."constantInput"::jsonb->'credentials'->>'provider' = 'llm' - ORDER BY user_id; + ORDER BY graph."userId"; """ ) logger.info(f"Fixing LLM credential inputs on {len(broken_nodes)} nodes") From 6954f4eb0ebcc615a901f6ccdf1e5abdd59b2642 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Thu, 21 Nov 2024 17:46:55 +0700 Subject: [PATCH 02/20] fix(backend): Revert non-async routes that are changed to async (#8734) --- autogpt_platform/backend/backend/server/rest_api.py | 4 +--- autogpt_platform/backend/backend/server/routers/v1.py | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 2436e4115..59b7f04af 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -91,9 +91,7 @@ class AgentServer(backend.util.service.AppProcess): async def test_execute_graph( graph_id: str, node_input: dict[typing.Any, typing.Any], user_id: str ): - return await backend.server.routers.v1.execute_graph( - graph_id, node_input, user_id - ) + return backend.server.routers.v1.execute_graph(graph_id, node_input, user_id) @staticmethod async def test_create_graph( diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/server/routers/v1.py index 11d4c6a73..a44f25c1a 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/server/routers/v1.py @@ -276,7 +276,7 @@ async def set_graph_active_version( tags=["graphs"], dependencies=[Depends(auth_middleware)], ) -async def execute_graph( +def execute_graph( graph_id: str, node_input: dict[Any, Any], user_id: Annotated[str, Depends(get_user_id)], @@ -481,7 +481,7 @@ async def create_schedule( tags=["schedules"], dependencies=[Depends(auth_middleware)], ) -async def delete_schedule( +def delete_schedule( schedule_id: str, user_id: Annotated[str, Depends(get_user_id)], ) -> dict[Any, Any]: @@ -494,7 +494,7 @@ async def delete_schedule( tags=["schedules"], dependencies=[Depends(auth_middleware)], ) -async def get_execution_schedules( +def get_execution_schedules( user_id: Annotated[str, Depends(get_user_id)], graph_id: str | None = None, ) -> list[scheduler.JobInfo]: From 8b4bb27077ef33352e5e552fd39b457059eeb5c8 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Thu, 21 Nov 2024 18:05:41 +0700 Subject: [PATCH 03/20] fix(backend): Re-work the connection input consumption logic for Agent Executor Block 
 (#8710)
---
 .../backend/backend/data/block.py             | 10 +---
 .../backend/backend/executor/manager.py       | 50 ++++++++++++++-----
 autogpt_platform/backend/backend/util/json.py | 15 ++++++
 3 files changed, 53 insertions(+), 22 deletions(-)

diff --git a/autogpt_platform/backend/backend/data/block.py b/autogpt_platform/backend/backend/data/block.py
index 05e20ea0b..f86eee084 100644
--- a/autogpt_platform/backend/backend/data/block.py
+++ b/autogpt_platform/backend/backend/data/block.py
@@ -94,15 +94,7 @@ class BlockSchema(BaseModel):
 
     @classmethod
     def validate_data(cls, data: BlockInput) -> str | None:
-        """
-        Validate the data against the schema.
-        Returns the validation error message if the data does not match the schema.
-        """
-        try:
-            jsonschema.validate(data, cls.jsonschema())
-            return None
-        except jsonschema.ValidationError as e:
-            return str(e)
+        return json.validate_with_jsonschema(schema=cls.jsonschema(), data=data)
 
     @classmethod
     def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py
index b87054482..2d1e2d223 100644
--- a/autogpt_platform/backend/backend/executor/manager.py
+++ b/autogpt_platform/backend/backend/executor/manager.py
@@ -18,6 +18,7 @@ if TYPE_CHECKING:
 
 from autogpt_libs.utils.cache import thread_cached
 
+from backend.blocks.agent import AgentExecutorBlock
 from backend.data import redis
 from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
 from backend.data.execution import (
@@ -135,7 +136,6 @@ def execute_node(
         logger.error(f"Block {node.block_id} not found.")
         return
 
-    # Sanity check: validate the execution input.
     log_metadata = LogMetadata(
         user_id=user_id,
         graph_eid=graph_exec_id,
@@ -144,11 +144,20 @@
         node_id=node_id,
         block_name=node_block.name,
     )
+
+    # Sanity check: validate the execution input.
     input_data, error = validate_exec(node, data.data, resolve_input=False)
     if input_data is None:
         log_metadata.error(f"Skip execution, input validation error: {error}")
+        db_client.upsert_execution_output(node_exec_id, "error", error)
+        update_execution(ExecutionStatus.FAILED)
         return
 
+    # Re-shape the input data for the agent block.
+    # AgentExecutorBlock specially separates the node's input_data from its input_default.
+    if isinstance(node_block, AgentExecutorBlock):
+        input_data = {**node.input_default, "data": input_data}
+
     # Execute the node
     input_data_str = json.dumps(input_data)
     input_size = len(input_data_str)
@@ -376,31 +385,46 @@ def validate_exec(
     if not node_block:
         return None, f"Block for {node.block_id} not found."
 
-    error_prefix = f"Input data missing for {node_block.name}:"
+    if isinstance(node_block, AgentExecutorBlock):
+        # Validate the execution metadata for the agent executor block.
+        try:
+            exec_data = AgentExecutorBlock.Input(**node.input_default)
+        except Exception as e:
+            return None, f"Input data doesn't match {node_block.name}: {str(e)}"
+
+        # Validate the input
+        input_schema = exec_data.input_schema
+        required_fields = set(input_schema["required"])
+        input_default = exec_data.data
+    else:
+        # Convert non-matching data types to the expected input schema.
+        for name, data_type in node_block.input_schema.__annotations__.items():
+            if (value := data.get(name)) and (type(value) is not data_type):
+                data[name] = convert(value, data_type)
+
+        # Validate the input
+        input_schema = node_block.input_schema.jsonschema()
+        required_fields = node_block.input_schema.get_required_fields()
+        input_default = node.input_default
 
     # Input data (without default values) should contain all required fields.
+    error_prefix = f"Input data missing or mismatch for `{node_block.name}`:"
     input_fields_from_nodes = {link.sink_name for link in node.input_links}
     if not input_fields_from_nodes.issubset(data):
         return None, f"{error_prefix} {input_fields_from_nodes - set(data)}"
 
     # Merge input data with default values and resolve dynamic dict/list/object pins.
-    data = {**node.input_default, **data}
+    data = {**input_default, **data}
     if resolve_input:
         data = merge_execution_input(data)
 
     # Input data post-merge should contain all required fields from the schema.
-    input_fields_from_schema = node_block.input_schema.get_required_fields()
-    if not input_fields_from_schema.issubset(data):
-        return None, f"{error_prefix} {input_fields_from_schema - set(data)}"
-
-    # Convert non-matching data types to the expected input schema.
-    for name, data_type in node_block.input_schema.__annotations__.items():
-        if (value := data.get(name)) and (type(value) is not data_type):
-            data[name] = convert(value, data_type)
+    if not required_fields.issubset(data):
+        return None, f"{error_prefix} {required_fields - set(data)}"
 
     # Last validation: Validate the input values against the schema.
-    if error := node_block.input_schema.validate_data(data):
-        error_message = f"Input data doesn't match {node_block.name}: {error}"
+    if error := json.validate_with_jsonschema(schema=input_schema, data=data):
+        error_message = f"{error_prefix} {error}"
         logger.error(error_message)
         return None, error_message
 
diff --git a/autogpt_platform/backend/backend/util/json.py b/autogpt_platform/backend/backend/util/json.py
index f8fb6f2fc..7f8891741 100644
--- a/autogpt_platform/backend/backend/util/json.py
+++ b/autogpt_platform/backend/backend/util/json.py
@@ -1,6 +1,7 @@
 import json
 from typing import Any, Type, TypeVar, overload
 
+import jsonschema
 from fastapi.encoders import jsonable_encoder
 
 from .type import type_match
@@ -30,3 +31,17 @@ def loads(data: str, *args, target_type: Type[T] | None = None, **kwargs) -> Any
     if target_type:
         return type_match(parsed, target_type)
     return parsed
+
+
+def validate_with_jsonschema(
+    schema: dict[str, Any], data: dict[str, Any]
+) -> str | None:
+    """
+    Validate the data against the schema.
+    Returns the validation error message if the data does not match the schema.
+    """
+    try:
+        jsonschema.validate(data, schema)
+        return None
+    except jsonschema.ValidationError as e:
+        return str(e)
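
The helper extracted above keeps the old contract: it returns None when the data conforms and the jsonschema error message when it does not, which is what lets both `BlockSchema.validate_data` and the reworked `validate_exec` share it. A standalone sketch of that behavior (the schema below is made up for illustration; only the function body comes from this patch):

    import jsonschema

    def validate_with_jsonschema(schema: dict, data: dict) -> str | None:
        # Same logic as backend/util/json.py above, inlined to be runnable on its own.
        try:
            jsonschema.validate(data, schema)
            return None
        except jsonschema.ValidationError as e:
            return str(e)

    schema = {"type": "object", "required": ["credentials"]}  # illustrative schema
    assert validate_with_jsonschema(schema, {"credentials": {}}) is None
    assert "credentials" in (validate_with_jsonschema(schema, {}) or "")
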
+ """ + try: + jsonschema.validate(data, schema) + return None + except jsonschema.ValidationError as e: + return str(e) From 5ee8b62d67f84555d311a291fdb191ec13b8bed7 Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Thu, 21 Nov 2024 21:27:35 +0530 Subject: [PATCH 04/20] fix: hide content except login when not authenticated to prevent errors (#8398) * fix: hide content except login when not authenticated to prevent errors * Remove supabase folder from tracking * Remove supabase folder from Git tracking * adding git submodule * adding git submodule * Discard changes to .gitignore * only showing AutoGPT logo if user is not present --------- Co-authored-by: Nicholas Tindle Co-authored-by: Nicholas Tindle Co-authored-by: Swifty Co-authored-by: Toran Bruce Richards --- .../frontend/src/app/build/page.tsx | 12 ++++++------ .../frontend/src/components/NavBar.tsx | 17 +++++++++++++++-- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/autogpt_platform/frontend/src/app/build/page.tsx b/autogpt_platform/frontend/src/app/build/page.tsx index 6b5ea497e..55302556a 100644 --- a/autogpt_platform/frontend/src/app/build/page.tsx +++ b/autogpt_platform/frontend/src/app/build/page.tsx @@ -1,16 +1,16 @@ "use client"; import { useSearchParams } from "next/navigation"; -import FlowEditor from '@/components/Flow'; +import FlowEditor from "@/components/Flow"; export default function Home() { const query = useSearchParams(); return ( - + ); } diff --git a/autogpt_platform/frontend/src/components/NavBar.tsx b/autogpt_platform/frontend/src/components/NavBar.tsx index b22cbd48e..d7b4c1ef7 100644 --- a/autogpt_platform/frontend/src/components/NavBar.tsx +++ b/autogpt_platform/frontend/src/components/NavBar.tsx @@ -7,7 +7,6 @@ import getServerUser from "@/hooks/getServerUser"; import ProfileDropdown from "./ProfileDropdown"; import { IconCircleUser, IconMenu } from "@/components/ui/icons"; import CreditButton from "@/components/nav/CreditButton"; - import { NavBarButtons } from "./nav/NavBarButtons"; export async function NavBar() { @@ -17,7 +16,7 @@ export async function NavBar() { ); const { user } = await getServerUser(); - return ( + return user ? (
@@ -67,5 +66,19 @@ export async function NavBar() {
         {isAvailable && user && <CreditButton />}
+ ) : ( + ); } From bc8ae1f5427736db8e778edc9e8e044753a51d0c Mon Sep 17 00:00:00 2001 From: thecosmicmuffet <72475634+thecosmicmuffet@users.noreply.github.com> Date: Fri, 22 Nov 2024 18:37:41 -0800 Subject: [PATCH 05/20] docs(platform): Fix url in `README.md` (#8747) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a323729ad..9cbaefdb2 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ The AutoGPT frontend is where users interact with our powerful AI automation pla **Monitoring and Analytics:** Keep track of your agents' performance and gain insights to continually improve your automation processes. -[Read this guide](https://docs.agpt.co/server/new_blocks/) to learn how to build your own custom blocks. +[Read this guide](https://docs.agpt.co/platform/new_blocks/) to learn how to build your own custom blocks. ### 💽 AutoGPT Server From f00654cb2ca20288ceff825a6e774252c61495c7 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Mon, 25 Nov 2024 16:55:52 +0700 Subject: [PATCH 06/20] fix(backend): Fix .env file read contention on pyro connection setup (#8736) --- autogpt_platform/backend/backend/data/credit.py | 3 ++- autogpt_platform/backend/backend/executor/database.py | 3 ++- autogpt_platform/backend/backend/executor/scheduler.py | 3 ++- autogpt_platform/backend/backend/util/retry.py | 5 +++-- autogpt_platform/backend/backend/util/service.py | 9 ++++----- 5 files changed, 13 insertions(+), 10 deletions(-) diff --git a/autogpt_platform/backend/backend/data/credit.py b/autogpt_platform/backend/backend/data/credit.py index fbb79a54a..4112310cd 100644 --- a/autogpt_platform/backend/backend/data/credit.py +++ b/autogpt_platform/backend/backend/data/credit.py @@ -11,6 +11,8 @@ from backend.data.block_cost_config import BLOCK_COSTS from backend.data.cost import BlockCost, BlockCostType from backend.util.settings import Config +config = Config() + class UserCreditBase(ABC): def __init__(self, num_user_credits_refill: int): @@ -202,7 +204,6 @@ class DisabledUserCredit(UserCreditBase): def get_user_credit_model() -> UserCreditBase: - config = Config() if config.enable_credit.lower() == "true": return UserCredit(config.num_user_credits_refill) else: diff --git a/autogpt_platform/backend/backend/executor/database.py b/autogpt_platform/backend/backend/executor/database.py index db6bd4942..2597429b3 100644 --- a/autogpt_platform/backend/backend/executor/database.py +++ b/autogpt_platform/backend/backend/executor/database.py @@ -27,6 +27,7 @@ from backend.util.settings import Config P = ParamSpec("P") R = TypeVar("R") +config = Config() class DatabaseManager(AppService): @@ -38,7 +39,7 @@ class DatabaseManager(AppService): @classmethod def get_port(cls) -> int: - return Config().database_api_port + return config.database_api_port @expose def send_execution_update(self, execution_result: ExecutionResult): diff --git a/autogpt_platform/backend/backend/executor/scheduler.py b/autogpt_platform/backend/backend/executor/scheduler.py index 50e92f5f9..3c906a3af 100644 --- a/autogpt_platform/backend/backend/executor/scheduler.py +++ b/autogpt_platform/backend/backend/executor/scheduler.py @@ -38,6 +38,7 @@ def _extract_schema_from_url(database_url) -> tuple[str, str]: logger = logging.getLogger(__name__) +config = Config() def log(msg, **kwargs): @@ -96,7 +97,7 @@ class ExecutionScheduler(AppService): @classmethod def get_port(cls) -> int: - return Config().execution_scheduler_port + return config.execution_scheduler_port @property @thread_cached diff 
--git a/autogpt_platform/backend/backend/util/retry.py b/autogpt_platform/backend/backend/util/retry.py
index 5a451726c..bbc739e41 100644
--- a/autogpt_platform/backend/backend/util/retry.py
+++ b/autogpt_platform/backend/backend/util/retry.py
@@ -1,5 +1,6 @@
 import logging
 import os
+import threading
 from functools import wraps
 from uuid import uuid4
 
@@ -16,7 +17,7 @@ def _log_prefix(resource_name: str, conn_id: str):
     This needs to be called on the fly to get the current process ID & service name,
     not the parent process ID & service name.
     """
-    return f"[PID-{os.getpid()}|{get_service_name()}|{resource_name}-{conn_id}]"
+    return f"[PID-{os.getpid()}|THREAD-{threading.get_native_id()}|{get_service_name()}|{resource_name}-{conn_id}]"
 
 
 def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
@@ -25,7 +26,7 @@ def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
     def on_retry(retry_state):
         prefix = _log_prefix(resource_name, conn_id)
         exception = retry_state.outcome.exception()
-        logger.info(f"{prefix} {action_name} failed: {exception}. Retrying now...")
+        logger.error(f"{prefix} {action_name} failed: {exception}. Retrying now...")
 
     def decorator(func):
         @wraps(func)
diff --git a/autogpt_platform/backend/backend/util/service.py b/autogpt_platform/backend/backend/util/service.py
index 0333f4418..a0b6bde40 100644
--- a/autogpt_platform/backend/backend/util/service.py
+++ b/autogpt_platform/backend/backend/util/service.py
@@ -120,7 +120,7 @@ class AppService(AppProcess, ABC):
 
     @classmethod
     def get_host(cls) -> str:
-        return os.environ.get(f"{cls.service_name.upper()}_HOST", Config().pyro_host)
+        return os.environ.get(f"{cls.service_name.upper()}_HOST", config.pyro_host)
 
     def run_service(self) -> None:
         while True:
@@ -170,14 +170,13 @@
 
     @conn_retry("Pyro", "Starting Pyro Service")
     def __start_pyro(self):
-        conf = Config()
         maximum_connection_thread_count = max(
             Pyro5.config.THREADPOOL_SIZE,
-            conf.num_node_workers * conf.num_graph_workers,
+            config.num_node_workers * config.num_graph_workers,
         )
 
         Pyro5.config.THREADPOOL_SIZE = maximum_connection_thread_count  # type: ignore
-        daemon = Pyro5.api.Daemon(host=conf.pyro_host, port=self.get_port())
+        daemon = Pyro5.api.Daemon(host=config.pyro_host, port=self.get_port())
         self.uri = daemon.register(self, objectId=self.service_name)
         logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
         daemon.requestLoop()
@@ -209,7 +208,7 @@ def get_service_client(service_type: Type[AS]) -> AS:
     class DynamicClient(PyroClient):
         @conn_retry("Pyro", f"Connecting to [{service_name}]")
         def __init__(self):
-            host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
+            host = os.environ.get(f"{service_name.upper()}_HOST", pyro_host)
            uri = f"PYRO:{service_type.service_name}@{host}:{service_type.get_port()}"
             logger.debug(f"Connecting to service [{service_name}]. URI = {uri}")
             self.proxy = Pyro5.api.Proxy(uri)
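
With `Config()` now shared at module scope, the services above no longer re-read the .env file on every Pyro connection setup, which also means concurrent setups run inside a single process; that is why the retry log prefix gains the native thread ID. A standalone sketch of the new prefix format (the service name is hard-coded here; the real helper resolves it via `get_service_name()`):

    import os
    import threading

    def _log_prefix(resource_name: str, conn_id: str) -> str:
        # Same shape as the patched backend/util/retry.py helper; THREAD-<id>
        # tells apart concurrent connection attempts within one process.
        service_name = "ExecutionManager"  # stand-in for get_service_name()
        return (
            f"[PID-{os.getpid()}|THREAD-{threading.get_native_id()}"
            f"|{service_name}|{resource_name}-{conn_id}]"
        )

    print(_log_prefix("Pyro", "1b2f"))
    # e.g. [PID-4242|THREAD-4243|ExecutionManager|Pyro-1b2f]
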
URI = {uri}") self.proxy = Pyro5.api.Proxy(uri) From 464b5309d71b0aece7c98919a73a9ba7c2eab8ec Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Mon, 25 Nov 2024 16:41:50 +0100 Subject: [PATCH 07/20] fix(forge): Fix double `model` kwarg error in `AnthropicProvider.create_chat_completion(..)` (#8666) --- classic/forge/forge/llm/providers/anthropic.py | 1 - 1 file changed, 1 deletion(-) diff --git a/classic/forge/forge/llm/providers/anthropic.py b/classic/forge/forge/llm/providers/anthropic.py index 3faf1262a..9810e045d 100644 --- a/classic/forge/forge/llm/providers/anthropic.py +++ b/classic/forge/forge/llm/providers/anthropic.py @@ -180,7 +180,6 @@ class AnthropicProvider(BaseChatModelProvider[AnthropicModelName, AnthropicSetti """Create a completion using the Anthropic API.""" anthropic_messages, completion_kwargs = self._get_chat_completion_args( prompt_messages=model_prompt, - model=model_name, functions=functions, max_output_tokens=max_output_tokens, **kwargs, From eef9bbe991d97ab9e06e4d633657d709639f6feb Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Mon, 25 Nov 2024 18:42:36 +0100 Subject: [PATCH 08/20] feat(platform, blocks): Webhook-triggered blocks (#8358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - feat(blocks): Add GitHub Pull Request Trigger block ## feat(platform): Add support for Webhook-triggered blocks - ⚠️ Add `PLATFORM_BASE_URL` setting - Add webhook config option and `BlockType.WEBHOOK` to `Block` - Add check to `Block.__init__` to enforce type and shape of webhook event filter - Add check to `Block.__init__` to enforce `payload` input on webhook blocks - Add check to `Block.__init__` to disable webhook blocks if `PLATFORM_BASE_URL` is not set - Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id` - Add `set_node_webhook(..)` in `backend.data.graph` - Add webhook-related endpoints: - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads, and for all associated nodes create graph executions - Add `Node.is_triggered_by_event_type(..)` helper method - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations` - Add `backend.integrations.webhooks` module, including: - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc. - Add calls to these hooks in the graph create/update endpoints - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads ## Other improvements - fix(blocks): Allow having an input and output pin with the same name - fix(blocks): Add tooltip with description in places where block inputs are rendered without `NodeHandle` - feat(blocks): Allow hiding inputs (e.g. 
`payload`) with `SchemaField(hidden=True)` - fix(frontend): Fix `MultiSelector` component styling - feat(frontend): Add `AlertDialog` UI component - feat(frontend): Add `NodeMultiSelectInput` component - feat(backend/data): Add `NodeModel` with `graph_id`, `graph_version`; `GraphModel` with `user_id` - Add `make_graph_model(..)` helper function in `backend.data.graph` - refactor(backend/data): Make `RedisEventQueue` generic and move to `backend.data.execution` - refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode` - dx(backend): Add `MissingConfigError`, `NeedConfirmation` exception --------- Co-authored-by: Zamil Majdy --- autogpt_platform/backend/.env.example | 11 +- .../backend/backend/blocks/__init__.py | 7 - .../backend/backend/blocks/agent.py | 2 +- .../pull_request.synchronize.json | 700 ++++++++++++++++++ .../backend/backend/blocks/github/triggers.py | 156 ++++ .../backend/backend/data/block.py | 71 +- .../backend/backend/data/execution.py | 44 +- .../backend/backend/data/graph.py | 215 ++++-- .../backend/backend/data/includes.py | 5 + .../backend/backend/data/integrations.py | 168 +++++ .../backend/backend/data/model.py | 2 + .../backend/backend/data/queue.py | 41 +- .../backend/backend/executor/database.py | 7 +- .../backend/backend/executor/manager.py | 19 +- .../backend/integrations/creds_manager.py | 7 +- .../backend/backend/integrations/providers.py | 7 + .../backend/integrations/webhooks/__init__.py | 17 + .../backend/integrations/webhooks/base.py | 163 ++++ .../backend/integrations/webhooks/github.py | 175 +++++ .../webhooks/graph_lifecycle_hooks.py | 198 +++++ .../backend/server/integrations/router.py | 129 +++- .../backend/backend/server/rest_api.py | 33 +- .../backend/backend/server/routers/v1.py | 172 +++-- .../backend/backend/server/ws_api.py | 2 +- .../backend/backend/util/exceptions.py | 6 + .../backend/backend/util/service.py | 15 +- .../backend/backend/util/settings.py | 24 +- autogpt_platform/backend/backend/util/test.py | 5 +- .../migration.sql | 26 + autogpt_platform/backend/schema.prisma | 27 + autogpt_platform/frontend/package.json | 1 + autogpt_platform/frontend/src/app/globals.css | 2 +- .../frontend/src/app/profile/page.tsx | 85 ++- .../frontend/src/components/CustomNode.tsx | 52 +- .../frontend/src/components/flow.css | 16 - .../integrations/credentials-input.tsx | 11 +- .../integrations/credentials-provider.tsx | 22 +- .../src/components/node-input-components.tsx | 91 ++- .../src/components/ui/alert-dialog.tsx | 143 ++++ .../src/components/ui/multiselect.tsx | 10 +- .../src/lib/autogpt-server-api/baseClient.ts | 25 +- .../src/lib/autogpt-server-api/types.ts | 9 + autogpt_platform/frontend/yarn.lock | 23 +- docs/content/platform/new_blocks.md | 146 +++- 44 files changed, 2788 insertions(+), 302 deletions(-) create mode 100644 autogpt_platform/backend/backend/blocks/github/example_payloads/pull_request.synchronize.json create mode 100644 autogpt_platform/backend/backend/blocks/github/triggers.py create mode 100644 autogpt_platform/backend/backend/data/integrations.py create mode 100644 autogpt_platform/backend/backend/integrations/providers.py create mode 100644 autogpt_platform/backend/backend/integrations/webhooks/__init__.py create mode 100644 autogpt_platform/backend/backend/integrations/webhooks/base.py create mode 100644 autogpt_platform/backend/backend/integrations/webhooks/github.py create mode 100644 autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py 
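
Among the files created above is `backend/integrations/webhooks/github.py`, whose payload validation has to implement GitHub's standard delivery-signing scheme: an HMAC-SHA256 over the raw request body, delivered in the `X-Hub-Signature-256` header. A generic sketch of that check (not the manager's actual code, which is outside this excerpt):

    import hashlib
    import hmac

    def is_valid_github_signature(secret: str, body: bytes, signature_header: str) -> bool:
        # Header format: "sha256=" + hex digest of HMAC-SHA256(secret, raw body).
        expected = "sha256=" + hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
        # compare_digest avoids leaking timing information to the caller.
        return hmac.compare_digest(expected, signature_header)
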
diff --git a/autogpt_platform/backend/.env.example b/autogpt_platform/backend/.env.example
index b6d41c25d..0dd10e838 100644
--- a/autogpt_platform/backend/.env.example
+++ b/autogpt_platform/backend/.env.example
@@ -28,8 +28,15 @@ SUPABASE_URL=http://localhost:8000
 SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
 SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
 
-# For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow for integrations to work.
-FRONTEND_BASE_URL=http://localhost:3000
+## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
+## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
+# FRONTEND_BASE_URL=http://localhost:3000
+
+## PLATFORM_BASE_URL must be set to a *publicly accessible* URL pointing to your backend
+## to use the platform's webhook-related functionality.
+## If you are developing locally, you can use something like ngrok to get a public URL
+## and tunnel it to your locally running backend.
+PLATFORM_BASE_URL=https://your-public-url-here
 
 ## == INTEGRATION CREDENTIALS == ##
 # Each set of server side credentials is required for the corresponding 3rd party
diff --git a/autogpt_platform/backend/backend/blocks/__init__.py b/autogpt_platform/backend/backend/blocks/__init__.py
index 4fb89e395..03b4a9701 100644
--- a/autogpt_platform/backend/backend/blocks/__init__.py
+++ b/autogpt_platform/backend/backend/blocks/__init__.py
@@ -60,13 +60,6 @@ for block_cls in all_subclasses(Block):
     input_schema = block.input_schema.model_fields
     output_schema = block.output_schema.model_fields
 
-    # Prevent duplicate field name in input_schema and output_schema
-    duplicate_field_names = set(input_schema.keys()) & set(output_schema.keys())
-    if duplicate_field_names:
-        raise ValueError(
-            f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
-        )
-
     # Make sure `error` field is a string in the output schema
     if "error" in output_schema and output_schema["error"].annotation is not str:
         raise ValueError(
diff --git a/autogpt_platform/backend/backend/blocks/agent.py b/autogpt_platform/backend/backend/blocks/agent.py
index ec5c2efd6..afbe410e4 100644
--- a/autogpt_platform/backend/backend/blocks/agent.py
+++ b/autogpt_platform/backend/backend/blocks/agent.py
@@ -27,7 +27,7 @@ def get_executor_manager_client():
 
 @thread_cached
 def get_event_bus():
-    from backend.data.queue import RedisExecutionEventBus
+    from backend.data.execution import RedisExecutionEventBus
 
     return RedisExecutionEventBus()
diff --git a/autogpt_platform/backend/backend/blocks/github/example_payloads/pull_request.synchronize.json b/autogpt_platform/backend/backend/blocks/github/example_payloads/pull_request.synchronize.json
new file mode 100644
index 000000000..7d8f8efbe
--- /dev/null
+++ b/autogpt_platform/backend/backend/blocks/github/example_payloads/pull_request.synchronize.json
@@ -0,0 +1,700 @@
+{
+  "action": "synchronize",
+  "number": 8358,
+  "pull_request": {
+    "url":
"https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358", + "id": 2128918491, + "node_id": "PR_kwDOJKSTjM5-5Lfb", + "html_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358", + "diff_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.diff", + "patch_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.patch", + "issue_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358", + "number": 8358, + "state": "open", + "locked": false, + "title": "feat(platform, blocks): Webhook-triggered blocks", + "user": { + "login": "Pwuts", + "id": 12185583, + "node_id": "MDQ6VXNlcjEyMTg1NTgz", + "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Pwuts", + "html_url": "https://github.com/Pwuts", + "followers_url": "https://api.github.com/users/Pwuts/followers", + "following_url": "https://api.github.com/users/Pwuts/following{/other_user}", + "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions", + "organizations_url": "https://api.github.com/users/Pwuts/orgs", + "repos_url": "https://api.github.com/users/Pwuts/repos", + "events_url": "https://api.github.com/users/Pwuts/events{/privacy}", + "received_events_url": "https://api.github.com/users/Pwuts/received_events", + "type": "User", + "user_view_type": "public", + "site_admin": false + }, + "body": "- Resolves #8352\r\n\r\n## Changes 🏗️\r\n\r\n- feat(blocks): Add GitHub Pull Request Trigger block\r\n\r\n### feat(platform): Add support for Webhook-triggered blocks\r\n- ⚠️ Add `PLATFORM_BASE_URL` setting\r\n\r\n- Add webhook config option and `BlockType.WEBHOOK` to `Block`\r\n - Add check to `Block.__init__` to enforce type and shape of webhook event filter\r\n - Add check to `Block.__init__` to enforce `payload` input on webhook blocks\r\n\r\n- Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system\r\n - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id`\r\n - Add `set_node_webhook(..)` in `backend.data.graph`\r\n\r\n- Add webhook-related endpoints:\r\n - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads, and for all associated nodes create graph executions\r\n - Add `Node.is_triggered_by_event_type(..)` helper method\r\n - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook\r\n - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations`\r\n\r\n- Add `backend.integrations.webhooks` module, including:\r\n - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc.\r\n - Add calls to these hooks in the graph create/update endpoints\r\n - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads\r\n\r\n### Other improvements\r\n- fix(blocks): Allow having an input and output pin with the same name\r\n- feat(blocks): Allow hiding inputs (e.g. 
`payload`) with `SchemaField(hidden=True)`\r\n- feat(backend/data): Add `graph_id`, `graph_version` to `Node`; `user_id` to `GraphMeta`\r\n - Add `Creatable` versions of `Node`, `GraphMeta` and `Graph` without these properties\r\n - Add `graph_from_creatable(..)` helper function in `backend.data.graph`\r\n- refactor(backend/data): Make `RedisEventQueue` generic\r\n- refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode`\r\n- refactor(backend): Remove unused subgraph functionality\r\n\r\n## How it works\r\n- When a graph is created, the `on_graph_activate` and `on_node_activate` hooks are called on the graph and its nodes\r\n- If a webhook-triggered node has presets for all the relevant inputs, `on_node_activate` will get/create a suitable webhook and link it by setting `AgentGraphNode.webhook_id`\r\n - `on_node_activate` uses `webhook_manager.get_suitable_webhook(..)`, which tries to find a suitable webhook (with matching requirements) or creates it if none exists yet\r\n- When a graph is deactivated (in favor of a newer/other version) or deleted, `on_graph_deactivate` and `on_node_deactivate` are called on the graph and its nodes to clean up webhooks that are no longer in use\r\n- When a valid webhook payload is received, two things happen:\r\n 1. It is broadcast on the Redis channel `webhooks/{webhook_id}/{event_type}`\r\n 2. Graph executions are initiated for all nodes triggered by this webhook\r\n\r\n## TODO\r\n- [ ] #8537\r\n- [x] #8538\r\n- [ ] #8357\r\n- [ ] ~~#8554~~ can be done in a follow-up PR\r\n- [ ] Test test test!\r\n- [ ] Add note on `repo` input of webhook blocks that the credentials used must have the right permissions for the given organization/repo\r\n- [x] Implement proper detection and graceful handling of webhook creation failing due to insufficient permissions. This should give a clear message to the user to e.g. \"give the app access to this organization in your settings\".\r\n- [ ] Nice-to-have: make a button on webhook blocks to trigger a ping and check its result. 
The API endpoints for this is already implemented.", + "created_at": "2024-10-16T22:13:47Z", + "updated_at": "2024-11-11T18:34:54Z", + "closed_at": null, + "merged_at": null, + "merge_commit_sha": "cbfd0cdd8db52cdd5a3b7ce088fc0ab4617a652e", + "assignee": { + "login": "Pwuts", + "id": 12185583, + "node_id": "MDQ6VXNlcjEyMTg1NTgz", + "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Pwuts", + "html_url": "https://github.com/Pwuts", + "followers_url": "https://api.github.com/users/Pwuts/followers", + "following_url": "https://api.github.com/users/Pwuts/following{/other_user}", + "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions", + "organizations_url": "https://api.github.com/users/Pwuts/orgs", + "repos_url": "https://api.github.com/users/Pwuts/repos", + "events_url": "https://api.github.com/users/Pwuts/events{/privacy}", + "received_events_url": "https://api.github.com/users/Pwuts/received_events", + "type": "User", + "user_view_type": "public", + "site_admin": false + }, + "assignees": [ + { + "login": "Pwuts", + "id": 12185583, + "node_id": "MDQ6VXNlcjEyMTg1NTgz", + "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Pwuts", + "html_url": "https://github.com/Pwuts", + "followers_url": "https://api.github.com/users/Pwuts/followers", + "following_url": "https://api.github.com/users/Pwuts/following{/other_user}", + "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions", + "organizations_url": "https://api.github.com/users/Pwuts/orgs", + "repos_url": "https://api.github.com/users/Pwuts/repos", + "events_url": "https://api.github.com/users/Pwuts/events{/privacy}", + "received_events_url": "https://api.github.com/users/Pwuts/received_events", + "type": "User", + "user_view_type": "public", + "site_admin": false + } + ], + "requested_reviewers": [ + { + "login": "kcze", + "id": 34861343, + "node_id": "MDQ6VXNlcjM0ODYxMzQz", + "avatar_url": "https://avatars.githubusercontent.com/u/34861343?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/kcze", + "html_url": "https://github.com/kcze", + "followers_url": "https://api.github.com/users/kcze/followers", + "following_url": "https://api.github.com/users/kcze/following{/other_user}", + "gists_url": "https://api.github.com/users/kcze/gists{/gist_id}", + "starred_url": "https://api.github.com/users/kcze/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/kcze/subscriptions", + "organizations_url": "https://api.github.com/users/kcze/orgs", + "repos_url": "https://api.github.com/users/kcze/repos", + "events_url": "https://api.github.com/users/kcze/events{/privacy}", + "received_events_url": "https://api.github.com/users/kcze/received_events", + "type": "User", + "user_view_type": "public", + "site_admin": false + } + ], + "requested_teams": [ + { + "name": "DevOps", + "id": 9547361, + "node_id": "T_kwDOB8roIc4Aka5h", + "slug": "devops", + "description": "", + "privacy": "closed", + "notification_setting": "notifications_enabled", + "url": "https://api.github.com/organizations/130738209/team/9547361", + "html_url": 
"https://github.com/orgs/Significant-Gravitas/teams/devops", + "members_url": "https://api.github.com/organizations/130738209/team/9547361/members{/member}", + "repositories_url": "https://api.github.com/organizations/130738209/team/9547361/repos", + "permission": "pull", + "parent": null + } + ], + "labels": [ + { + "id": 5272676214, + "node_id": "LA_kwDOJKSTjM8AAAABOkandg", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/documentation", + "name": "documentation", + "color": "0075ca", + "default": true, + "description": "Improvements or additions to documentation" + }, + { + "id": 5410633769, + "node_id": "LA_kwDOJKSTjM8AAAABQn-4KQ", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/size/xl", + "name": "size/xl", + "color": "E751DD", + "default": false, + "description": "" + }, + { + "id": 6892322271, + "node_id": "LA_kwDOJKSTjM8AAAABmtB93w", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/Review%20effort%20[1-5]:%204", + "name": "Review effort [1-5]: 4", + "color": "d1bcf9", + "default": false, + "description": null + }, + { + "id": 7218433025, + "node_id": "LA_kwDOJKSTjM8AAAABrkCMAQ", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/frontend", + "name": "platform/frontend", + "color": "033C07", + "default": false, + "description": "AutoGPT Platform - Front end" + }, + { + "id": 7219356193, + "node_id": "LA_kwDOJKSTjM8AAAABrk6iIQ", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/backend", + "name": "platform/backend", + "color": "ededed", + "default": false, + "description": "AutoGPT Platform - Back end" + }, + { + "id": 7515330106, + "node_id": "LA_kwDOJKSTjM8AAAABv_LWOg", + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/blocks", + "name": "platform/blocks", + "color": "eb5757", + "default": false, + "description": null + } + ], + "milestone": null, + "draft": false, + "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits", + "review_comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments", + "review_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}", + "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments", + "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0", + "head": { + "label": "Significant-Gravitas:reinier/open-1961-implement-github-on-pull-request-block", + "ref": "reinier/open-1961-implement-github-on-pull-request-block", + "sha": "8f708a2b60463eec10747d8f45dead35b5a45bd0", + "user": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Significant-Gravitas", + "html_url": "https://github.com/Significant-Gravitas", + "followers_url": "https://api.github.com/users/Significant-Gravitas/followers", + "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}", + "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions", + "organizations_url": 
"https://api.github.com/users/Significant-Gravitas/orgs", + "repos_url": "https://api.github.com/users/Significant-Gravitas/repos", + "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}", + "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events", + "type": "Organization", + "user_view_type": "public", + "site_admin": false + }, + "repo": { + "id": 614765452, + "node_id": "R_kgDOJKSTjA", + "name": "AutoGPT", + "full_name": "Significant-Gravitas/AutoGPT", + "private": false, + "owner": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Significant-Gravitas", + "html_url": "https://github.com/Significant-Gravitas", + "followers_url": "https://api.github.com/users/Significant-Gravitas/followers", + "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}", + "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions", + "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs", + "repos_url": "https://api.github.com/users/Significant-Gravitas/repos", + "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}", + "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events", + "type": "Organization", + "user_view_type": "public", + "site_admin": false + }, + "html_url": "https://github.com/Significant-Gravitas/AutoGPT", + "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. 
Our mission is to provide the tools, so that you can focus on what matters.", + "fork": false, + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT", + "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks", + "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams", + "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks", + "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}", + "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events", + "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}", + "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}", + "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags", + "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}", + "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages", + "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers", + "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors", + "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers", + "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription", + "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}", + "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges", + "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads", + "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}", + "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}", + "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}", + "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}", + "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}", + "deployments_url": 
"https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments", + "created_at": "2023-03-16T09:21:07Z", + "updated_at": "2024-11-11T18:16:29Z", + "pushed_at": "2024-11-11T18:34:52Z", + "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git", + "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git", + "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git", + "svn_url": "https://github.com/Significant-Gravitas/AutoGPT", + "homepage": "https://agpt.co", + "size": 181894, + "stargazers_count": 168203, + "watchers_count": 168203, + "language": "Python", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "has_discussions": true, + "forks_count": 44376, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 189, + "license": { + "key": "other", + "name": "Other", + "spdx_id": "NOASSERTION", + "url": null, + "node_id": "MDc6TGljZW5zZTA=" + }, + "allow_forking": true, + "is_template": false, + "web_commit_signoff_required": false, + "topics": [ + "ai", + "artificial-intelligence", + "autonomous-agents", + "gpt-4", + "openai", + "python" + ], + "visibility": "public", + "forks": 44376, + "open_issues": 189, + "watchers": 168203, + "default_branch": "master", + "allow_squash_merge": true, + "allow_merge_commit": false, + "allow_rebase_merge": false, + "allow_auto_merge": true, + "delete_branch_on_merge": true, + "allow_update_branch": true, + "use_squash_pr_title_as_default": true, + "squash_merge_commit_message": "COMMIT_MESSAGES", + "squash_merge_commit_title": "PR_TITLE", + "merge_commit_message": "BLANK", + "merge_commit_title": "PR_TITLE" + } + }, + "base": { + "label": "Significant-Gravitas:dev", + "ref": "dev", + "sha": "0b5b95eff5e18c1e162d2b30b66a7be2bed1cbc2", + "user": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Significant-Gravitas", + "html_url": "https://github.com/Significant-Gravitas", + "followers_url": "https://api.github.com/users/Significant-Gravitas/followers", + "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}", + "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions", + "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs", + "repos_url": "https://api.github.com/users/Significant-Gravitas/repos", + "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}", + "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events", + "type": "Organization", + "user_view_type": "public", + "site_admin": false + }, + "repo": { + "id": 614765452, + "node_id": "R_kgDOJKSTjA", + "name": "AutoGPT", + "full_name": "Significant-Gravitas/AutoGPT", + "private": false, + "owner": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Significant-Gravitas", + "html_url": "https://github.com/Significant-Gravitas", + "followers_url": "https://api.github.com/users/Significant-Gravitas/followers", + "following_url": 
"https://api.github.com/users/Significant-Gravitas/following{/other_user}", + "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions", + "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs", + "repos_url": "https://api.github.com/users/Significant-Gravitas/repos", + "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}", + "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events", + "type": "Organization", + "user_view_type": "public", + "site_admin": false + }, + "html_url": "https://github.com/Significant-Gravitas/AutoGPT", + "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.", + "fork": false, + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT", + "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks", + "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams", + "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks", + "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}", + "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events", + "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}", + "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}", + "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags", + "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}", + "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages", + "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers", + "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors", + "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers", + "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription", + "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}", + "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}", + "merges_url": 
"https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges", + "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads", + "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}", + "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}", + "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}", + "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}", + "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}", + "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments", + "created_at": "2023-03-16T09:21:07Z", + "updated_at": "2024-11-11T18:16:29Z", + "pushed_at": "2024-11-11T18:34:52Z", + "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git", + "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git", + "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git", + "svn_url": "https://github.com/Significant-Gravitas/AutoGPT", + "homepage": "https://agpt.co", + "size": 181894, + "stargazers_count": 168203, + "watchers_count": 168203, + "language": "Python", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "has_discussions": true, + "forks_count": 44376, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 189, + "license": { + "key": "other", + "name": "Other", + "spdx_id": "NOASSERTION", + "url": null, + "node_id": "MDc6TGljZW5zZTA=" + }, + "allow_forking": true, + "is_template": false, + "web_commit_signoff_required": false, + "topics": [ + "ai", + "artificial-intelligence", + "autonomous-agents", + "gpt-4", + "openai", + "python" + ], + "visibility": "public", + "forks": 44376, + "open_issues": 189, + "watchers": 168203, + "default_branch": "master", + "allow_squash_merge": true, + "allow_merge_commit": false, + "allow_rebase_merge": false, + "allow_auto_merge": true, + "delete_branch_on_merge": true, + "allow_update_branch": true, + "use_squash_pr_title_as_default": true, + "squash_merge_commit_message": "COMMIT_MESSAGES", + "squash_merge_commit_title": "PR_TITLE", + "merge_commit_message": "BLANK", + "merge_commit_title": "PR_TITLE" + } + }, + "_links": { + "self": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358" + }, + "html": { + "href": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358" + }, + "issue": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358" + }, + "comments": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments" + }, + "review_comments": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments" + }, + "review_comment": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}" + }, + "commits": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits" + }, + "statuses": { + "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0" + } + }, + "author_association": "MEMBER", + "auto_merge": null, + "active_lock_reason": 
null, + "merged": false, + "mergeable": null, + "rebaseable": null, + "mergeable_state": "unknown", + "merged_by": null, + "comments": 12, + "review_comments": 29, + "maintainer_can_modify": false, + "commits": 62, + "additions": 1674, + "deletions": 331, + "changed_files": 36 + }, + "before": "f40aef87672203f47bbbd53f83fae0964c5624da", + "after": "8f708a2b60463eec10747d8f45dead35b5a45bd0", + "repository": { + "id": 614765452, + "node_id": "R_kgDOJKSTjA", + "name": "AutoGPT", + "full_name": "Significant-Gravitas/AutoGPT", + "private": false, + "owner": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Significant-Gravitas", + "html_url": "https://github.com/Significant-Gravitas", + "followers_url": "https://api.github.com/users/Significant-Gravitas/followers", + "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}", + "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions", + "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs", + "repos_url": "https://api.github.com/users/Significant-Gravitas/repos", + "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}", + "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events", + "type": "Organization", + "user_view_type": "public", + "site_admin": false + }, + "html_url": "https://github.com/Significant-Gravitas/AutoGPT", + "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. 
Our mission is to provide the tools, so that you can focus on what matters.", + "fork": false, + "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT", + "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks", + "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams", + "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks", + "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}", + "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events", + "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}", + "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}", + "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags", + "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}", + "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages", + "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers", + "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors", + "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers", + "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription", + "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}", + "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges", + "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads", + "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}", + "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}", + "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}", + "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}", + "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}", + "deployments_url": 
"https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments", + "created_at": "2023-03-16T09:21:07Z", + "updated_at": "2024-11-11T18:16:29Z", + "pushed_at": "2024-11-11T18:34:52Z", + "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git", + "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git", + "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git", + "svn_url": "https://github.com/Significant-Gravitas/AutoGPT", + "homepage": "https://agpt.co", + "size": 181894, + "stargazers_count": 168203, + "watchers_count": 168203, + "language": "Python", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "has_discussions": true, + "forks_count": 44376, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 189, + "license": { + "key": "other", + "name": "Other", + "spdx_id": "NOASSERTION", + "url": null, + "node_id": "MDc6TGljZW5zZTA=" + }, + "allow_forking": true, + "is_template": false, + "web_commit_signoff_required": false, + "topics": [ + "ai", + "artificial-intelligence", + "autonomous-agents", + "gpt-4", + "openai", + "python" + ], + "visibility": "public", + "forks": 44376, + "open_issues": 189, + "watchers": 168203, + "default_branch": "master", + "custom_properties": { + + } + }, + "organization": { + "login": "Significant-Gravitas", + "id": 130738209, + "node_id": "O_kgDOB8roIQ", + "url": "https://api.github.com/orgs/Significant-Gravitas", + "repos_url": "https://api.github.com/orgs/Significant-Gravitas/repos", + "events_url": "https://api.github.com/orgs/Significant-Gravitas/events", + "hooks_url": "https://api.github.com/orgs/Significant-Gravitas/hooks", + "issues_url": "https://api.github.com/orgs/Significant-Gravitas/issues", + "members_url": "https://api.github.com/orgs/Significant-Gravitas/members{/member}", + "public_members_url": "https://api.github.com/orgs/Significant-Gravitas/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4", + "description": "" + }, + "enterprise": { + "id": 149607, + "slug": "significant-gravitas", + "name": "Significant Gravitas", + "node_id": "E_kgDOAAJIZw", + "avatar_url": "https://avatars.githubusercontent.com/b/149607?v=4", + "description": "The creators of AutoGPT", + "website_url": "discord.gg/autogpt", + "html_url": "https://github.com/enterprises/significant-gravitas", + "created_at": "2024-04-18T17:43:53Z", + "updated_at": "2024-10-23T16:59:55Z" + }, + "sender": { + "login": "Pwuts", + "id": 12185583, + "node_id": "MDQ6VXNlcjEyMTg1NTgz", + "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Pwuts", + "html_url": "https://github.com/Pwuts", + "followers_url": "https://api.github.com/users/Pwuts/followers", + "following_url": "https://api.github.com/users/Pwuts/following{/other_user}", + "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions", + "organizations_url": "https://api.github.com/users/Pwuts/orgs", + "repos_url": "https://api.github.com/users/Pwuts/repos", + "events_url": "https://api.github.com/users/Pwuts/events{/privacy}", + "received_events_url": "https://api.github.com/users/Pwuts/received_events", + "type": "User", + "user_view_type": "public", + "site_admin": false + } +} \ No newline at end of file diff --git 
a/autogpt_platform/backend/backend/blocks/github/triggers.py b/autogpt_platform/backend/backend/blocks/github/triggers.py new file mode 100644 index 000000000..ce24a649f --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/github/triggers.py @@ -0,0 +1,156 @@ +import json +import logging +from pathlib import Path + +from pydantic import BaseModel + +from backend.data.block import ( + Block, + BlockCategory, + BlockOutput, + BlockSchema, + BlockWebhookConfig, +) +from backend.data.model import SchemaField + +from ._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + GithubCredentialsField, + GithubCredentialsInput, +) + +logger = logging.getLogger(__name__) + + +# --8<-- [start:GithubTriggerExample] +class GitHubTriggerBase: + class Input(BlockSchema): + credentials: GithubCredentialsInput = GithubCredentialsField("repo") + repo: str = SchemaField( + description=( + "Repository to subscribe to.\n\n" + "**Note:** Make sure your GitHub credentials have permissions " + "to create webhooks on this repo." + ), + placeholder="{owner}/{repo}", + ) + # --8<-- [start:example-payload-field] + payload: dict = SchemaField(hidden=True, default={}) + # --8<-- [end:example-payload-field] + + class Output(BlockSchema): + payload: dict = SchemaField( + description="The complete webhook payload that was received from GitHub. " + "Includes information about the affected resource (e.g. pull request), " + "the event, and the user who triggered the event." + ) + triggered_by_user: dict = SchemaField( + description="Object representing the GitHub user who triggered the event" + ) + error: str = SchemaField( + description="Error message if the payload could not be processed" + ) + + def run(self, input_data: Input, **kwargs) -> BlockOutput: + yield "payload", input_data.payload + yield "triggered_by_user", input_data.payload["sender"] + + +class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block): + EXAMPLE_PAYLOAD_FILE = ( + Path(__file__).parent / "example_payloads" / "pull_request.synchronize.json" + ) + + # --8<-- [start:example-event-filter] + class Input(GitHubTriggerBase.Input): + class EventsFilter(BaseModel): + """ + https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request + """ + + opened: bool = False + edited: bool = False + closed: bool = False + reopened: bool = False + synchronize: bool = False + assigned: bool = False + unassigned: bool = False + labeled: bool = False + unlabeled: bool = False + converted_to_draft: bool = False + locked: bool = False + unlocked: bool = False + enqueued: bool = False + dequeued: bool = False + milestoned: bool = False + demilestoned: bool = False + ready_for_review: bool = False + review_requested: bool = False + review_request_removed: bool = False + auto_merge_enabled: bool = False + auto_merge_disabled: bool = False + + events: EventsFilter = SchemaField( + title="Events", description="The events to subscribe to" + ) + # --8<-- [end:example-event-filter] + + class Output(GitHubTriggerBase.Output): + event: str = SchemaField( + description="The PR event that triggered the webhook (e.g. 
'opened')" + ) + number: int = SchemaField(description="The number of the affected pull request") + pull_request: dict = SchemaField( + description="Object representing the affected pull request" + ) + pull_request_url: str = SchemaField( + description="The URL of the affected pull request" + ) + + def __init__(self): + from backend.integrations.webhooks.github import GithubWebhookType + + example_payload = json.loads(self.EXAMPLE_PAYLOAD_FILE.read_text()) + + super().__init__( + id="6c60ec01-8128-419e-988f-96a063ee2fea", + description="This block triggers on pull request events and outputs the event type and payload.", + categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT}, + input_schema=GithubPullRequestTriggerBlock.Input, + output_schema=GithubPullRequestTriggerBlock.Output, + # --8<-- [start:example-webhook_config] + webhook_config=BlockWebhookConfig( + provider="github", + webhook_type=GithubWebhookType.REPO, + resource_format="{repo}", + event_filter_input="events", + event_format="pull_request.{event}", + ), + # --8<-- [end:example-webhook_config] + test_input={ + "repo": "Significant-Gravitas/AutoGPT", + "events": {"opened": True, "synchronize": True}, + "credentials": TEST_CREDENTIALS_INPUT, + "payload": example_payload, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("payload", example_payload), + ("triggered_by_user", example_payload["sender"]), + ("event", example_payload["action"]), + ("number", example_payload["number"]), + ("pull_request", example_payload["pull_request"]), + ("pull_request_url", example_payload["pull_request"]["html_url"]), + ], + ) + + def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore + yield from super().run(input_data, **kwargs) + yield "event", input_data.payload["action"] + yield "number", input_data.payload["number"] + yield "pull_request", input_data.payload["pull_request"] + yield "pull_request_url", input_data.payload["pull_request"]["html_url"] + + +# --8<-- [end:GithubTriggerExample] diff --git a/autogpt_platform/backend/backend/data/block.py b/autogpt_platform/backend/backend/data/block.py index f86eee084..14108d71b 100644 --- a/autogpt_platform/backend/backend/data/block.py +++ b/autogpt_platform/backend/backend/data/block.py @@ -20,9 +20,12 @@ from prisma.models import AgentBlock from pydantic import BaseModel from backend.util import json +from backend.util.settings import Config from .model import CREDENTIALS_FIELD_NAME, ContributorDetails, CredentialsMetaInput +app_config = Config() + BlockData = tuple[str, Any] # Input & Output data should be a tuple of (name, data). BlockInput = dict[str, Any] # Input: 1 input pin consumes 1 data. BlockOutput = Generator[BlockData, None, None] # Output: 1 output pin produces n data. @@ -34,6 +37,7 @@ class BlockType(Enum): INPUT = "Input" OUTPUT = "Output" NOTE = "Note" + WEBHOOK = "Webhook" AGENT = "Agent" @@ -177,6 +181,41 @@ class EmptySchema(BlockSchema): pass +# --8<-- [start:BlockWebhookConfig] +class BlockWebhookConfig(BaseModel): + provider: str + """The service provider that the webhook connects to""" + + webhook_type: str + """ + Identifier for the webhook type. E.g. GitHub has repo and organization level hooks. + + Only for use in the corresponding `WebhooksManager`. + """ + + resource_format: str + """ + Template string for the resource that a block instance subscribes to. + Fields will be filled from the block's inputs (except `payload`). 
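(As a concrete reference for the two template fields above, here is a minimal sketch of how they are resolved downstream. It mirrors the `on_node_activate` hook added later in this patch, with literal values borrowed from the GitHub pull request trigger block; the variable names are illustrative only.)

    # How resource_format / event_format get filled in, in plain Python:
    resource_format = "{repo}"
    event_format = "pull_request.{event}"

    node_inputs = {
        "repo": "Significant-Gravitas/AutoGPT",
        "events": {"opened": True, "synchronize": True},  # the event filter input
    }

    # Resource: the template is filled from the node's inputs (except `payload`)
    resource = resource_format.format(**node_inputs)
    assert resource == "Significant-Gravitas/AutoGPT"

    # Events: event_format is applied to each event enabled in the filter
    events = [
        event_format.format(event=name)
        for name, enabled in node_inputs["events"].items()
        if enabled
    ]
    assert events == ["pull_request.opened", "pull_request.synchronize"]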
+ + Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented) + + Only for use in the corresponding `WebhooksManager`. + """ + + event_filter_input: str + """Name of the block's event filter input.""" + + event_format: str = "{event}" + """ + Template string for the event(s) that a block instance subscribes to. + Applied individually to each event selected in the event filter input. + + Example: `"pull_request.{event}"` -> `"pull_request.opened"` + """ + # --8<-- [end:BlockWebhookConfig] + + class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]): def __init__( self, @@ -193,6 +232,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]): disabled: bool = False, static_output: bool = False, block_type: BlockType = BlockType.STANDARD, + webhook_config: Optional[BlockWebhookConfig] = None, ): """ Initialize the block with the given schema. @@ -223,9 +263,38 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]): self.contributors = contributors or set() self.disabled = disabled self.static_output = static_output - self.block_type = block_type + self.block_type = block_type if not webhook_config else BlockType.WEBHOOK + self.webhook_config = webhook_config self.execution_stats = {} + if self.webhook_config: + # Enforce shape of webhook event filter + event_filter_field = self.input_schema.model_fields[ + self.webhook_config.event_filter_input + ] + if not ( + isinstance(event_filter_field.annotation, type) + and issubclass(event_filter_field.annotation, BaseModel) + and all( + field.annotation is bool + for field in event_filter_field.annotation.model_fields.values() + ) + ): + raise NotImplementedError( + f"{self.name} has an invalid webhook event selector: " + "field must be a BaseModel and all its fields must be boolean" + ) + + # Enforce presence of 'payload' input + if "payload" not in self.input_schema.model_fields: + raise TypeError( + f"{self.name} is webhook-triggered but has no 'payload' input" + ) + + # Disable webhook-triggered block if webhook functionality not available + if not app_config.platform_base_url: + self.disabled = True + @classmethod def create(cls: Type["Block"]) -> "Block": return cls() diff --git a/autogpt_platform/backend/backend/data/execution.py b/autogpt_platform/backend/backend/data/execution.py index 4fa1f567f..dc28b4883 100644 --- a/autogpt_platform/backend/backend/data/execution.py +++ b/autogpt_platform/backend/backend/data/execution.py @@ -1,7 +1,7 @@ from collections import defaultdict from datetime import datetime, timezone from multiprocessing import Manager -from typing import Any, Generic, TypeVar +from typing import Any, AsyncGenerator, Generator, Generic, TypeVar from prisma.enums import AgentExecutionStatus from prisma.models import ( @@ -14,7 +14,9 @@ from pydantic import BaseModel from backend.data.block import BlockData, BlockInput, CompletedBlockOutput from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE +from backend.data.queue import AsyncRedisEventBus, RedisEventBus from backend.util import json, mock +from backend.util.settings import Config class GraphExecution(BaseModel): @@ -271,7 +273,6 @@ async def update_graph_execution_stats( graph_exec_id: str, stats: dict[str, Any], ) -> ExecutionResult: - status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED res = await AgentGraphExecution.prisma().update( where={"id": graph_exec_id}, @@ -471,3 +472,42 @@ async def get_incomplete_executions( 
include=EXECUTION_RESULT_INCLUDE, ) return [ExecutionResult.from_db(execution) for execution in executions] + + +# --------------------- Event Bus --------------------- # + +config = Config() + + +class RedisExecutionEventBus(RedisEventBus[ExecutionResult]): + Model = ExecutionResult + + @property + def event_bus_name(self) -> str: + return config.execution_event_bus_name + + def publish(self, res: ExecutionResult): + self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}") + + def listen( + self, graph_id: str = "*", graph_exec_id: str = "*" + ) -> Generator[ExecutionResult, None, None]: + for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"): + yield execution_result + + +class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]): + Model = ExecutionResult + + @property + def event_bus_name(self) -> str: + return config.execution_event_bus_name + + async def publish(self, res: ExecutionResult): + await self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}") + + async def listen( + self, graph_id: str = "*", graph_exec_id: str = "*" + ) -> AsyncGenerator[ExecutionResult, None]: + async for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"): + yield execution_result diff --git a/autogpt_platform/backend/backend/data/graph.py b/autogpt_platform/backend/backend/data/graph.py index cc253af59..1081d6b90 100644 --- a/autogpt_platform/backend/backend/data/graph.py +++ b/autogpt_platform/backend/backend/data/graph.py @@ -3,7 +3,7 @@ import logging import uuid from collections import defaultdict from datetime import datetime, timezone -from typing import Any, Literal, Type +from typing import Any, Literal, Optional, Type import prisma from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink @@ -12,12 +12,14 @@ from pydantic.fields import computed_field from backend.blocks.agent import AgentExecutorBlock from backend.blocks.basic import AgentInputBlock, AgentOutputBlock -from backend.data.block import BlockInput, BlockType, get_block, get_blocks -from backend.data.db import BaseDbModel, transaction -from backend.data.execution import ExecutionStatus -from backend.data.includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE from backend.util import json +from .block import BlockInput, BlockType, get_block, get_blocks +from .db import BaseDbModel, transaction +from .execution import ExecutionStatus +from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE +from .integrations import Webhook + logger = logging.getLogger(__name__) @@ -50,20 +52,51 @@ class Node(BaseDbModel): input_links: list[Link] = [] output_links: list[Link] = [] + webhook_id: Optional[str] = None + + +class NodeModel(Node): + graph_id: str + graph_version: int + + webhook: Optional[Webhook] = None + @staticmethod def from_db(node: AgentNode): if not node.AgentBlock: raise ValueError(f"Invalid node {node.id}, invalid AgentBlock.") - obj = Node( + obj = NodeModel( id=node.id, block_id=node.AgentBlock.id, input_default=json.loads(node.constantInput, target_type=dict[str, Any]), metadata=json.loads(node.metadata, target_type=dict[str, Any]), + graph_id=node.agentGraphId, + graph_version=node.agentGraphVersion, + webhook_id=node.webhookId, + webhook=Webhook.from_db(node.Webhook) if node.Webhook else None, ) obj.input_links = [Link.from_db(link) for link in node.Input or []] obj.output_links = [Link.from_db(link) for link in node.Output or []] return obj + def is_triggered_by_event_type(self, event_type: str) -> bool: + if not (block := 
get_block(self.block_id)): + raise ValueError(f"Block #{self.block_id} not found for node #{self.id}") + if not block.webhook_config: + raise TypeError("This method can't be used on non-webhook blocks") + event_filter = self.input_default.get(block.webhook_config.event_filter_input) + if not event_filter: + raise ValueError(f"Event filter is not configured on node #{self.id}") + return event_type in [ + block.webhook_config.event_format.format(event=k) + for k in event_filter + if event_filter[k] is True + ] + + +# Fix 2-way reference Node <-> Webhook +Webhook.model_rebuild() + class GraphExecution(BaseDbModel): execution_id: str @@ -110,33 +143,6 @@ class Graph(BaseDbModel): nodes: list[Node] = [] links: list[Link] = [] - @staticmethod - def _generate_schema( - type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input], - data: list[dict], - ) -> dict[str, Any]: - props = [] - for p in data: - try: - props.append(type_class(**p)) - except Exception as e: - logger.warning(f"Invalid {type_class}: {p}, {e}") - - return { - "type": "object", - "properties": { - p.name: { - "secret": p.secret, - "advanced": p.advanced, - "title": p.title or p.name, - **({"description": p.description} if p.description else {}), - **({"default": p.value} if p.value is not None else {}), - } - for p in props - }, - "required": [p.name for p in props if p.value is None], - } - @computed_field @property def input_schema(self) -> dict[str, Any]: @@ -165,6 +171,38 @@ class Graph(BaseDbModel): ], ) + @staticmethod + def _generate_schema( + type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input], + data: list[dict], + ) -> dict[str, Any]: + props = [] + for p in data: + try: + props.append(type_class(**p)) + except Exception as e: + logger.warning(f"Invalid {type_class}: {p}, {e}") + + return { + "type": "object", + "properties": { + p.name: { + "secret": p.secret, + "advanced": p.advanced, + "title": p.title or p.name, + **({"description": p.description} if p.description else {}), + **({"default": p.value} if p.value is not None else {}), + } + for p in props + }, + "required": [p.name for p in props if p.value is None], + } + + +class GraphModel(Graph): + user_id: str + nodes: list[NodeModel] = [] # type: ignore + @property def starting_nodes(self) -> list[Node]: outbound_nodes = {link.sink_id for link in self.links} @@ -291,36 +329,39 @@ class Graph(BaseDbModel): GraphExecution.from_db(execution) for execution in graph.AgentGraphExecution or [] ] - nodes = graph.AgentNodes or [] - return Graph( + return GraphModel( id=graph.id, + user_id=graph.userId, version=graph.version, is_active=graph.isActive, is_template=graph.isTemplate, name=graph.name or "", description=graph.description or "", executions=executions, - nodes=[Graph._process_node(node, hide_credentials) for node in nodes], + nodes=[ + GraphModel._process_node(node, hide_credentials) + for node in graph.AgentNodes or [] + ], links=list( { Link.from_db(link) - for node in nodes + for node in graph.AgentNodes or [] for link in (node.Input or []) + (node.Output or []) } ), ) @staticmethod - def _process_node(node: AgentNode, hide_credentials: bool) -> Node: - node_dict = node.model_dump() + def _process_node(node: AgentNode, hide_credentials: bool) -> NodeModel: + node_dict = {field: getattr(node, field) for field in node.model_fields} if hide_credentials and "constantInput" in node_dict: constant_input = json.loads( node_dict["constantInput"], target_type=dict[str, Any] ) - constant_input = 
Graph._hide_credentials_in_input(constant_input) + constant_input = GraphModel._hide_credentials_in_input(constant_input) node_dict["constantInput"] = json.dumps(constant_input) - return Node.from_db(AgentNode(**node_dict)) + return NodeModel.from_db(AgentNode(**node_dict)) @staticmethod def _hide_credentials_in_input(input_data: dict[str, Any]) -> dict[str, Any]: @@ -328,7 +369,7 @@ class Graph(BaseDbModel): result = {} for key, value in input_data.items(): if isinstance(value, dict): - result[key] = Graph._hide_credentials_in_input(value) + result[key] = GraphModel._hide_credentials_in_input(value) elif isinstance(value, str) and any( sensitive_key in key.lower() for sensitive_key in sensitive_keys ): @@ -339,22 +380,37 @@ class Graph(BaseDbModel): return result -# --------------------- Model functions --------------------- # +# --------------------- CRUD functions --------------------- # -async def get_node(node_id: str) -> Node: +async def get_node(node_id: str) -> NodeModel: node = await AgentNode.prisma().find_unique_or_raise( where={"id": node_id}, include=AGENT_NODE_INCLUDE, ) - return Node.from_db(node) + return NodeModel.from_db(node) + + +async def set_node_webhook(node_id: str, webhook_id: str | None) -> NodeModel: + node = await AgentNode.prisma().update( + where={"id": node_id}, + data=( + {"Webhook": {"connect": {"id": webhook_id}}} + if webhook_id + else {"Webhook": {"disconnect": True}} + ), + include=AGENT_NODE_INCLUDE, + ) + if not node: + raise ValueError(f"Node #{node_id} not found") + return NodeModel.from_db(node) async def get_graphs( user_id: str, include_executions: bool = False, filter_by: Literal["active", "template"] | None = "active", -) -> list[Graph]: +) -> list[GraphModel]: """ Retrieves graph metadata objects. Default behaviour is to get all currently active graphs. @@ -365,7 +421,7 @@ async def get_graphs( user_id: The ID of the user that owns the graph. Returns: - list[Graph]: A list of objects representing the retrieved graph metadata. + list[GraphModel]: A list of objects representing the retrieved graphs. """ where_clause: AgentGraphWhereInput = {} @@ -386,7 +442,7 @@ async def get_graphs( include=graph_include, ) - return [Graph.from_db(graph) for graph in graphs] + return [GraphModel.from_db(graph) for graph in graphs] async def get_graph( @@ -395,7 +451,7 @@ async def get_graph( template: bool = False, user_id: str | None = None, hide_credentials: bool = False, -) -> Graph | None: +) -> GraphModel | None: """ Retrieves a graph from the DB. Defaults to the version with `is_active` if `version` is not passed, @@ -420,38 +476,35 @@ async def get_graph( include=AGENT_GRAPH_INCLUDE, order={"version": "desc"}, ) - return Graph.from_db(graph, hide_credentials) if graph else None + return GraphModel.from_db(graph, hide_credentials) if graph else None async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None: - # Check if the graph belongs to the user - graph = await AgentGraph.prisma().find_first( + # Activate the requested version if it exists and is owned by the user. 
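# (The rewrite below folds the ownership check into the WHERE clause of a
#  single `update_many`, replacing the previous find-then-update round trip.
#  Prisma's `update_many` returns the number of rows it touched, so a zero
#  count covers "graph not found", "version not found", and "not owned by
#  this user" in one test.)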
+ updated_count = await AgentGraph.prisma().update_many( + data={"isActive": True}, where={ "id": graph_id, "version": version, "userId": user_id, - } - ) - if not graph: - raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user") - - updated_graph = await AgentGraph.prisma().update( - data={"isActive": True}, - where={ - "graphVersionId": {"id": graph_id, "version": version}, }, ) - if not updated_graph: - raise Exception(f"Graph #{graph_id} v{version} not found") + if updated_count == 0: + raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user") - # Deactivate all other versions + # Deactivate all other versions. await AgentGraph.prisma().update_many( data={"isActive": False}, - where={"id": graph_id, "version": {"not": version}, "userId": user_id}, + where={ + "id": graph_id, + "version": {"not": version}, + "userId": user_id, + "isActive": True, + }, ) -async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]: +async def get_graph_all_versions(graph_id: str, user_id: str) -> list[GraphModel]: graph_versions = await AgentGraph.prisma().find_many( where={"id": graph_id, "userId": user_id}, order={"version": "desc"}, @@ -461,7 +514,7 @@ async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]: if not graph_versions: return [] - return [Graph.from_db(graph) for graph in graph_versions] + return [GraphModel.from_db(graph) for graph in graph_versions] async def delete_graph(graph_id: str, user_id: str) -> int: @@ -473,7 +526,7 @@ async def delete_graph(graph_id: str, user_id: str) -> int: return entries_count -async def create_graph(graph: Graph, user_id: str) -> Graph: +async def create_graph(graph: Graph, user_id: str) -> GraphModel: async with transaction() as tx: await __create_graph(tx, graph, user_id) @@ -534,6 +587,32 @@ async def __create_graph(tx, graph: Graph, user_id: str): # ------------------------ UTILITIES ------------------------ # +def make_graph_model(creatable_graph: Graph, user_id: str) -> GraphModel: + """ + Convert a Graph to a GraphModel, setting graph_id and graph_version on all nodes. + + Args: + creatable_graph (Graph): The creatable graph to convert. + user_id (str): The ID of the user creating the graph. + + Returns: + GraphModel: The converted Graph object. 
+ """ + # Create a new Graph object, inheriting properties from CreatableGraph + return GraphModel( + **creatable_graph.model_dump(exclude={"nodes"}), + user_id=user_id, + nodes=[ + NodeModel( + **creatable_node.model_dump(), + graph_id=creatable_graph.id, + graph_version=creatable_graph.version, + ) + for creatable_node in creatable_graph.nodes + ], + ) + + async def fix_llm_provider_credentials(): """Fix node credentials with provider `llm`""" from autogpt_libs.supabase_integration_credentials_store import ( diff --git a/autogpt_platform/backend/backend/data/includes.py b/autogpt_platform/backend/backend/data/includes.py index 371d87ec5..0b791f502 100644 --- a/autogpt_platform/backend/backend/data/includes.py +++ b/autogpt_platform/backend/backend/data/includes.py @@ -3,6 +3,7 @@ import prisma AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = { "Input": True, "Output": True, + "Webhook": True, "AgentBlock": True, } @@ -27,3 +28,7 @@ GRAPH_EXECUTION_INCLUDE: prisma.types.AgentGraphExecutionInclude = { } } } + +INTEGRATION_WEBHOOK_INCLUDE: prisma.types.IntegrationWebhookInclude = { + "AgentNodes": {"include": AGENT_NODE_INCLUDE} # type: ignore +} diff --git a/autogpt_platform/backend/backend/data/integrations.py b/autogpt_platform/backend/backend/data/integrations.py new file mode 100644 index 000000000..f86ecd3a4 --- /dev/null +++ b/autogpt_platform/backend/backend/data/integrations.py @@ -0,0 +1,168 @@ +import logging +from typing import TYPE_CHECKING, AsyncGenerator, Optional + +from prisma import Json +from prisma.models import IntegrationWebhook +from pydantic import Field + +from backend.data.includes import INTEGRATION_WEBHOOK_INCLUDE +from backend.data.queue import AsyncRedisEventBus + +from .db import BaseDbModel + +if TYPE_CHECKING: + from .graph import NodeModel + +logger = logging.getLogger(__name__) + + +class Webhook(BaseDbModel): + user_id: str + provider: str + credentials_id: str + webhook_type: str + resource: str + events: list[str] + config: dict = Field(default_factory=dict) + secret: str + + provider_webhook_id: str + + attached_nodes: Optional[list["NodeModel"]] = None + + @staticmethod + def from_db(webhook: IntegrationWebhook): + from .graph import NodeModel + + return Webhook( + id=webhook.id, + user_id=webhook.userId, + provider=webhook.provider, + credentials_id=webhook.credentialsId, + webhook_type=webhook.webhookType, + resource=webhook.resource, + events=webhook.events, + config=dict(webhook.config), + secret=webhook.secret, + provider_webhook_id=webhook.providerWebhookId, + attached_nodes=( + [NodeModel.from_db(node) for node in webhook.AgentNodes] + if webhook.AgentNodes is not None + else None + ), + ) + + +# --------------------- CRUD functions --------------------- # + + +async def create_webhook(webhook: Webhook) -> Webhook: + created_webhook = await IntegrationWebhook.prisma().create( + data={ + "id": webhook.id, + "userId": webhook.user_id, + "provider": webhook.provider, + "credentialsId": webhook.credentials_id, + "webhookType": webhook.webhook_type, + "resource": webhook.resource, + "events": webhook.events, + "config": Json(webhook.config), + "secret": webhook.secret, + "providerWebhookId": webhook.provider_webhook_id, + } + ) + return Webhook.from_db(created_webhook) + + +async def get_webhook(webhook_id: str) -> Webhook: + """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints.""" + webhook = await IntegrationWebhook.prisma().find_unique_or_raise( + where={"id": webhook_id}, + include=INTEGRATION_WEBHOOK_INCLUDE, + ) + 
return Webhook.from_db(webhook) + + +async def get_all_webhooks(credentials_id: str) -> list[Webhook]: + """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints.""" + webhooks = await IntegrationWebhook.prisma().find_many( + where={"credentialsId": credentials_id}, + include=INTEGRATION_WEBHOOK_INCLUDE, + ) + return [Webhook.from_db(webhook) for webhook in webhooks] + + +async def find_webhook( + credentials_id: str, webhook_type: str, resource: str, events: list[str] +) -> Webhook | None: + """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints.""" + webhook = await IntegrationWebhook.prisma().find_first( + where={ + "credentialsId": credentials_id, + "webhookType": webhook_type, + "resource": resource, + "events": {"has_every": events}, + }, + include=INTEGRATION_WEBHOOK_INCLUDE, + ) + return Webhook.from_db(webhook) if webhook else None + + +async def update_webhook_config(webhook_id: str, updated_config: dict) -> Webhook: + """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints.""" + _updated_webhook = await IntegrationWebhook.prisma().update( + where={"id": webhook_id}, + data={"config": Json(updated_config)}, + include=INTEGRATION_WEBHOOK_INCLUDE, + ) + if _updated_webhook is None: + raise ValueError(f"Webhook #{webhook_id} not found") + return Webhook.from_db(_updated_webhook) + + +async def delete_webhook(webhook_id: str) -> None: + """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints.""" + deleted = await IntegrationWebhook.prisma().delete(where={"id": webhook_id}) + if not deleted: + raise ValueError(f"Webhook #{webhook_id} not found") + + +# --------------------- WEBHOOK EVENTS --------------------- # + + +class WebhookEvent(BaseDbModel): + provider: str + webhook_id: str + event_type: str + payload: dict + + +class WebhookEventBus(AsyncRedisEventBus[WebhookEvent]): + Model = WebhookEvent + + @property + def event_bus_name(self) -> str: + return "webhooks" + + async def publish(self, event: WebhookEvent): + await self.publish_event(event, f"{event.webhook_id}/{event.event_type}") + + async def listen( + self, webhook_id: str, event_type: Optional[str] = None + ) -> AsyncGenerator[WebhookEvent, None]: + async for event in self.listen_events(f"{webhook_id}/{event_type or '*'}"): + yield event + + +event_bus = WebhookEventBus() + + +async def publish_webhook_event(event: WebhookEvent): + await event_bus.publish(event) + + +async def listen_for_webhook_event( + webhook_id: str, event_type: Optional[str] = None +) -> WebhookEvent | None: + async for event in event_bus.listen(webhook_id, event_type): + return event # Only one event is expected diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py index 4068bdcfa..9a988133e 100644 --- a/autogpt_platform/backend/backend/data/model.py +++ b/autogpt_platform/backend/backend/data/model.py @@ -113,6 +113,7 @@ def SchemaField( advanced: Optional[bool] = None, secret: bool = False, exclude: bool = False, + hidden: Optional[bool] = None, **kwargs, ) -> T: json_extra = { @@ -121,6 +122,7 @@ def SchemaField( "placeholder": placeholder, "secret": secret, "advanced": advanced, + "hidden": hidden, }.items() if v is not None } diff --git a/autogpt_platform/backend/backend/data/queue.py b/autogpt_platform/backend/backend/data/queue.py index 3b3db57ec..b6fa72d53 100644 --- a/autogpt_platform/backend/backend/data/queue.py +++ b/autogpt_platform/backend/backend/data/queue.py @@ -9,11 +9,8 @@ from 
redis.asyncio.client import PubSub as AsyncPubSub from redis.client import PubSub from backend.data import redis -from backend.data.execution import ExecutionResult -from backend.util.settings import Config logger = logging.getLogger(__name__) -config = Config() class DateTimeEncoder(json.JSONEncoder): @@ -36,7 +33,7 @@ class BaseRedisEventBus(Generic[M], ABC): def _serialize_message(self, item: M, channel_key: str) -> tuple[str, str]: message = json.dumps(item.model_dump(), cls=DateTimeEncoder) - channel_name = f"{self.event_bus_name}-{channel_key}" + channel_name = f"{self.event_bus_name}/{channel_key}" logger.info(f"[{channel_name}] Publishing an event to Redis {message}") return message, channel_name @@ -54,7 +51,7 @@ class BaseRedisEventBus(Generic[M], ABC): def _subscribe( self, connection: redis.Redis | redis.AsyncRedis, channel_key: str ) -> tuple[PubSub | AsyncPubSub, str]: - channel_name = f"{self.event_bus_name}-{channel_key}" + channel_name = f"{self.event_bus_name}/{channel_key}" pubsub = connection.pubsub() return pubsub, channel_name @@ -108,37 +105,3 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC): async for message in pubsub.listen(): if event := self._deserialize_message(message, channel_key): yield event - - -class RedisExecutionEventBus(RedisEventBus[ExecutionResult]): - Model = ExecutionResult - - @property - def event_bus_name(self) -> str: - return config.execution_event_bus_name - - def publish(self, res: ExecutionResult): - self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}") - - def listen( - self, graph_id: str = "*", graph_exec_id: str = "*" - ) -> Generator[ExecutionResult, None, None]: - for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"): - yield execution_result - - -class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]): - Model = ExecutionResult - - @property - def event_bus_name(self) -> str: - return config.execution_event_bus_name - - async def publish(self, res: ExecutionResult): - await self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}") - - async def listen( - self, graph_id: str = "*", graph_exec_id: str = "*" - ) -> AsyncGenerator[ExecutionResult, None]: - async for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"): - yield execution_result diff --git a/autogpt_platform/backend/backend/executor/database.py b/autogpt_platform/backend/backend/executor/database.py index 2597429b3..4016363c1 100644 --- a/autogpt_platform/backend/backend/executor/database.py +++ b/autogpt_platform/backend/backend/executor/database.py @@ -4,6 +4,7 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, ca from backend.data.credit import get_user_credit_model from backend.data.execution import ( ExecutionResult, + RedisExecutionEventBus, create_graph_execution, get_execution_results, get_incomplete_executions, @@ -15,14 +16,13 @@ from backend.data.execution import ( upsert_execution_output, ) from backend.data.graph import get_graph, get_node -from backend.data.queue import RedisExecutionEventBus from backend.data.user import ( get_user_integrations, get_user_metadata, update_user_integrations, update_user_metadata, ) -from backend.util.service import AppService, expose +from backend.util.service import AppService, expose, register_pydantic_serializers from backend.util.settings import Config P = ParamSpec("P") @@ -56,6 +56,9 @@ class DatabaseManager(AppService): res = self.run_and_wait(coroutine) return res + # Register serializers for annotations on bare 
function + register_pydantic_serializers(f) + return wrapper # Executions diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py index 2d1e2d223..46cb554db 100644 --- a/autogpt_platform/backend/backend/executor/manager.py +++ b/autogpt_platform/backend/backend/executor/manager.py @@ -30,7 +30,7 @@ from backend.data.execution import ( merge_execution_input, parse_execution_output, ) -from backend.data.graph import Graph, Link, Node +from backend.data.graph import GraphModel, Link, Node from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.util import json @@ -186,7 +186,7 @@ def execute_node( input_data, **extra_exec_kwargs ): output_size += len(json.dumps(output_data)) - log_metadata.info("Node produced output", output_name=output_data) + log_metadata.info("Node produced output", **{output_name: output_data}) db_client.upsert_execution_output(node_exec_id, output_name, output_data) for execution in _enqueue_next_nodes( @@ -253,7 +253,6 @@ def _enqueue_next_nodes( graph_id: str, log_metadata: LogMetadata, ) -> list[NodeExecution]: - def add_enqueued_execution( node_exec_id: str, node_id: str, data: BlockInput ) -> NodeExecution: @@ -713,7 +712,6 @@ class Executor: class ExecutionManager(AppService): - def __init__(self): super().__init__() self.use_redis = True @@ -775,7 +773,7 @@ class ExecutionManager(AppService): user_id: str, graph_version: int | None = None, ) -> GraphExecution: - graph: Graph | None = self.db_client.get_graph( + graph: GraphModel | None = self.db_client.get_graph( graph_id=graph_id, user_id=user_id, version=graph_version ) if not graph: @@ -799,6 +797,15 @@ class ExecutionManager(AppService): if name and name in data: input_data = {"value": data[name]} + # Extract webhook payload, and assign it to the input pin + webhook_payload_key = f"webhook_{node.webhook_id}_payload" + if ( + block.block_type == BlockType.WEBHOOK + and node.webhook_id + and webhook_payload_key in data + ): + input_data = {"payload": data[webhook_payload_key]} + input_data, error = validate_exec(node, input_data) if input_data is None: raise ValueError(error) @@ -876,7 +883,7 @@ class ExecutionManager(AppService): ) self.db_client.send_execution_update(exec_update) - def _validate_node_input_credentials(self, graph: Graph, user_id: str): + def _validate_node_input_credentials(self, graph: GraphModel, user_id: str): """Checks all credentials for all nodes of the graph""" for node in graph.nodes: diff --git a/autogpt_platform/backend/backend/integrations/creds_manager.py b/autogpt_platform/backend/backend/integrations/creds_manager.py index 96f9d1a3c..0fee2e3a8 100644 --- a/autogpt_platform/backend/backend/integrations/creds_manager.py +++ b/autogpt_platform/backend/backend/integrations/creds_manager.py @@ -11,6 +11,7 @@ from redis.lock import Lock as RedisLock from backend.data import redis from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler +from backend.util.exceptions import MissingConfigError from backend.util.settings import Settings logger = logging.getLogger(__name__) @@ -157,12 +158,14 @@ def _get_provider_oauth_handler(provider_name: str) -> BaseOAuthHandler: client_id = getattr(settings.secrets, f"{provider_name}_client_id") client_secret = getattr(settings.secrets, f"{provider_name}_client_secret") if not (client_id and client_secret): - raise Exception( # TODO: ConfigError + raise 
MissingConfigError( f"Integration with provider '{provider_name}' is not configured", ) handler_class = HANDLERS_BY_NAME[provider_name] - frontend_base_url = settings.config.frontend_base_url + frontend_base_url = ( + settings.config.frontend_base_url or settings.config.platform_base_url + ) return handler_class( client_id=client_id, client_secret=client_secret, diff --git a/autogpt_platform/backend/backend/integrations/providers.py b/autogpt_platform/backend/backend/integrations/providers.py new file mode 100644 index 000000000..b38becf5c --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/providers.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class ProviderName(str, Enum): + GITHUB = "github" + GOOGLE = "google" + NOTION = "notion" diff --git a/autogpt_platform/backend/backend/integrations/webhooks/__init__.py b/autogpt_platform/backend/backend/integrations/webhooks/__init__.py new file mode 100644 index 000000000..14d1f7216 --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/webhooks/__init__.py @@ -0,0 +1,17 @@ +from typing import TYPE_CHECKING + +from .github import GithubWebhooksManager + +if TYPE_CHECKING: + from .base import BaseWebhooksManager + +# --8<-- [start:WEBHOOK_MANAGERS_BY_NAME] +WEBHOOK_MANAGERS_BY_NAME: dict[str, type["BaseWebhooksManager"]] = { + handler.PROVIDER_NAME: handler + for handler in [ + GithubWebhooksManager, + ] +} +# --8<-- [end:WEBHOOK_MANAGERS_BY_NAME] + +__all__ = ["WEBHOOK_MANAGERS_BY_NAME"] diff --git a/autogpt_platform/backend/backend/integrations/webhooks/base.py b/autogpt_platform/backend/backend/integrations/webhooks/base.py new file mode 100644 index 000000000..b30f419a0 --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/webhooks/base.py @@ -0,0 +1,163 @@ +import logging +import secrets +from abc import ABC, abstractmethod +from typing import ClassVar, Generic, TypeVar +from uuid import uuid4 + +from autogpt_libs.supabase_integration_credentials_store import Credentials +from fastapi import Request +from strenum import StrEnum + +from backend.data import integrations +from backend.util.exceptions import MissingConfigError +from backend.util.settings import Config + +logger = logging.getLogger(__name__) +app_config = Config() + +WT = TypeVar("WT", bound=StrEnum) + + +class BaseWebhooksManager(ABC, Generic[WT]): + # --8<-- [start:BaseWebhooksManager1] + PROVIDER_NAME: ClassVar[str] + # --8<-- [end:BaseWebhooksManager1] + + WebhookType: WT + + async def get_suitable_webhook( + self, + user_id: str, + credentials: Credentials, + webhook_type: WT, + resource: str, + events: list[str], + ) -> integrations.Webhook: + if not app_config.platform_base_url: + raise MissingConfigError( + "PLATFORM_BASE_URL must be set to use Webhook functionality" + ) + + if webhook := await integrations.find_webhook( + credentials.id, webhook_type, resource, events + ): + return webhook + return await self._create_webhook( + user_id, credentials, webhook_type, resource, events + ) + + async def prune_webhook_if_dangling( + self, webhook_id: str, credentials: Credentials + ) -> bool: + webhook = await integrations.get_webhook(webhook_id) + if webhook.attached_nodes is None: + raise ValueError("Error retrieving webhook including attached nodes") + if webhook.attached_nodes: + # Don't prune webhook if in use + return False + + await self._deregister_webhook(webhook, credentials) + await integrations.delete_webhook(webhook.id) + return True + + # --8<-- [start:BaseWebhooksManager3] + @classmethod + @abstractmethod + async def 
validate_payload( + cls, webhook: integrations.Webhook, request: Request + ) -> tuple[dict, str]: + """ + Validates an incoming webhook request and returns its payload and type. + + Params: + webhook: Object representing the configured webhook and its properties in our system. + request: Incoming FastAPI `Request` + + Returns: + dict: The validated payload + str: The event type associated with the payload + """ + + # --8<-- [end:BaseWebhooksManager3] + + # --8<-- [start:BaseWebhooksManager5] + async def trigger_ping(self, webhook: integrations.Webhook) -> None: + """ + Triggers a ping to the given webhook. + + Raises: + NotImplementedError: if the provider doesn't support pinging + """ + # --8<-- [end:BaseWebhooksManager5] + raise NotImplementedError(f"{self.__class__.__name__} doesn't support pinging") + + # --8<-- [start:BaseWebhooksManager2] + @abstractmethod + async def _register_webhook( + self, + credentials: Credentials, + webhook_type: WT, + resource: str, + events: list[str], + ingress_url: str, + secret: str, + ) -> tuple[str, dict]: + """ + Registers a new webhook with the provider. + + Params: + credentials: The credentials with which to create the webhook + webhook_type: The provider-specific webhook type to create + resource: The resource to receive events for + events: The events to subscribe to + ingress_url: The ingress URL for webhook payloads + secret: Secret used to verify webhook payloads + + Returns: + str: Webhook ID assigned by the provider + config: Provider-specific configuration for the webhook + """ + ... + + # --8<-- [end:BaseWebhooksManager2] + + # --8<-- [start:BaseWebhooksManager4] + @abstractmethod + async def _deregister_webhook( + self, webhook: integrations.Webhook, credentials: Credentials + ) -> None: ... + + # --8<-- [end:BaseWebhooksManager4] + + async def _create_webhook( + self, + user_id: str, + credentials: Credentials, + webhook_type: WT, + resource: str, + events: list[str], + ) -> integrations.Webhook: + id = str(uuid4()) + secret = secrets.token_hex(32) + provider_name = self.PROVIDER_NAME + ingress_url = ( + f"{app_config.platform_base_url}/api/integrations/{provider_name}" + f"/webhooks/{id}/ingress" + ) + provider_webhook_id, config = await self._register_webhook( + credentials, webhook_type, resource, events, ingress_url, secret + ) + return await integrations.create_webhook( + integrations.Webhook( + id=id, + user_id=user_id, + provider=provider_name, + credentials_id=credentials.id, + webhook_type=webhook_type, + resource=resource, + events=events, + provider_webhook_id=provider_webhook_id, + config=config, + secret=secret, + ) + ) diff --git a/autogpt_platform/backend/backend/integrations/webhooks/github.py b/autogpt_platform/backend/backend/integrations/webhooks/github.py new file mode 100644 index 000000000..25152caff --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/webhooks/github.py @@ -0,0 +1,175 @@ +import hashlib +import hmac +import logging + +import requests +from autogpt_libs.supabase_integration_credentials_store import Credentials +from fastapi import HTTPException, Request +from strenum import StrEnum + +from backend.data import integrations + +from .base import BaseWebhooksManager + +logger = logging.getLogger(__name__) + + +# --8<-- [start:GithubWebhooksManager] +class GithubWebhookType(StrEnum): + REPO = "repo" + + +class GithubWebhooksManager(BaseWebhooksManager): + PROVIDER_NAME = "github" + + WebhookType = GithubWebhookType + + GITHUB_API_URL = "https://api.github.com" + GITHUB_API_DEFAULT_HEADERS = 
{"Accept": "application/vnd.github.v3+json"} + + @classmethod + async def validate_payload( + cls, webhook: integrations.Webhook, request: Request + ) -> tuple[dict, str]: + if not (event_type := request.headers.get("X-GitHub-Event")): + raise HTTPException( + status_code=400, detail="X-GitHub-Event header is missing!" + ) + + if not (signature_header := request.headers.get("X-Hub-Signature-256")): + raise HTTPException( + status_code=403, detail="X-Hub-Signature-256 header is missing!" + ) + + payload_body = await request.body() + hash_object = hmac.new( + webhook.secret.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256 + ) + expected_signature = "sha256=" + hash_object.hexdigest() + + if not hmac.compare_digest(expected_signature, signature_header): + raise HTTPException( + status_code=403, detail="Request signatures didn't match!" + ) + + payload = await request.json() + if action := payload.get("action"): + event_type += f".{action}" + + return payload, event_type + + async def trigger_ping(self, webhook: integrations.Webhook) -> None: + headers = { + **self.GITHUB_API_DEFAULT_HEADERS, + "Authorization": f"Bearer {webhook.config.get('access_token')}", + } + + repo, github_hook_id = webhook.resource, webhook.provider_webhook_id + ping_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{github_hook_id}/pings" + + response = requests.post(ping_url, headers=headers) + + if response.status_code != 204: + error_msg = extract_github_error_msg(response) + raise ValueError(f"Failed to ping GitHub webhook: {error_msg}") + + async def _register_webhook( + self, + credentials: Credentials, + webhook_type: GithubWebhookType, + resource: str, + events: list[str], + ingress_url: str, + secret: str, + ) -> tuple[str, dict]: + if webhook_type == self.WebhookType.REPO and resource.count("/") > 1: + raise ValueError("Invalid repo format: expected 'owner/repo'") + + # Extract main event, e.g. 
`pull_request.opened` -> `pull_request` + github_events = list({event.split(".")[0] for event in events}) + + headers = { + **self.GITHUB_API_DEFAULT_HEADERS, + "Authorization": credentials.bearer(), + } + webhook_data = { + "name": "web", + "active": True, + "events": github_events, + "config": { + "url": ingress_url, + "content_type": "json", + "insecure_ssl": "0", + "secret": secret, + }, + } + + response = requests.post( + f"{self.GITHUB_API_URL}/repos/{resource}/hooks", + headers=headers, + json=webhook_data, + ) + + if response.status_code != 201: + error_msg = extract_github_error_msg(response) + if "not found" in error_msg.lower(): + error_msg = ( + f"{error_msg} " + "(Make sure the GitHub account or API key has 'repo' or " + f"webhook create permissions to '{resource}')" + ) + raise ValueError(f"Failed to create GitHub webhook: {error_msg}") + + webhook_id = response.json()["id"] + config = response.json()["config"] + + return str(webhook_id), config + + async def _deregister_webhook( + self, webhook: integrations.Webhook, credentials: Credentials + ) -> None: + webhook_type = self.WebhookType(webhook.webhook_type) + if webhook.credentials_id != credentials.id: + raise ValueError( + f"Webhook #{webhook.id} does not belong to credentials {credentials.id}" + ) + + headers = { + **self.GITHUB_API_DEFAULT_HEADERS, + "Authorization": credentials.bearer(), + } + + if webhook_type == self.WebhookType.REPO: + repo = webhook.resource + delete_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{webhook.provider_webhook_id}" # noqa + else: + raise NotImplementedError( + f"Unsupported webhook type '{webhook.webhook_type}'" + ) + + response = requests.delete(delete_url, headers=headers) + + if response.status_code not in [204, 404]: + # 204 means successful deletion, 404 means the webhook was already deleted + error_msg = extract_github_error_msg(response) + raise ValueError(f"Failed to delete GitHub webhook: {error_msg}") + + # If we reach here, the webhook was successfully deleted or didn't exist + + +# --8<-- [end:GithubWebhooksManager] + + +def extract_github_error_msg(response: requests.Response) -> str: + error_msgs = [] + resp = response.json() + if resp.get("message"): + error_msgs.append(resp["message"]) + if resp.get("errors"): + error_msgs.extend(f"* {err.get('message', err)}" for err in resp["errors"]) + if resp.get("error"): + if isinstance(resp["error"], dict): + error_msgs.append(resp["error"].get("message", resp["error"])) + else: + error_msgs.append(resp["error"]) + return "\n".join(error_msgs) diff --git a/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py b/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py new file mode 100644 index 000000000..1f6351d5f --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py @@ -0,0 +1,198 @@ +import logging +from typing import TYPE_CHECKING, Callable, Optional, cast + +from backend.data.block import get_block +from backend.data.graph import set_node_webhook +from backend.data.model import CREDENTIALS_FIELD_NAME +from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME + +if TYPE_CHECKING: + from autogpt_libs.supabase_integration_credentials_store.types import Credentials + + from backend.data.graph import GraphModel, NodeModel + + from .base import BaseWebhooksManager + +logger = logging.getLogger(__name__) + + +async def on_graph_activate( + graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"] +): + """ + 
Hook to be called when a graph is activated/created.
+
+    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
+    this hook calls `on_node_activate` on all nodes in this graph.
+
+    Params:
+        get_credentials: `credentials_id` -> Credentials
+    """
+    # Compare nodes in new_graph_version with previous_graph_version
+    updated_nodes = []
+    for new_node in graph.nodes:
+        node_credentials = None
+        if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
+            node_credentials = get_credentials(creds_meta["id"])
+            if not node_credentials:
+                raise ValueError(
+                    f"Node #{new_node.id} updated with non-existent "
+                    f"credentials #{creds_meta['id']}"
+                )
+
+        updated_node = await on_node_activate(
+            graph.user_id, new_node, credentials=node_credentials
+        )
+        updated_nodes.append(updated_node)
+
+    graph.nodes = updated_nodes
+    return graph
+
+
+async def on_graph_deactivate(
+    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
+):
+    """
+    Hook to be called when a graph is deactivated/deleted.
+
+    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
+    this hook calls `on_node_deactivate` on all nodes in `graph`.
+
+    Params:
+        get_credentials: `credentials_id` -> Credentials
+    """
+    updated_nodes = []
+    for node in graph.nodes:
+        node_credentials = None
+        if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
+            node_credentials = get_credentials(creds_meta["id"])
+            if not node_credentials:
+                logger.error(
+                    f"Node #{node.id} referenced non-existent "
+                    f"credentials #{creds_meta['id']}"
+                )
+
+        updated_node = await on_node_deactivate(node, credentials=node_credentials)
+        updated_nodes.append(updated_node)
+
+    graph.nodes = updated_nodes
+    return graph
+
+
+async def on_node_activate(
+    user_id: str,
+    node: "NodeModel",
+    *,
+    credentials: Optional["Credentials"] = None,
+) -> "NodeModel":
+    """Hook to be called when the node is activated/created"""
+
+    block = get_block(node.block_id)
+    if not block:
+        raise ValueError(
+            f"Node #{node.id} is instance of unknown block #{node.block_id}"
+        )
+
+    if not block.webhook_config:
+        return node
+
+    logger.debug(
+        f"Activating webhook node #{node.id} with config {block.webhook_config}"
+    )
+
+    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()
+
+    try:
+        resource = block.webhook_config.resource_format.format(**node.input_default)
+    except KeyError:
+        resource = None
+    logger.debug(
+        f"Constructed resource string {resource} from input {node.input_default}"
+    )
+
+    event_filter_input_name = block.webhook_config.event_filter_input
+    has_everything_for_webhook = (
+        resource is not None
+        and CREDENTIALS_FIELD_NAME in node.input_default
+        and event_filter_input_name in node.input_default
+        and any(is_on for is_on in node.input_default[event_filter_input_name].values())
+    )
+
+    if has_everything_for_webhook and resource:
+        logger.debug(f"Node #{node} has everything for a webhook!")
+        if not credentials:
+            credentials_meta = node.input_default[CREDENTIALS_FIELD_NAME]
+            raise ValueError(
+                f"Cannot set up webhook for node #{node.id}: "
+                f"credentials #{credentials_meta['id']} not available"
+            )
+
+        # Shape of the event filter is enforced in Block.__init__
+        event_filter = cast(dict, node.input_default[event_filter_input_name])
+        events = [
+            block.webhook_config.event_format.format(event=event)
+            for event, enabled in event_filter.items()
+            if enabled is True
+        ]
+        logger.debug(f"Webhook events to subscribe to: {', '.join(events)}")
+
+        # Find/make and attach a suitable
webhook to the node + new_webhook = await webhooks_manager.get_suitable_webhook( + user_id, + credentials, + block.webhook_config.webhook_type, + resource, + events, + ) + logger.debug(f"Acquired webhook: {new_webhook}") + return await set_node_webhook(node.id, new_webhook.id) + + return node + + +async def on_node_deactivate( + node: "NodeModel", + *, + credentials: Optional["Credentials"] = None, + webhooks_manager: Optional["BaseWebhooksManager"] = None, +) -> "NodeModel": + """Hook to be called when node is deactivated/deleted""" + + logger.debug(f"Deactivating node #{node.id}") + block = get_block(node.block_id) + if not block: + raise ValueError( + f"Node #{node.id} is instance of unknown block #{node.block_id}" + ) + + if not block.webhook_config: + return node + + webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]() + + if node.webhook_id: + logger.debug(f"Node #{node.id} has webhook_id {node.webhook_id}") + if not node.webhook: + logger.error(f"Node #{node.id} has webhook_id but no webhook object") + raise ValueError("node.webhook not included") + + # Detach webhook from node + logger.debug(f"Detaching webhook from node #{node.id}") + updated_node = await set_node_webhook(node.id, None) + + # Prune and deregister the webhook if it is no longer used anywhere + logger.debug("Pruning and deregistering webhook if dangling") + webhook = node.webhook + if credentials: + logger.debug(f"Pruning webhook #{webhook.id} with credentials") + await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials) + else: + logger.warning( + f"Cannot deregister webhook #{webhook.id}: credentials " + f"#{webhook.credentials_id} not available " + f"({webhook.provider} webhook ID: {webhook.provider_webhook_id})" + ) + return updated_node + + logger.debug(f"Node #{node.id} has no webhook_id, returning") + return node diff --git a/autogpt_platform/backend/backend/server/integrations/router.py b/autogpt_platform/backend/backend/server/integrations/router.py index 1e3d01e0b..ecf28cedd 100644 --- a/autogpt_platform/backend/backend/server/integrations/router.py +++ b/autogpt_platform/backend/backend/server/integrations/router.py @@ -10,8 +10,20 @@ from autogpt_libs.supabase_integration_credentials_store.types import ( from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request from pydantic import BaseModel, Field, SecretStr +from backend.data.graph import set_node_webhook +from backend.data.integrations import ( + WebhookEvent, + get_all_webhooks, + get_webhook, + listen_for_webhook_event, + publish_webhook_event, +) +from backend.executor.manager import ExecutionManager from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler +from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME +from backend.util.exceptions import NeedConfirmation +from backend.util.service import get_service_client from backend.util.settings import Settings from ..utils import get_user_id @@ -183,13 +195,22 @@ class CredentialsDeletionResponse(BaseModel): ) +class CredentialsDeletionNeedsConfirmationResponse(BaseModel): + deleted: Literal[False] = False + need_confirmation: Literal[True] = True + message: str + + @router.delete("/{provider}/credentials/{cred_id}") -def delete_credentials( +async def delete_credentials( request: Request, provider: Annotated[str, Path(title="The provider to delete credentials for")], cred_id: Annotated[str, Path(title="The ID of the credentials to 
delete")], user_id: Annotated[str, Depends(get_user_id)], -) -> CredentialsDeletionResponse: + force: Annotated[ + bool, Query(title="Whether to proceed if any linked webhooks are still in use") + ] = False, +) -> CredentialsDeletionResponse | CredentialsDeletionNeedsConfirmationResponse: creds = creds_manager.store.get_creds_by_id(user_id, cred_id) if not creds: raise HTTPException(status_code=404, detail="Credentials not found") @@ -198,6 +219,11 @@ def delete_credentials( status_code=404, detail="Credentials do not match the specified provider" ) + try: + await remove_all_webhooks_for_credentials(creds, force) + except NeedConfirmation as e: + return CredentialsDeletionNeedsConfirmationResponse(message=str(e)) + creds_manager.delete(user_id, cred_id) tokens_revoked = None @@ -208,7 +234,98 @@ def delete_credentials( return CredentialsDeletionResponse(revoked=tokens_revoked) -# -------- UTILITIES --------- # +# ------------------------- WEBHOOK STUFF -------------------------- # + + +# ⚠️ Note +# No user auth check because this endpoint is for webhook ingress and relies on +# validation by the provider-specific `WebhooksManager`. +@router.post("/{provider}/webhooks/{webhook_id}/ingress") +async def webhook_ingress_generic( + request: Request, + provider: Annotated[str, Path(title="Provider where the webhook was registered")], + webhook_id: Annotated[str, Path(title="Our ID for the webhook")], +): + logger.debug(f"Received {provider} webhook ingress for ID {webhook_id}") + webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]() + webhook = await get_webhook(webhook_id) + logger.debug(f"Webhook #{webhook_id}: {webhook}") + payload, event_type = await webhook_manager.validate_payload(webhook, request) + logger.debug(f"Validated {provider} {event_type} event with payload {payload}") + + webhook_event = WebhookEvent( + provider=provider, + webhook_id=webhook_id, + event_type=event_type, + payload=payload, + ) + await publish_webhook_event(webhook_event) + logger.debug(f"Webhook event published: {webhook_event}") + + if not webhook.attached_nodes: + return + + executor = get_service_client(ExecutionManager) + for node in webhook.attached_nodes: + logger.debug(f"Webhook-attached node: {node}") + if not node.is_triggered_by_event_type(event_type): + logger.debug(f"Node #{node.id} doesn't trigger on event {event_type}") + continue + logger.debug(f"Executing graph #{node.graph_id} node #{node.id}") + executor.add_execution( + node.graph_id, + data={f"webhook_{webhook_id}_payload": payload}, + user_id=webhook.user_id, + ) + + +@router.post("/{provider}/webhooks/{webhook_id}/ping") +async def webhook_ping( + provider: Annotated[str, Path(title="Provider where the webhook was registered")], + webhook_id: Annotated[str, Path(title="Our ID for the webhook")], + user_id: Annotated[str, Depends(get_user_id)], # require auth +): + webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]() + webhook = await get_webhook(webhook_id) + + await webhook_manager.trigger_ping(webhook) + if not await listen_for_webhook_event(webhook_id, event_type="ping"): + raise HTTPException(status_code=500, detail="Webhook ping event not received") + + +# --------------------------- UTILITIES ---------------------------- # + + +async def remove_all_webhooks_for_credentials( + credentials: Credentials, force: bool = False +) -> None: + """ + Remove and deregister all webhooks that were registered using the given credentials. + + Params: + credentials: The credentials for which to remove the associated webhooks. 
+ force: Whether to proceed if any of the webhooks are still in use. + + Raises: + NeedConfirmation: If any of the webhooks are still in use and `force` is `False` + """ + webhooks = await get_all_webhooks(credentials.id) + if any(w.attached_nodes for w in webhooks) and not force: + raise NeedConfirmation( + "Some webhooks linked to these credentials are still in use by an agent" + ) + for webhook in webhooks: + # Unlink all nodes + for node in webhook.attached_nodes or []: + await set_node_webhook(node.id, None) + + # Prune the webhook + webhook_manager = WEBHOOK_MANAGERS_BY_NAME[credentials.provider]() + success = await webhook_manager.prune_webhook_if_dangling( + webhook.id, credentials + ) + if not success: + logger.warning(f"Webhook #{webhook.id} failed to prune") def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler: @@ -226,7 +343,11 @@ def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHa ) handler_class = HANDLERS_BY_NAME[provider_name] - frontend_base_url = settings.config.frontend_base_url or str(req.base_url) + frontend_base_url = ( + settings.config.frontend_base_url + or settings.config.platform_base_url + or str(req.base_url) + ) return handler_class( client_id=client_id, client_secret=client_secret, diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 59b7f04af..06e7dc64a 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -29,21 +29,6 @@ async def lifespan_context(app: fastapi.FastAPI): await backend.data.db.disconnect() -def handle_internal_http_error(status_code: int = 500, log_error: bool = True): - def handler(request: fastapi.Request, exc: Exception): - if log_error: - logger.exception(f"{request.method} {request.url.path} failed: {exc}") - return fastapi.responses.JSONResponse( - content={ - "message": f"{request.method} {request.url.path} failed", - "detail": str(exc), - }, - status_code=status_code, - ) - - return handler - - docs_url = ( "/docs" if settings.config.app_env == backend.util.settings.AppEnvironment.LOCAL @@ -62,8 +47,24 @@ app = fastapi.FastAPI( docs_url=docs_url, ) + +def handle_internal_http_error(status_code: int = 500, log_error: bool = True): + def handler(request: fastapi.Request, exc: Exception): + if log_error: + logger.exception(f"{request.method} {request.url.path} failed: {exc}") + return fastapi.responses.JSONResponse( + content={ + "message": f"{request.method} {request.url.path} failed", + "detail": str(exc), + }, + status_code=status_code, + ) + + return handler + + app.add_exception_handler(ValueError, handle_internal_http_error(400)) -app.add_exception_handler(500, handle_internal_http_error(500)) +app.add_exception_handler(Exception, handle_internal_http_error(500)) app.include_router(backend.server.routers.v1.v1_router, tags=["v1"]) diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/server/routers/v1.py index a44f25c1a..f1f1fda38 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/server/routers/v1.py @@ -1,7 +1,7 @@ import asyncio import logging from collections import defaultdict -from typing import Annotated, Any, List +from typing import TYPE_CHECKING, Annotated, Any, Sequence import pydantic from autogpt_libs.auth.middleware import auth_middleware @@ -30,6 +30,11 @@ from backend.data.block import BlockInput, 
CompletedBlockOutput from backend.data.credit import get_block_costs, get_user_credit_model from backend.data.user import get_or_create_user from backend.executor import ExecutionManager, ExecutionScheduler, scheduler +from backend.integrations.creds_manager import IntegrationCredentialsManager +from backend.integrations.webhooks.graph_lifecycle_hooks import ( + on_graph_activate, + on_graph_deactivate, +) from backend.server.model import ( CreateAPIKeyRequest, CreateAPIKeyResponse, @@ -41,6 +46,9 @@ from backend.server.utils import get_user_id from backend.util.service import get_service_client from backend.util.settings import Settings +if TYPE_CHECKING: + from autogpt_libs.supabase_integration_credentials_store.types import Credentials + @thread_cached def execution_manager_client() -> ExecutionManager: @@ -54,6 +62,7 @@ def execution_scheduler_client() -> ExecutionScheduler: settings = Settings() logger = logging.getLogger(__name__) +integration_creds_manager = IntegrationCredentialsManager() _user_credit_model = get_user_credit_model() @@ -62,14 +71,10 @@ _user_credit_model = get_user_credit_model() v1_router = APIRouter(prefix="/api") -v1_router.dependencies.append(Depends(auth_middleware)) - - v1_router.include_router( backend.server.integrations.router.router, prefix="/integrations", tags=["integrations"], - dependencies=[Depends(auth_middleware)], ) v1_router.include_router( @@ -97,13 +102,17 @@ async def get_or_create_user_route(user_data: dict = Depends(auth_middleware)): @v1_router.get(path="/blocks", tags=["blocks"], dependencies=[Depends(auth_middleware)]) -def get_graph_blocks() -> list[dict[Any, Any]]: +def get_graph_blocks() -> Sequence[dict[Any, Any]]: blocks = [block() for block in backend.data.block.get_blocks().values()] costs = get_block_costs() return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks] -@v1_router.post(path="/blocks/{block_id}/execute", tags=["blocks"]) +@v1_router.post( + path="/blocks/{block_id}/execute", + tags=["blocks"], + dependencies=[Depends(auth_middleware)], +) def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput: obj = backend.data.block.get_block(block_id) if not obj: @@ -141,7 +150,7 @@ class DeleteGraphResponse(TypedDict): async def get_graphs( user_id: Annotated[str, Depends(get_user_id)], with_runs: bool = False, -) -> list[graph_db.Graph]: +) -> Sequence[graph_db.Graph]: return await graph_db.get_graphs( include_executions=with_runs, filter_by="active", user_id=user_id ) @@ -181,13 +190,61 @@ async def get_graph( ) async def get_graph_all_versions( graph_id: str, user_id: Annotated[str, Depends(get_user_id)] -) -> list[graph_db.Graph]: +) -> Sequence[graph_db.Graph]: graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id) if not graphs: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") return graphs +@v1_router.post( + path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)] +) +async def create_new_graph( + create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)] +) -> graph_db.Graph: + return await do_create_graph(create_graph, is_template=False, user_id=user_id) + + +async def do_create_graph( + create_graph: CreateGraph, + is_template: bool, + # user_id doesn't have to be annotated like on other endpoints, + # because create_graph isn't used directly as an endpoint + user_id: str, +) -> graph_db.Graph: + if create_graph.graph: + graph = graph_db.make_graph_model(create_graph.graph, user_id) + elif 
create_graph.template_id: + # Create a new graph from a template + graph = await graph_db.get_graph( + create_graph.template_id, + create_graph.template_version, + template=True, + user_id=user_id, + ) + if not graph: + raise HTTPException( + 400, detail=f"Template #{create_graph.template_id} not found" + ) + graph.version = 1 + else: + raise HTTPException( + status_code=400, detail="Either graph or template_id must be provided." + ) + + graph.is_template = is_template + graph.is_active = not is_template + graph.reassign_ids(user_id=user_id, reassign_graph_id=True) + + graph = await graph_db.create_graph(graph, user_id=user_id) + graph = await on_graph_activate( + graph, + get_credentials=lambda id: integration_creds_manager.get(user_id, id), + ) + return graph + + @v1_router.delete( path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)] ) @@ -224,33 +281,41 @@ async def update_graph( latest_version_graph = next( v for v in existing_versions if v.version == latest_version_number ) + current_active_version = next((v for v in existing_versions if v.is_active), None) if latest_version_graph.is_template != graph.is_template: raise HTTPException( 400, detail="Changing is_template on an existing graph is forbidden" ) graph.is_active = not graph.is_template + graph = graph_db.make_graph_model(graph, user_id) graph.reassign_ids(user_id=user_id) new_graph_version = await graph_db.create_graph(graph, user_id=user_id) if new_graph_version.is_active: + + def get_credentials(credentials_id: str) -> "Credentials | None": + return integration_creds_manager.get(user_id, credentials_id) + + # Handle activation of the new graph first to ensure continuity + new_graph_version = await on_graph_activate( + new_graph_version, + get_credentials=get_credentials, + ) # Ensure new version is the only active version await graph_db.set_graph_active_version( graph_id=graph_id, version=new_graph_version.version, user_id=user_id ) + if current_active_version: + # Handle deactivation of the previously active version + await on_graph_deactivate( + current_active_version, + get_credentials=get_credentials, + ) return new_graph_version -@v1_router.post( - path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)] -) -async def create_new_graph( - create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)] -) -> graph_db.Graph: - return await do_create_graph(create_graph, is_template=False, user_id=user_id) - - @v1_router.put( path="/graphs/{graph_id}/versions/active", tags=["graphs"], @@ -262,13 +327,34 @@ async def set_graph_active_version( user_id: Annotated[str, Depends(get_user_id)], ): new_active_version = request_body.active_graph_version - if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id): + new_active_graph = await graph_db.get_graph( + graph_id, new_active_version, user_id=user_id + ) + if not new_active_graph: raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found") + + current_active_graph = await graph_db.get_graph(graph_id, user_id=user_id) + + def get_credentials(credentials_id: str) -> "Credentials | None": + return integration_creds_manager.get(user_id, credentials_id) + + # Handle activation of the new graph first to ensure continuity + await on_graph_activate( + new_active_graph, + get_credentials=get_credentials, + ) + # Ensure new version is the only active version await graph_db.set_graph_active_version( graph_id=graph_id, - version=request_body.active_graph_version, + version=new_active_version, 
user_id=user_id, ) + if current_active_graph and current_active_graph.version != new_active_version: + # Handle deactivation of the previously active version + await on_graph_deactivate( + current_active_graph, + get_credentials=get_credentials, + ) @v1_router.post( @@ -298,7 +384,7 @@ def execute_graph( ) async def stop_graph_run( graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)] -) -> list[execution_db.ExecutionResult]: +) -> Sequence[execution_db.ExecutionResult]: if not await execution_db.get_graph_execution(graph_exec_id, user_id): raise HTTPException(404, detail=f"Agent execution #{graph_exec_id} not found") @@ -319,7 +405,7 @@ async def list_graph_runs( graph_id: str, user_id: Annotated[str, Depends(get_user_id)], graph_version: int | None = None, -) -> list[str]: +) -> Sequence[str]: graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id) if not graph: rev = "" if graph_version is None else f" v{graph_version}" @@ -339,7 +425,7 @@ async def get_graph_run_node_execution_results( graph_id: str, graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)], -) -> list[execution_db.ExecutionResult]: +) -> Sequence[execution_db.ExecutionResult]: graph = await graph_db.get_graph(graph_id, user_id=user_id) if not graph: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") @@ -378,7 +464,7 @@ async def get_graph_run_status( ) async def get_templates( user_id: Annotated[str, Depends(get_user_id)] -) -> list[graph_db.Graph]: +) -> Sequence[graph_db.Graph]: return await graph_db.get_graphs(filter_by="template", user_id=user_id) @@ -394,40 +480,6 @@ async def get_template(graph_id: str, version: int | None = None) -> graph_db.Gr return graph -async def do_create_graph( - create_graph: CreateGraph, - is_template: bool, - # user_id doesn't have to be annotated like on other endpoints, - # because create_graph isn't used directly as an endpoint - user_id: str, -) -> graph_db.Graph: - if create_graph.graph: - graph = create_graph.graph - elif create_graph.template_id: - # Create a new graph from a template - graph = await graph_db.get_graph( - create_graph.template_id, - create_graph.template_version, - template=True, - user_id=user_id, - ) - if not graph: - raise HTTPException( - 400, detail=f"Template #{create_graph.template_id} not found" - ) - graph.version = 1 - else: - raise HTTPException( - status_code=400, detail="Either graph or template_id must be provided." 
- ) - - graph.is_template = is_template - graph.is_active = not is_template - graph.reassign_ids(user_id=user_id, reassign_graph_id=True) - - return await graph_db.create_graph(graph, user_id=user_id) - - @v1_router.post( path="/templates", tags=["templates", "graphs"], @@ -534,13 +586,13 @@ async def create_api_key( @v1_router.get( "/api-keys", - response_model=List[APIKeyWithoutHash], + response_model=list[APIKeyWithoutHash], tags=["api-keys"], dependencies=[Depends(auth_middleware)], ) async def get_api_keys( user_id: Annotated[str, Depends(get_user_id)] -) -> List[APIKeyWithoutHash]: +) -> list[APIKeyWithoutHash]: """List all API keys for the user""" try: return await list_user_api_keys(user_id) diff --git a/autogpt_platform/backend/backend/server/ws_api.py b/autogpt_platform/backend/backend/server/ws_api.py index 421a911ab..a6da64b8e 100644 --- a/autogpt_platform/backend/backend/server/ws_api.py +++ b/autogpt_platform/backend/backend/server/ws_api.py @@ -8,7 +8,7 @@ from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect from starlette.middleware.cors import CORSMiddleware from backend.data import redis -from backend.data.queue import AsyncRedisExecutionEventBus +from backend.data.execution import AsyncRedisExecutionEventBus from backend.data.user import DEFAULT_USER_ID from backend.server.conn_manager import ConnectionManager from backend.server.model import ExecutionSubscription, Methods, WsMessage diff --git a/autogpt_platform/backend/backend/util/exceptions.py b/autogpt_platform/backend/backend/util/exceptions.py new file mode 100644 index 000000000..4bb3a08d9 --- /dev/null +++ b/autogpt_platform/backend/backend/util/exceptions.py @@ -0,0 +1,6 @@ +class MissingConfigError(Exception): + """The attempted operation requires configuration which is not available""" + + +class NeedConfirmation(Exception): + """The user must explicitly confirm that they want to proceed""" diff --git a/autogpt_platform/backend/backend/util/service.py b/autogpt_platform/backend/backend/util/service.py index a0b6bde40..dffd8e37a 100644 --- a/autogpt_platform/backend/backend/util/service.py +++ b/autogpt_platform/backend/backend/util/service.py @@ -11,6 +11,7 @@ from types import NoneType, UnionType from typing import ( Annotated, Any, + Awaitable, Callable, Coroutine, Dict, @@ -64,7 +65,13 @@ def expose(func: C) -> C: logger.exception(msg) raise - # Register custom serializers and deserializers for annotated Pydantic models + register_pydantic_serializers(func) + + return pyro.expose(wrapper) # type: ignore + + +def register_pydantic_serializers(func: Callable): + """Register custom serializers and deserializers for annotated Pydantic models""" for name, annotation in func.__annotations__.items(): try: pydantic_types = _pydantic_models_from_type_annotation(annotation) @@ -81,8 +88,6 @@ def expose(func: C) -> C: model.__qualname__, _make_custom_deserializer(model) ) - return pyro.expose(wrapper) # type: ignore - def _make_custom_serializer(model: Type[BaseModel]): def custom_class_to_dict(obj): @@ -252,6 +257,10 @@ def _pydantic_models_from_type_annotation(annotation) -> Iterator[type[BaseModel key_type, value_type = args yield from _pydantic_models_from_type_annotation(key_type) yield from _pydantic_models_from_type_annotation(value_type) + elif origin in (Awaitable, Coroutine): + # For coroutines and awaitables, check the return type + return_type = args[-1] + yield from _pydantic_models_from_type_annotation(return_type) else: annotype = annotation if origin is None else origin diff --git 
a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 98b1b66c8..34ca9336d 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -3,7 +3,7 @@ import os from enum import Enum from typing import Any, Dict, Generic, List, Set, Tuple, Type, TypeVar -from pydantic import BaseModel, Field, PrivateAttr, field_validator +from pydantic import BaseModel, Field, PrivateAttr, ValidationInfo, field_validator from pydantic_settings import ( BaseSettings, JsonConfigSettingsSource, @@ -136,12 +136,32 @@ class Config(UpdateTrackingModel["Config"], BaseSettings): description="The port for agent server API to run on", ) + platform_base_url: str = Field( + default="", + description="Must be set so the application knows where it's hosted at. " + "This is necessary to make sure webhooks find their way.", + ) + frontend_base_url: str = Field( - default="http://localhost:3000", + default="", description="Can be used to explicitly set the base URL for the frontend. " "This value is then used to generate redirect URLs for OAuth flows.", ) + @field_validator("platform_base_url", "frontend_base_url") + @classmethod + def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str: + if not v: + return v + if not v.startswith(("http://", "https://")): + raise ValueError( + f"{info.field_name} must be a full URL " + "including a http:// or https:// schema" + ) + if v.endswith("/"): + return v.rstrip("/") # Remove trailing slash + return v + app_env: AppEnvironment = Field( default=AppEnvironment.LOCAL, description="The name of the app environment: local or dev or prod", diff --git a/autogpt_platform/backend/backend/util/test.py b/autogpt_platform/backend/backend/util/test.py index b9204ec21..37ab2302d 100644 --- a/autogpt_platform/backend/backend/util/test.py +++ b/autogpt_platform/backend/backend/util/test.py @@ -1,9 +1,10 @@ import logging import time +from typing import Sequence from backend.data import db from backend.data.block import Block, initialize_blocks -from backend.data.execution import ExecutionStatus +from backend.data.execution import ExecutionResult, ExecutionStatus from backend.data.model import CREDENTIALS_FIELD_NAME from backend.data.user import create_default_user from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler @@ -57,7 +58,7 @@ async def wait_execution( graph_id: str, graph_exec_id: str, timeout: int = 20, -) -> list: +) -> Sequence[ExecutionResult]: async def is_execution_completed(): status = await AgentServer().test_get_graph_run_status( graph_id, graph_exec_id, user_id diff --git a/autogpt_platform/backend/migrations/20241017180251_add_webhooks_and_their_relation_to_nodes/migration.sql b/autogpt_platform/backend/migrations/20241017180251_add_webhooks_and_their_relation_to_nodes/migration.sql new file mode 100644 index 000000000..011a017c8 --- /dev/null +++ b/autogpt_platform/backend/migrations/20241017180251_add_webhooks_and_their_relation_to_nodes/migration.sql @@ -0,0 +1,26 @@ +-- AlterTable +ALTER TABLE "AgentNode" ADD COLUMN "webhookId" TEXT; + +-- CreateTable +CREATE TABLE "IntegrationWebhook" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3), + "userId" TEXT NOT NULL, + "provider" TEXT NOT NULL, + "credentialsId" TEXT NOT NULL, + "webhookType" TEXT NOT NULL, + "resource" TEXT NOT NULL, + "events" TEXT[], + "config" JSONB NOT NULL, + "secret" TEXT NOT NULL, + 
"providerWebhookId" TEXT NOT NULL, + + CONSTRAINT "IntegrationWebhook_pkey" PRIMARY KEY ("id") +); + +-- AddForeignKey +ALTER TABLE "AgentNode" ADD CONSTRAINT "AgentNode_webhookId_fkey" FOREIGN KEY ("webhookId") REFERENCES "IntegrationWebhook"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "IntegrationWebhook" ADD CONSTRAINT "IntegrationWebhook_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 39829b8a2..df3198870 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -23,6 +23,7 @@ model User { // Relations AgentGraphs AgentGraph[] AgentGraphExecutions AgentGraphExecution[] + IntegrationWebhooks IntegrationWebhook[] AnalyticsDetails AnalyticsDetails[] AnalyticsMetrics AnalyticsMetrics[] UserBlockCredit UserBlockCredit[] @@ -74,6 +75,10 @@ model AgentNode { // JSON serialized dict[str, str] containing predefined input values. constantInput String @default("{}") + // For webhook-triggered blocks: reference to the webhook that triggers the node + webhookId String? + Webhook IntegrationWebhook? @relation(fields: [webhookId], references: [id]) + // JSON serialized dict[str, str] containing the node metadata. metadata String @default("{}") @@ -186,6 +191,28 @@ model AgentNodeExecutionInputOutput { @@unique([referencedByInputExecId, referencedByOutputExecId, name]) } +// Webhook that is registered with a provider and propagates to one or more nodes +model IntegrationWebhook { + id String @id @default(uuid()) + createdAt DateTime @default(now()) + updatedAt DateTime? @updatedAt + + userId String + user User @relation(fields: [userId], references: [id], onDelete: Restrict) // Webhooks must be deregistered before deleting + + provider String // e.g. 'github' + credentialsId String // relation to the credentials that the webhook was created with + webhookType String // e.g. 'repo' + resource String // e.g. 'Significant-Gravitas/AutoGPT' + events String[] // e.g. 
['created', 'updated'] + config Json + secret String // crypto string, used to verify payload authenticity + + providerWebhookId String // Webhook ID assigned by the provider + + AgentNodes AgentNode[] +} + model AnalyticsDetails { // PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma // typical uuid() inserts are handled by prisma diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index 53aeb99ed..84dbfaf35 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -26,6 +26,7 @@ "@faker-js/faker": "^9.2.0", "@hookform/resolvers": "^3.9.1", "@next/third-parties": "^15.0.3", + "@radix-ui/react-alert-dialog": "^1.1.2", "@radix-ui/react-avatar": "^1.1.1", "@radix-ui/react-checkbox": "^1.1.2", "@radix-ui/react-collapsible": "^1.1.1", diff --git a/autogpt_platform/frontend/src/app/globals.css b/autogpt_platform/frontend/src/app/globals.css index 7930a00b3..7be802f39 100644 --- a/autogpt_platform/frontend/src/app/globals.css +++ b/autogpt_platform/frontend/src/app/globals.css @@ -74,7 +74,7 @@ } .agpt-border-input { - @apply border-input focus-visible:border-gray-400 focus-visible:outline-none; + @apply border border-input focus-visible:border-gray-400 focus-visible:outline-none; } .agpt-shadow-input { diff --git a/autogpt_platform/frontend/src/app/profile/page.tsx b/autogpt_platform/frontend/src/app/profile/page.tsx index e01c4b1b8..93d9b7708 100644 --- a/autogpt_platform/frontend/src/app/profile/page.tsx +++ b/autogpt_platform/frontend/src/app/profile/page.tsx @@ -4,7 +4,7 @@ import { useSupabase } from "@/components/SupabaseProvider"; import { Button } from "@/components/ui/button"; import useUser from "@/hooks/useUser"; import { useRouter } from "next/navigation"; -import { useCallback, useContext, useMemo } from "react"; +import { useCallback, useContext, useMemo, useState } from "react"; import { FaSpinner } from "react-icons/fa"; import { Separator } from "@/components/ui/separator"; import { useToast } from "@/components/ui/use-toast"; @@ -21,6 +21,16 @@ import { TableRow, } from "@/components/ui/table"; import { CredentialsProviderName } from "@/lib/autogpt-server-api"; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, +} from "@/components/ui/alert-dialog"; export default function PrivatePage() { const { user, isLoading, error } = useUser(); @@ -29,15 +39,40 @@ export default function PrivatePage() { const providers = useContext(CredentialsProvidersContext); const { toast } = useToast(); + const [confirmationDialogState, setConfirmationDialogState] = useState< + | { + open: true; + message: string; + onConfirm: () => void; + onReject: () => void; + } + | { open: false } + >({ open: false }); + const removeCredentials = useCallback( - async (provider: CredentialsProviderName, id: string) => { + async ( + provider: CredentialsProviderName, + id: string, + force: boolean = false, + ) => { if (!providers || !providers[provider]) { return; } + let result; try { - const { revoked } = await providers[provider].deleteCredentials(id); - if (revoked !== false) { + result = await providers[provider].deleteCredentials(id, force); + } catch (error: any) { + toast({ + title: "Something went wrong when deleting credentials: " + error, + variant: "destructive", + duration: 2000, + }); + setConfirmationDialogState({ open: false }); + return; + } + if (result.deleted) { 
+ if (result.revoked) { toast({ title: "Credentials deleted", duration: 2000, @@ -49,11 +84,13 @@ export default function PrivatePage() { duration: 3000, }); } - } catch (error: any) { - toast({ - title: "Something went wrong when deleting credentials: " + error, - variant: "destructive", - duration: 2000, + setConfirmationDialogState({ open: false }); + } else if (result.need_confirmation) { + setConfirmationDialogState({ + open: true, + message: result.message, + onConfirm: () => removeCredentials(provider, id, true), + onReject: () => setConfirmationDialogState({ open: false }), }); } }, @@ -158,6 +195,36 @@ export default function PrivatePage() { ))} + + + + + Are you sure? + + {confirmationDialogState.open && confirmationDialogState.message} + + + + + confirmationDialogState.open && + confirmationDialogState.onReject() + } + > + Cancel + + + confirmationDialogState.open && + confirmationDialogState.onConfirm() + } + > + Continue + + + + ); } diff --git a/autogpt_platform/frontend/src/components/CustomNode.tsx b/autogpt_platform/frontend/src/components/CustomNode.tsx index 4157a6599..710aa1932 100644 --- a/autogpt_platform/frontend/src/components/CustomNode.tsx +++ b/autogpt_platform/frontend/src/components/CustomNode.tsx @@ -38,6 +38,7 @@ import { getPrimaryCategoryColor } from "@/lib/utils"; import { FlowContext } from "./Flow"; import { Badge } from "./ui/badge"; import NodeOutputs from "./NodeOutputs"; +import SchemaTooltip from "./SchemaTooltip"; import { IconCoin } from "./ui/icons"; import * as Separator from "@radix-ui/react-separator"; import * as ContextMenu from "@radix-ui/react-context-menu"; @@ -166,7 +167,7 @@ export function CustomNode({
@@ -205,16 +206,18 @@ export function CustomNode({ return keys.map(([propKey, propSchema]) => { const isRequired = data.inputSchema.required?.includes(propKey); - const isConnected = isHandleConnected(propKey); const isAdvanced = propSchema.advanced; + const isHidden = propSchema.hidden; const isConnectable = + // No input connection handles on INPUT and WEBHOOK blocks + ![BlockUIType.INPUT, BlockUIType.WEBHOOK].includes(nodeType) && // No input connection handles for credentials propKey !== "credentials" && - // No input connection handles on INPUT blocks - nodeType !== BlockUIType.INPUT && // For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle !(nodeType == BlockUIType.OUTPUT && propKey == "name"); + const isConnected = isInputHandleConnected(propKey); return ( + !isHidden && (isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
{isConnectable ? ( @@ -227,15 +230,15 @@ export function CustomNode({ /> ) : ( propKey != "credentials" && ( - - {propSchema.title || beautifyString(propKey)} - +
+ + {propSchema.title || beautifyString(propKey)} + + +
) )} - {!isConnected && ( + {isConnected || ( { + const isInputHandleConnected = (key: string) => { return ( data.connections && data.connections.some((conn: any) => { if (typeof conn === "string") { - const [source, target] = conn.split(" -> "); - return ( - (target.includes(key) && target.includes(data.title)) || - (source.includes(key) && source.includes(data.title)) - ); + const [_source, target] = conn.split(" -> "); + return target.includes(key) && target.includes(data.title); } - return ( - (conn.target === id && conn.targetHandle === key) || - (conn.source === id && conn.sourceHandle === key) - ); + return conn.target === id && conn.targetHandle === key; + }) + ); + }; + + const isOutputHandleConnected = (key: string) => { + return ( + data.connections && + data.connections.some((conn: any) => { + if (typeof conn === "string") { + const [source, _target] = conn.split(" -> "); + return source.includes(key) && source.includes(data.title); + } + return conn.source === id && conn.sourceHandle === key; }) ); }; diff --git a/autogpt_platform/frontend/src/components/flow.css b/autogpt_platform/frontend/src/components/flow.css index 30ef76b68..abbee903b 100644 --- a/autogpt_platform/frontend/src/components/flow.css +++ b/autogpt_platform/frontend/src/components/flow.css @@ -11,22 +11,6 @@ code { monospace; } -input, -textarea { - background-color: #ffffff; - color: #000000; - border: 1px solid #555; - padding: 8px; - border-radius: 4px; - width: calc(100% - 18px); - box-sizing: border-box; -} - -input::placeholder, -textarea::placeholder { - color: #aaa; -} - .modal { position: absolute; top: 50%; diff --git a/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx b/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx index df08a2ffe..d48a581bd 100644 --- a/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx +++ b/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx @@ -3,6 +3,7 @@ import { cn } from "@/lib/utils"; import { useForm } from "react-hook-form"; import { Input } from "@/components/ui/input"; import { Button } from "@/components/ui/button"; +import SchemaTooltip from "@/components/SchemaTooltip"; import useCredentials from "@/hooks/useCredentials"; import { zodResolver } from "@hookform/resolvers/zod"; import AutoGPTServerAPI from "@/lib/autogpt-server-api"; @@ -235,12 +236,10 @@ export const CredentialsInput: FC<{ if (savedApiKeys.length === 0 && savedOAuthCredentials.length === 0) { return ( <> - - Credentials - +
+ Credentials + +
{supportsOAuth2 && (
{error && {error}}
diff --git a/autogpt_platform/frontend/src/components/ui/alert-dialog.tsx b/autogpt_platform/frontend/src/components/ui/alert-dialog.tsx new file mode 100644 index 000000000..41adf9a22 --- /dev/null +++ b/autogpt_platform/frontend/src/components/ui/alert-dialog.tsx @@ -0,0 +1,143 @@ +"use client"; + +import * as React from "react"; +import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog"; + +import { cn } from "@/lib/utils"; +import { buttonVariants } from "@/components/ui/button"; +import { VariantProps } from "class-variance-authority"; + +const AlertDialog = AlertDialogPrimitive.Root; + +const AlertDialogTrigger = AlertDialogPrimitive.Trigger; + +const AlertDialogPortal = AlertDialogPrimitive.Portal; + +const AlertDialogOverlay = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName; + +const AlertDialogContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + + +)); +AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName; + +const AlertDialogHeader = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+); +AlertDialogHeader.displayName = "AlertDialogHeader"; + +const AlertDialogFooter = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+); +AlertDialogFooter.displayName = "AlertDialogFooter"; + +const AlertDialogTitle = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName; + +const AlertDialogDescription = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +AlertDialogDescription.displayName = + AlertDialogPrimitive.Description.displayName; + +const AlertDialogAction = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef & + VariantProps +>(({ className, variant, ...props }, ref) => ( + +)); +AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName; + +const AlertDialogCancel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName; + +export { + AlertDialog, + AlertDialogPortal, + AlertDialogOverlay, + AlertDialogTrigger, + AlertDialogContent, + AlertDialogHeader, + AlertDialogFooter, + AlertDialogTitle, + AlertDialogDescription, + AlertDialogAction, + AlertDialogCancel, +}; diff --git a/autogpt_platform/frontend/src/components/ui/multiselect.tsx b/autogpt_platform/frontend/src/components/ui/multiselect.tsx index 5400688b0..876444262 100644 --- a/autogpt_platform/frontend/src/components/ui/multiselect.tsx +++ b/autogpt_platform/frontend/src/components/ui/multiselect.tsx @@ -144,7 +144,7 @@ const MultiSelector = forwardRef( ref={ref} onKeyDown={handleKeyDown} className={cn( - "flex flex-col space-y-2 overflow-visible bg-transparent", + "flex flex-col overflow-visible bg-transparent", className, )} dir={dir} @@ -174,7 +174,7 @@ const MultiSelectorTrigger = forwardRef<
->(({ children }, ref) => { +>(({ children, className }, ref) => { const { open } = useMultiSelect(); return ( -
+
{open && children}
); diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts index da3e77f1c..eac9939ab 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts @@ -1,24 +1,24 @@ import { SupabaseClient } from "@supabase/supabase-js"; import { - AnalyticsMetrics, AnalyticsDetails, + AnalyticsMetrics, APIKeyCredentials, Block, + CredentialsDeleteNeedConfirmationResponse, CredentialsDeleteResponse, CredentialsMetaResponse, + ExecutionMeta, Graph, GraphCreatable, - GraphUpdateable, + GraphExecuteResponse, GraphMeta, GraphMetaWithRuns, - GraphExecuteResponse, - ExecutionMeta, + GraphUpdateable, NodeExecutionResult, OAuth2Credentials, - User, - ScheduleCreatable, - ScheduleUpdateable, Schedule, + ScheduleCreatable, + User, } from "./types"; export default class BaseAutoGPTServerAPI { @@ -226,10 +226,14 @@ export default class BaseAutoGPTServerAPI { deleteCredentials( provider: string, id: string, - ): Promise { + force: boolean = true, + ): Promise< + CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse + > { return this._request( "DELETE", `/integrations/${provider}/credentials/${id}`, + force ? { force: true } : undefined, ); } @@ -271,13 +275,14 @@ export default class BaseAutoGPTServerAPI { ?.access_token || ""; let url = this.baseUrl + path; - if (method === "GET" && payload) { + const payloadAsQuery = ["GET", "DELETE"].includes(method); + if (payloadAsQuery && payload) { // For GET requests, use payload as query const queryParams = new URLSearchParams(payload); url += `?${queryParams.toString()}`; } - const hasRequestBody = method !== "GET" && payload !== undefined; + const hasRequestBody = !payloadAsQuery && payload !== undefined; const response = await fetch(url, { method, headers: { diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts index 0794cc861..c63c192b7 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts @@ -56,6 +56,7 @@ export type BlockIOSubSchemaMeta = { description?: string; placeholder?: string; advanced?: boolean; + hidden?: boolean; }; export type BlockIOObjectSubSchema = BlockIOSubSchemaMeta & { @@ -271,6 +272,13 @@ export type CredentialsDeleteResponse = { revoked: boolean | null; }; +/* Mirror of backend/server/integrations/router.py:CredentialsDeletionNeedsConfirmationResponse */ +export type CredentialsDeleteNeedConfirmationResponse = { + deleted: false; + need_confirmation: true; + message: string; +}; + /* Mirror of backend/data/model.py:CredentialsMetaInput */ export type CredentialsMetaInput = { id: string; @@ -317,6 +325,7 @@ export enum BlockUIType { INPUT = "Input", OUTPUT = "Output", NOTE = "Note", + WEBHOOK = "Webhook", AGENT = "Agent", } diff --git a/autogpt_platform/frontend/yarn.lock b/autogpt_platform/frontend/yarn.lock index 2445587c6..755dd45af 100644 --- a/autogpt_platform/frontend/yarn.lock +++ b/autogpt_platform/frontend/yarn.lock @@ -2100,6 +2100,18 @@ resolved "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz" integrity sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA== +"@radix-ui/react-alert-dialog@^1.1.2": + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.2.tgz#ac3bb7f71f5cbb595d3d0949bb12b598c2a99981" + integrity sha512-eGSlLzPhKO+TErxkiGcCZGuvbVMnLA1MTnyBksGOeGRGkxHiiJUujsjmNTdWTm4iHVSRaUao9/4Ur671auMghQ== + dependencies: + "@radix-ui/primitive" "1.1.0" + "@radix-ui/react-compose-refs" "1.1.0" + "@radix-ui/react-context" "1.1.1" + "@radix-ui/react-dialog" "1.1.2" + "@radix-ui/react-primitive" "2.0.0" + "@radix-ui/react-slot" "1.1.0" + "@radix-ui/react-arrow@1.1.0": version "1.1.0" resolved "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz" @@ -2182,7 +2194,7 @@ resolved "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz" integrity sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q== -"@radix-ui/react-dialog@^1.1.2": +"@radix-ui/react-dialog@1.1.2", "@radix-ui/react-dialog@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.1.2.tgz#d9345575211d6f2d13e209e84aec9a8584b54d6c" integrity sha512-Yj4dZtqa2o+kG61fzB0H2qUvmwBA2oyQroGLyNtBj1beo1khoQ3q1a2AO8rrQYjd8256CO9+N8L9tvsS+bnIyA== @@ -2428,13 +2440,20 @@ dependencies: "@radix-ui/react-primitive" "2.0.0" -"@radix-ui/react-slot@1.1.0", "@radix-ui/react-slot@^1.1.0": +"@radix-ui/react-slot@1.1.0": version "1.1.0" resolved "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz" integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw== dependencies: "@radix-ui/react-compose-refs" "1.1.0" +"@radix-ui/react-slot@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.1.0.tgz#7c5e48c36ef5496d97b08f1357bb26ed7c714b84" + integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw== + dependencies: + "@radix-ui/react-compose-refs" "1.1.0" + "@radix-ui/react-switch@^1.1.1": version "1.1.1" resolved "https://registry.yarnpkg.com/@radix-ui/react-switch/-/react-switch-1.1.1.tgz#1401658c24d66a18610f18793afbaa7fedf5429a" diff --git a/docs/content/platform/new_blocks.md b/docs/content/platform/new_blocks.md index 4ef48ea73..faad5e8a9 100644 --- a/docs/content/platform/new_blocks.md +++ b/docs/content/platform/new_blocks.md @@ -83,7 +83,7 @@ Follow these steps to create and test a new block: In this case, we're mocking the `get_request` method to always return a dictionary with an 'extract' key, simulating a successful API response. This allows us to test the block's logic without making actual network requests, which could be slow, unreliable, or rate-limited. -5. **Implement the `run` method with error handling:**, this should contain the main logic of the block: +5. 
**Implement the `run` method with error handling.** This should contain the main logic of the block: ```python def run(self, input_data: Input, **kwargs) -> BlockOutput: @@ -234,7 +234,7 @@ All our existing handlers and the base class can be found [here][OAuth2 handlers Every handler must implement the following parts of the [`BaseOAuthHandler`] interface: -```python title="autogpt_platform/backend/backend/integrations/oauth/base.py" +```python title="backend/integrations/oauth/base.py" --8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler1" --8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler2" --8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler3" @@ -249,13 +249,13 @@ Aside from implementing the `OAuthHandler` itself, adding a handler into the sys - Adding the handler class to `HANDLERS_BY_NAME` under [`integrations/oauth/__init__.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/__init__.py) -```python title="autogpt_platform/backend/backend/integrations/oauth/__init__.py" +```python title="backend/integrations/oauth/__init__.py" --8<-- "autogpt_platform/backend/backend/integrations/oauth/__init__.py:HANDLERS_BY_NAMEExample" ``` - Adding `{provider}_client_id` and `{provider}_client_secret` to the application's `Secrets` under [`util/settings.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/util/settings.py) -```python title="autogpt_platform/backend/backend/util/settings.py" +```python title="backend/util/settings.py" --8<-- "autogpt_platform/backend/backend/util/settings.py:OAuthServerCredentialsExample" ``` @@ -286,13 +286,13 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro - GitHub blocks with API key + OAuth2 support: [`blocks/github`](https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt_platform/backend/backend/blocks/github/) -```python title="blocks/github/issues.py" +```python title="backend/blocks/github/issues.py" --8<-- "autogpt_platform/backend/backend/blocks/github/issues.py:GithubCommentBlockExample" ``` - GitHub OAuth2 handler: [`integrations/oauth/github.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/github.py) -```python title="blocks/github/github.py" +```python title="backend/integrations/oauth/github.py" --8<-- "autogpt_platform/backend/backend/integrations/oauth/github.py:GithubOAuthHandlerExample" ``` @@ -300,18 +300,148 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro - Google OAuth2 handler: [`integrations/oauth/google.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/google.py) -```python title="integrations/oauth/google.py" +```python title="backend/integrations/oauth/google.py" --8<-- "autogpt_platform/backend/backend/integrations/oauth/google.py:GoogleOAuthHandlerExample" ``` You can see that google has defined a `DEFAULT_SCOPES` variable, this is used to set the scopes that are requested no matter what the user asks for. -```python title="blocks/google/_auth.py" +```python title="backend/blocks/google/_auth.py" --8<-- "autogpt_platform/backend/backend/blocks/google/_auth.py:GoogleOAuthIsConfigured" ``` You can also see that `GOOGLE_OAUTH_IS_CONFIGURED` is used to disable the blocks that require OAuth if the oauth is not configured. 
This check happens in the `__init__` method of each block. Because there is no API key fallback for Google blocks, we need to make sure OAuth is configured before allowing the user to use these blocks.
+
+### Webhook-triggered Blocks
+
+Webhook-triggered blocks allow your agent to respond to external events in real time.
+These blocks are triggered by incoming webhooks from third-party services
+rather than being executed manually.
+
+Creating and running a webhook-triggered block involves three main components:
+
+- The block itself, which specifies:
+  - Inputs for the user to select a resource and events to subscribe to
+  - A `credentials` input with the scopes needed to manage webhooks
+  - Logic to turn the webhook payload into outputs for the webhook block
+- The `WebhooksManager` for the corresponding webhook service provider, which handles:
+  - (De)registering webhooks with the provider
+  - Parsing and validating incoming webhook payloads
+- The credentials system for the corresponding service provider, which may include an `OAuthHandler`
+
+There is more going on under the hood, e.g. to store and retrieve webhooks and their
+links to nodes, but to add a webhook-triggered block you shouldn't need to make changes
+to those parts of the system.
+
+#### Creating a Webhook-triggered Block
+
+To create a webhook-triggered block, follow these additional steps on top of the basic block creation process:
+
+1. **Define `webhook_config`** in your block's `__init__` method.
+
+    <details>
+    <summary>Example: GitHubPullRequestTriggerBlock</summary>
+
+    ```python title="backend/blocks/github/triggers.py"
+    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-webhook_config"
+    ```
+    </details>
+
+    <details>
+    <summary>BlockWebhookConfig definition</summary>
+
+    ```python title="backend/data/block.py"
+    --8<-- "autogpt_platform/backend/backend/data/block.py:BlockWebhookConfig"
+    ```
+    </details>
+
+2. **Define event filter input** in your block's Input schema.
+   This allows the user to select which specific types of events will trigger the block in their agent.
+
+    <details>
+    <summary>Example: GitHubPullRequestTriggerBlock</summary>
+
+    ```python title="backend/blocks/github/triggers.py"
+    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-event-filter"
+    ```
+    </details>
+
+    - The name of the input field (`events` in this case) must match `webhook_config.event_filter_input`.
+    - The event filter itself must be a Pydantic model with only boolean fields.
+      (See the sketch after these steps for how enabled filter fields map to provider event names.)
+
+3. **Include payload field** in your block's Input schema.
+
+    <details>
+    <summary>Example: GitHubTriggerBase</summary>
+
+    ```python title="backend/blocks/github/triggers.py"
+    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-payload-field"
+    ```
+    </details>
+
+4. **Define `credentials` input** in your block's Input schema.
+    - Its scopes must be sufficient to manage a user's webhooks through the provider's API
+    - See [Blocks with authentication](#blocks-with-authentication) for further details
+
+5. **Process webhook payload** and output relevant parts of it in your block's `run` method.
+
+    <details>
+    <summary>Example: GitHubPullRequestTriggerBlock</summary>
+
+    ```python
+    def run(self, input_data: Input, **kwargs) -> BlockOutput:
+        yield "payload", input_data.payload
+        yield "sender", input_data.payload["sender"]
+        yield "event", input_data.payload["action"]
+        yield "number", input_data.payload["number"]
+        yield "pull_request", input_data.payload["pull_request"]
+    ```
+
+    Note that the `credentials` parameter can be omitted if the credentials
+    aren't used at block runtime, like in the example.
+    </details>
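+
+To make the event filter concrete: at activation time, `on_node_activate` (in
+`graph_lifecycle_hooks.py`) turns the enabled boolean fields of the filter model into
+provider event names using `webhook_config.event_format`, and the webhooks manager may
+then collapse them to generic events (e.g. `pull_request.opened` -> `pull_request`).
+A minimal, self-contained sketch of that mapping, using a hypothetical
+`PullRequestEvents` filter model:
+
+```python
+from pydantic import BaseModel
+
+
+# Event filter input: a Pydantic model with only boolean fields (step 2 above).
+class PullRequestEvents(BaseModel):
+    opened: bool = False
+    synchronize: bool = False
+    closed: bool = False
+
+
+# With event_format="pull_request.{event}", enabled fields become provider events:
+events = PullRequestEvents(opened=True, closed=True)
+subscribed = [
+    "pull_request.{event}".format(event=name)
+    for name, enabled in events.model_dump().items()
+    if enabled
+]
+print(subscribed)  # ['pull_request.opened', 'pull_request.closed']
+```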
+
+#### Adding a Webhooks Manager
+
+To add support for a new webhook provider, you'll need to create a `WebhooksManager` that implements the `BaseWebhooksManager` interface:
+
+```python title="backend/integrations/webhooks/base.py"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager1"
+
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager2"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager3"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager4"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager5"
+```
+
+And add a reference to your `WebhooksManager` class in `WEBHOOK_MANAGERS_BY_NAME`:
+
+```python title="backend/integrations/webhooks/__init__.py"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/__init__.py:WEBHOOK_MANAGERS_BY_NAME"
+```
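+
+As a starting point, here is a sketch of such a manager for a hypothetical provider,
+modeled on the `GithubWebhooksManager` shown below. The `_register_webhook` /
+`_deregister_webhook` signatures, the `credentials.bearer()` call, and the
+`provider_webhook_id` usage mirror the GitHub implementation; check
+`BaseWebhooksManager` for the full abstract interface (payload validation, ping
+handling) before relying on this:
+
+```python
+import requests
+
+from backend.data import integrations
+from backend.integrations.webhooks.base import BaseWebhooksManager
+
+
+class ExampleWebhooksManager(BaseWebhooksManager):
+    """Sketch for a hypothetical 'example' provider."""
+
+    API_URL = "https://api.example.com"  # hypothetical provider API
+
+    async def _register_webhook(
+        self, credentials, webhook_type, resource, events, ingress_url, secret
+    ) -> tuple[str, dict]:
+        # Create the webhook with the provider, pointing it at our ingress URL,
+        # and return the provider-assigned webhook ID plus its stored config.
+        response = requests.post(
+            f"{self.API_URL}/{resource}/hooks",
+            headers={"Authorization": credentials.bearer()},
+            json={"url": ingress_url, "events": events, "secret": secret},
+        )
+        response.raise_for_status()
+        return str(response.json()["id"]), {"url": ingress_url, "events": events}
+
+    async def _deregister_webhook(
+        self, webhook: integrations.Webhook, credentials
+    ) -> None:
+        # Delete the webhook on the provider's side; treat 404 as already deleted.
+        response = requests.delete(
+            f"{self.API_URL}/hooks/{webhook.provider_webhook_id}",
+            headers={"Authorization": credentials.bearer()},
+        )
+        if response.status_code not in (204, 404):
+            response.raise_for_status()
+```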
+
+#### Example: GitHub Webhook Integration
+
+<details>
+<summary>GitHub Webhook triggers: blocks/github/triggers.py</summary>
+
+```python title="backend/blocks/github/triggers.py"
+--8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:GithubTriggerExample"
+```
+</details>
+
+<details>
+<summary>GitHub Webhooks Manager: integrations/webhooks/github.py</summary>
+
+```python title="backend/integrations/webhooks/github.py"
+--8<-- "autogpt_platform/backend/backend/integrations/webhooks/github.py:GithubWebhooksManager"
+```
+</details>
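+
+For context on what happens at runtime: the generic ingress endpoint
+(`backend/server/integrations/router.py`) receives the provider's request, validates it
+through the provider's `WebhooksManager`, publishes a `WebhookEvent`, and triggers every
+attached node that listens for that event type. This is a condensed paraphrase of
+`webhook_ingress_generic` from that router, not a definitive implementation:
+
+```python
+from backend.data.integrations import (
+    WebhookEvent,
+    get_webhook,
+    publish_webhook_event,
+)
+from backend.executor.manager import ExecutionManager
+from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
+from backend.util.service import get_service_client
+
+
+async def handle_ingress(provider: str, webhook_id: str, request):
+    # Validate the payload with the provider-specific manager (signature checks etc.)
+    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
+    webhook = await get_webhook(webhook_id)
+    payload, event_type = await webhook_manager.validate_payload(webhook, request)
+
+    # Publish the event so listeners (e.g. the ping endpoint) can observe it
+    await publish_webhook_event(
+        WebhookEvent(
+            provider=provider,
+            webhook_id=webhook_id,
+            event_type=event_type,
+            payload=payload,
+        )
+    )
+
+    # Kick off a graph execution for every attached node triggered by this event type
+    executor = get_service_client(ExecutionManager)
+    for node in webhook.attached_nodes or []:
+        if node.is_triggered_by_event_type(event_type):
+            executor.add_execution(
+                node.graph_id,
+                data={f"webhook_{webhook_id}_payload": payload},
+                user_id=webhook.user_id,
+            )
+```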
+ ## Key Points to Remember - **Unique ID**: Give your block a unique ID in the **init** method. From 3d21d54daba084821b410885adb8c6fe817ce0c6 Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Mon, 25 Nov 2024 18:26:13 +0000 Subject: [PATCH 09/20] fix(backend): Add missing `strenum` dependency Follow-up hotfix for #8358 --- autogpt_platform/backend/poetry.lock | 18 +++++++++--------- autogpt_platform/backend/pyproject.toml | 1 + 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/autogpt_platform/backend/poetry.lock b/autogpt_platform/backend/poetry.lock index d568eeae3..619b7b8c7 100644 --- a/autogpt_platform/backend/poetry.lock +++ b/autogpt_platform/backend/poetry.lock @@ -297,9 +297,9 @@ expiringdict = "^1.2.2" google-cloud-logging = "^3.11.3" pydantic = "^2.9.2" pydantic-settings = "^2.6.1" -pyjwt = "^2.8.0" +pyjwt = "^2.10.0" python-dotenv = "^1.0.1" -supabase = "^2.9.1" +supabase = "^2.10.0" [package.source] type = "directory" @@ -1986,8 +1986,8 @@ python-dateutil = ">=2.5.3" tqdm = ">=4.64.1" typing-extensions = ">=3.7.4" urllib3 = [ - {version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, + {version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, ] [package.extras] @@ -2359,8 +2359,8 @@ files = [ annotated-types = ">=0.6.0" pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] @@ -2501,13 +2501,13 @@ files = [ [[package]] name = "pyjwt" -version = "2.9.0" +version = "2.10.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, + {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"}, + {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"}, ] [package.extras] @@ -3933,4 +3933,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "e33b0da31247495e8704fee5224f7b0cf53859cd0ce8bafb39889548a649f5fb" +content-hash = "02293c7c8933c2a6f7bb62af70212a7ca3340e6817352d743a9950874e6a9485" diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index 57297e207..9679c4c9b 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -38,6 +38,7 @@ pytest-asyncio = "^0.24.0" python-dotenv = "^1.0.1" redis = "^5.2.0" sentry-sdk = "2.18.0" +strenum = "^0.4.9" supabase = "^2.10.0" tenacity = "^9.0.0" uvicorn = { extras = ["standard"], version = "^0.32.0" } From 03f776681a25066061dfe268d19579dcc15ccbf6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 22:49:45 +0000 Subject: [PATCH 10/20] build(deps-dev): bump the development-dependencies group in /autogpt_platform/backend with 2 updates (#8761) build(deps-dev): bump the development-dependencies group Bumps the 
development-dependencies group in /autogpt_platform/backend with 2 updates: [poethepoet](https://github.com/nat-n/poethepoet) and [ruff](https://github.com/astral-sh/ruff). Updates `poethepoet` from 0.30.0 to 0.31.0 - [Release notes](https://github.com/nat-n/poethepoet/releases) - [Commits](https://github.com/nat-n/poethepoet/compare/v0.30.0...v0.31.0) Updates `ruff` from 0.7.4 to 0.8.0 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.7.4...0.8.0) --- updated-dependencies: - dependency-name: poethepoet dependency-type: direct:development update-type: version-update:semver-minor dependency-group: development-dependencies - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-minor dependency-group: development-dependencies ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- autogpt_platform/backend/poetry.lock | 52 ++++++++++++------------- autogpt_platform/backend/pyproject.toml | 4 +- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/autogpt_platform/backend/poetry.lock b/autogpt_platform/backend/poetry.lock index 619b7b8c7..8ac57ebcd 100644 --- a/autogpt_platform/backend/poetry.lock +++ b/autogpt_platform/backend/poetry.lock @@ -1986,8 +1986,8 @@ python-dateutil = ">=2.5.3" tqdm = ">=4.64.1" typing-extensions = ">=3.7.4" urllib3 = [ - {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, {version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, ] [package.extras] @@ -2051,13 +2051,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.30.0" +version = "0.31.0" description = "A task runner that works well with poetry." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "poethepoet-0.30.0-py3-none-any.whl", hash = "sha256:bf875741407a98da9e96f2f2d0b2c4c34f56d89939a7f53a4b6b3a64b546ec4e"}, - {file = "poethepoet-0.30.0.tar.gz", hash = "sha256:9f7ccda2d6525616ce989ca8ef973739fd668f50bef0b9d3631421d504d9ae4a"}, + {file = "poethepoet-0.31.0-py3-none-any.whl", hash = "sha256:5067c5adf9f228b8af1f3df7d57dc319ed8b3f153bf21faf99f7b74494174c3d"}, + {file = "poethepoet-0.31.0.tar.gz", hash = "sha256:b1cffb120149101b02ffa0583c6e61dfee53953a741df3dabf179836bdef97f5"}, ] [package.dependencies] @@ -2359,8 +2359,8 @@ files = [ annotated-types = ">=0.6.0" pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] [package.extras] @@ -2954,29 +2954,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.7.4" +version = "0.8.0" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, - {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, - {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, - {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, - {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, - {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, - {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, + {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, + {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, + {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, + {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, + {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, + {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, + {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, ] [[package]] @@ -3933,4 +3933,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "02293c7c8933c2a6f7bb62af70212a7ca3340e6817352d743a9950874e6a9485" +content-hash = "ac9354e2f206e1a78e6c99d3eab9d538b97c04348270d4776024102526ea7b0b" diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index 9679c4c9b..6c84316d6 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -51,11 +51,11 @@ cryptography = "^43.0.3" sqlalchemy = "^2.0.36" psycopg2-binary = "^2.9.10" [tool.poetry.group.dev.dependencies] -poethepoet = "^0.30.0" +poethepoet = "^0.31.0" httpx = "^0.27.0" pytest-watcher = "^0.4.2" requests = "^2.32.3" -ruff = "^0.7.4" +ruff = "^0.8.0" pyright = "^1.1.389" isort = "^5.13.2" black = "^24.10.0" From c6e838da375880e12b7f21183a4c4d3380aae7b6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 08:12:27 +0000 Subject: [PATCH 11/20] chore(market): Update Ruff from 0.7.4 to 0.8.0 (#8758) --- autogpt_platform/market/poetry.lock | 40 +++++++++++++------------- autogpt_platform/market/pyproject.toml | 2 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/autogpt_platform/market/poetry.lock b/autogpt_platform/market/poetry.lock index c52141b74..325f8ffaf 100644 --- a/autogpt_platform/market/poetry.lock +++ b/autogpt_platform/market/poetry.lock @@ -1058,29 +1058,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.7.4" +version = "0.8.0" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, - {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, - {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, - {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, - {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, - {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, - {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, + {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, + {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, + {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, + {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, + {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, + {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, + {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, ] [[package]] @@ -1298,4 +1298,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "985f87e9d6e2b7232f880a476c69c626bc4227156d8a57d8f1867236b215f82f" +content-hash = "89a2655b6c666f40a0319881580bc447aea78febee65a04eebf73fd092e2147e" diff --git a/autogpt_platform/market/pyproject.toml b/autogpt_platform/market/pyproject.toml index 76876d2d0..8147cf069 100644 --- a/autogpt_platform/market/pyproject.toml +++ b/autogpt_platform/market/pyproject.toml @@ -28,7 +28,7 @@ pytest-asyncio = "^0.24.0" pytest-watcher = "^0.4.3" requests = "^2.32.3" -ruff = "^0.7.4" +ruff = "^0.8.0" pyright = "^1.1.389" isort = "^5.13.2" black = "^24.10.0" From f1414550f9d0f7ee174e0084fa7246a824d9a334 Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Tue, 26 Nov 2024 18:03:06 +0100 Subject: [PATCH 12/20] refactor(platform): Combine per-provider credentials API calls (#8772) - Add `/integrations/credentials` endpoint which lists all credentials for the authenticated user - Amend credential fetching logic in front end to fetch all at once instead of per provider - Resolves #8770 - Resolves (hopefully) #8613 --- .../backend/server/integrations/router.py | 23 +++++- .../integrations/credentials-provider.tsx | 71 ++++++++++++------- .../src/lib/autogpt-server-api/baseClient.ts | 8 ++- .../src/lib/autogpt-server-api/types.ts | 3 +- 4 files changed, 76 insertions(+), 29 deletions(-) diff --git a/autogpt_platform/backend/backend/server/integrations/router.py b/autogpt_platform/backend/backend/server/integrations/router.py index ecf28cedd..d18ba5c46 100644 --- a/autogpt_platform/backend/backend/server/integrations/router.py +++ b/autogpt_platform/backend/backend/server/integrations/router.py @@ -65,6 +65,7 @@ 
def login( class CredentialsMetaResponse(BaseModel): id: str + provider: str type: CredentialsType title: str | None scopes: list[str] | None @@ -119,6 +120,7 @@ def callback( ) return CredentialsMetaResponse( id=credentials.id, + provider=credentials.provider, type=credentials.type, title=credentials.title, scopes=credentials.scopes, @@ -126,8 +128,26 @@ def callback( ) -@router.get("/{provider}/credentials") +@router.get("/credentials") def list_credentials( + user_id: Annotated[str, Depends(get_user_id)], +) -> list[CredentialsMetaResponse]: + credentials = creds_manager.store.get_all_creds(user_id) + return [ + CredentialsMetaResponse( + id=cred.id, + provider=cred.provider, + type=cred.type, + title=cred.title, + scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None, + username=cred.username if isinstance(cred, OAuth2Credentials) else None, + ) + for cred in credentials + ] + + +@router.get("/{provider}/credentials") +def list_credentials_by_provider( provider: Annotated[str, Path(title="The provider to list credentials for")], user_id: Annotated[str, Depends(get_user_id)], ) -> list[CredentialsMetaResponse]: @@ -135,6 +155,7 @@ def list_credentials( return [ CredentialsMetaResponse( id=cred.id, + provider=cred.provider, type=cred.type, title=cred.title, scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None, diff --git a/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx b/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx index 8307b81e4..aeab8f48c 100644 --- a/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx +++ b/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx @@ -184,43 +184,64 @@ export default function CredentialsProvider({ api.isAuthenticated().then((isAuthenticated) => { if (!isAuthenticated) return; - CREDENTIALS_PROVIDER_NAMES.forEach( - (provider: CredentialsProviderName) => { - api.listCredentials(provider).then((response) => { - const { oauthCreds, apiKeys } = response.reduce<{ + api.listCredentials().then((response) => { + const credentialsByProvider = response.reduce( + (acc, cred) => { + if (!acc[cred.provider]) { + acc[cred.provider] = { oauthCreds: [], apiKeys: [] }; + } + if (cred.type === "oauth2") { + acc[cred.provider].oauthCreds.push(cred); + } else if (cred.type === "api_key") { + acc[cred.provider].apiKeys.push(cred); + } + return acc; + }, + {} as Record< + CredentialsProviderName, + { oauthCreds: CredentialsMetaResponse[]; apiKeys: CredentialsMetaResponse[]; - }>( - (acc, cred) => { - if (cred.type === "oauth2") { - acc.oauthCreds.push(cred); - } else if (cred.type === "api_key") { - acc.apiKeys.push(cred); - } - return acc; - }, - { oauthCreds: [], apiKeys: [] }, - ); + } + >, + ); - setProviders((prev) => ({ - ...prev, + setProviders((prev) => ({ + ...prev, + ...Object.entries(credentialsByProvider).reduce( + (acc, [provider, { apiKeys, oauthCreds }]) => ({ + ...acc, [provider]: { provider, - providerName: providerDisplayNames[provider], + providerName: + providerDisplayNames[provider as CredentialsProviderName], savedApiKeys: apiKeys, savedOAuthCredentials: oauthCreds, oAuthCallback: (code: string, state_token: string) => - oAuthCallback(provider, code, state_token), + oAuthCallback( + provider as CredentialsProviderName, + code, + state_token, + ), createAPIKeyCredentials: ( credentials: APIKeyCredentialsCreatable, - ) => createAPIKeyCredentials(provider, credentials), + ) => + createAPIKeyCredentials( + 
provider as CredentialsProviderName, + credentials, + ), deleteCredentials: (id: string, force: boolean = false) => - deleteCredentials(provider, id, force), + deleteCredentials( + provider as CredentialsProviderName, + id, + force, + ), }, - })); - }); - }, - ); + }), + {}, + ), + })); + }); }); }, [api, createAPIKeyCredentials, deleteCredentials, oAuthCallback]); diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts index eac9939ab..c8f25d13d 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/baseClient.ts @@ -212,8 +212,12 @@ export default class BaseAutoGPTServerAPI { ); } - listCredentials(provider: string): Promise { - return this._get(`/integrations/${provider}/credentials`); + listCredentials(provider?: string): Promise { + return this._get( + provider + ? `/integrations/${provider}/credentials` + : "/integrations/credentials", + ); } getCredentials( diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts index c63c192b7..59c1b717d 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts @@ -260,6 +260,7 @@ export type NodeExecutionResult = { /* Mirror of backend/server/integrations/router.py:CredentialsMetaResponse */ export type CredentialsMetaResponse = { id: string; + provider: CredentialsProviderName; type: CredentialsType; title?: string; scopes?: Array; @@ -292,7 +293,7 @@ type BaseCredentials = { id: string; type: CredentialsType; title?: string; - provider: string; + provider: CredentialsProviderName; }; /* Mirror of autogpt_libs/supabase_integration_credentials_store/types.py:OAuth2Credentials */ From 951948d239a9de5d9bedf984c993c6d951e14ed7 Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Wed, 27 Nov 2024 02:10:51 +0530 Subject: [PATCH 13/20] fix(platform): allowing condition block to compare 2 strings (#8771) Co-authored-by: Reinier van der Leer Co-authored-by: Bently --- autogpt_platform/backend/backend/blocks/branching.py | 10 ++++++++-- docs/content/platform/advanced_setup.md | 2 +- docs/content/platform/getting-started.md | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/autogpt_platform/backend/backend/blocks/branching.py b/autogpt_platform/backend/backend/blocks/branching.py index 65a01c977..daf967bc6 100644 --- a/autogpt_platform/backend/backend/blocks/branching.py +++ b/autogpt_platform/backend/backend/blocks/branching.py @@ -75,11 +75,17 @@ class ConditionBlock(Block): value1 = input_data.value1 if isinstance(value1, str): - value1 = float(value1.strip()) + try: + value1 = float(value1.strip()) + except ValueError: + value1 = value1.strip() value2 = input_data.value2 if isinstance(value2, str): - value2 = float(value2.strip()) + try: + value2 = float(value2.strip()) + except ValueError: + value2 = value2.strip() yes_value = input_data.yes_value if input_data.yes_value is not None else value1 no_value = input_data.no_value if input_data.no_value is not None else value2 diff --git a/docs/content/platform/advanced_setup.md b/docs/content/platform/advanced_setup.md index 9bb413b70..cfe8f9ae9 100644 --- a/docs/content/platform/advanced_setup.md +++ b/docs/content/platform/advanced_setup.md @@ -55,7 +55,7 @@ This will generate the Prisma client for 
PostgreSQL. You will also need to run t ```bash cd autogpt_platform/ -docker compose up -d +docker compose up -d --build ``` You can then run the migrations from the `backend` directory. diff --git a/docs/content/platform/getting-started.md b/docs/content/platform/getting-started.md index f4e82392b..b57ff7de8 100644 --- a/docs/content/platform/getting-started.md +++ b/docs/content/platform/getting-started.md @@ -90,7 +90,7 @@ To run the backend services, follow these steps: * Run the backend services: ``` - docker compose up -d + docker compose up -d --build ``` This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode. From 708ed9a91c637bd2e5dffdd2024c86fde64fd025 Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Wed, 27 Nov 2024 02:16:13 +0530 Subject: [PATCH 14/20] fix(platform): handle None value in issue body when fetching GitHub issues (#8773) Co-authored-by: Reinier van der Leer --- autogpt_platform/backend/backend/blocks/github/issues.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/autogpt_platform/backend/backend/blocks/github/issues.py b/autogpt_platform/backend/backend/blocks/github/issues.py index 24790acce..8cab07db3 100644 --- a/autogpt_platform/backend/backend/blocks/github/issues.py +++ b/autogpt_platform/backend/backend/blocks/github/issues.py @@ -234,9 +234,12 @@ class GithubReadIssueBlock(Block): credentials, input_data.issue_url, ) - yield "title", title - yield "body", body - yield "user", user + if title: + yield "title", title + if body: + yield "body", body + if user: + yield "user", user class GithubListIssuesBlock(Block): From 4aae15d769e90daf6ce6296a1af29048a1309de4 Mon Sep 17 00:00:00 2001 From: Bently Date: Tue, 26 Nov 2024 21:38:43 +0000 Subject: [PATCH 16/20] feat(blocks): Add Word Character Count Block (#8781) * Adds Word Character Count Block Co-Authored-By: SerchioSD <69461657+serchiosd@users.noreply.github.com> * update test_output --------- Co-authored-by: SerchioSD <69461657+serchiosd@users.noreply.github.com> --- .../blocks/count_words_and_char_block.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 autogpt_platform/backend/backend/blocks/count_words_and_char_block.py diff --git a/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py b/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py new file mode 100644 index 000000000..13f9e3977 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py @@ -0,0 +1,43 @@ +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class WordCharacterCountBlock(Block): + class Input(BlockSchema): + text: str = SchemaField( + description="Input text to count words and characters", + placeholder="Enter your text here", + advanced=False, + ) + + class Output(BlockSchema): + word_count: int = SchemaField(description="Number of words in the input text") + character_count: int = SchemaField( + description="Number of characters in the input text" + ) + error: str = SchemaField( + description="Error message if the counting operation failed" + ) + + def __init__(self): + super().__init__( + id="ab2a782d-22cf-4587-8a70-55b59b3f9f90", + description="Counts the number of words and characters in a given text.", + categories={BlockCategory.TEXT}, + input_schema=WordCharacterCountBlock.Input, + 
output_schema=WordCharacterCountBlock.Output, + test_input={"text": "Hello, how are you?"}, + test_output=[("word_count", 4), ("character_count", 19)], + ) + + def run(self, input_data: Input, **kwargs) -> BlockOutput: + try: + text = input_data.text + word_count = len(text.split()) + character_count = len(text) + + yield "word_count", word_count + yield "character_count", character_count + + except Exception as e: + yield "error", str(e) From effd1e35a3cd15f59fba4f26ddfec80c4737c8ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 22:43:06 +0000 Subject: [PATCH 17/20] chore(libs): Update dev dependency Ruff from 0.7.4 to 0.8.0 (#8760) build(deps-dev): bump ruff Bumps the development-dependencies group in /autogpt_platform/autogpt_libs with 1 update: [ruff](https://github.com/astral-sh/ruff). Updates `ruff` from 0.7.4 to 0.8.0 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.7.4...0.8.0) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-minor dependency-group: development-dependencies ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- autogpt_platform/autogpt_libs/poetry.lock | 40 ++++++++++---------- autogpt_platform/autogpt_libs/pyproject.toml | 2 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/autogpt_platform/autogpt_libs/poetry.lock b/autogpt_platform/autogpt_libs/poetry.lock index 21493f743..847c9418f 100644 --- a/autogpt_platform/autogpt_libs/poetry.lock +++ b/autogpt_platform/autogpt_libs/poetry.lock @@ -1324,29 +1324,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.7.4" +version = "0.8.0" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, - {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, - {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, - {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, - {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, - {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, - {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, - {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, - {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, + {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, + {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, + {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, + {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, + {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, + {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, + {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, ] [[package]] @@ -1750,4 +1750,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "48184ad1281689c7743b8ca23135a647dc52257d54702d88b043fe31fe27ff27" +content-hash = "8cd9e3cd56544dc826b545b47b79fe6d20bb6ac84660770c436d1347418d4028" diff --git a/autogpt_platform/autogpt_libs/pyproject.toml b/autogpt_platform/autogpt_libs/pyproject.toml index 62d29a94c..d8268a86a 100644 --- a/autogpt_platform/autogpt_libs/pyproject.toml +++ b/autogpt_platform/autogpt_libs/pyproject.toml @@ -19,7 +19,7 @@ supabase = "^2.10.0" [tool.poetry.group.dev.dependencies] redis = "^5.2.0" -ruff = "^0.7.4" +ruff = "^0.8.0" [build-system] requires = ["poetry-core"] From 6bfe7ff497e05f81db054ffdf0d6bfdaff06c65f Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Wed, 27 Nov 2024 12:46:29 +0530 Subject: [PATCH 18/20] fix(frontend): Add integer type definition for node handles (#8803) --- autogpt_platform/frontend/src/components/NodeHandle.tsx | 1 + autogpt_platform/frontend/src/lib/utils.ts | 3 +++ 2 files changed, 4 insertions(+) diff --git a/autogpt_platform/frontend/src/components/NodeHandle.tsx b/autogpt_platform/frontend/src/components/NodeHandle.tsx index e42b740d3..b489cccd5 100644 --- a/autogpt_platform/frontend/src/components/NodeHandle.tsx +++ b/autogpt_platform/frontend/src/components/NodeHandle.tsx @@ -22,6 +22,7 @@ const NodeHandle: FC = ({ const typeName: Record = { string: "text", number: "number", + integer: "integer", boolean: "true/false", object: "object", array: "list", diff --git a/autogpt_platform/frontend/src/lib/utils.ts b/autogpt_platform/frontend/src/lib/utils.ts index 029901cd8..b4b7747d8 100644 --- 
a/autogpt_platform/frontend/src/lib/utils.ts +++ b/autogpt_platform/frontend/src/lib/utils.ts @@ -41,6 +41,7 @@ export function getTypeTextColor(type: string | null): string { { string: "text-green-500", number: "text-blue-500", + integer: "text-blue-500", boolean: "text-yellow-500", object: "text-purple-500", array: "text-indigo-500", @@ -58,6 +59,7 @@ export function getTypeBgColor(type: string | null): string { { string: "border-green-500", number: "border-blue-500", + integer: "border-blue-500", boolean: "border-yellow-500", object: "border-purple-500", array: "border-indigo-500", @@ -74,6 +76,7 @@ export function getTypeColor(type: string | null): string { { string: "#22c55e", number: "#3b82f6", + integer: "#3b82f6", boolean: "#eab308", object: "#a855f7", array: "#6366f1", From 86fbbae65c0dd049f7f4430d30f09210b1baeac5 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Wed, 27 Nov 2024 15:37:26 +0700 Subject: [PATCH 19/20] fix(frontend): Add text length limit when displaying Graph & Block name with different length in different places (#8746) --- .../frontend/src/components/CustomNode.tsx | 11 +++++++--- .../components/edit/control/BlocksControl.tsx | 21 ++++++++++++------- .../components/edit/control/SaveControl.tsx | 2 ++ .../src/components/monitor/AgentFlowList.tsx | 10 +++++++-- .../src/components/monitor/FlowRunInfo.tsx | 6 +++--- .../src/components/monitor/FlowRunsList.tsx | 6 +++++- .../src/components/monitor/scheduleTable.tsx | 3 ++- .../frontend/src/components/ui/render.tsx | 20 +++++++++++------- .../src/components/ui/scroll-area.tsx | 5 ++++- 9 files changed, 58 insertions(+), 26 deletions(-) diff --git a/autogpt_platform/frontend/src/components/CustomNode.tsx b/autogpt_platform/frontend/src/components/CustomNode.tsx index 710aa1932..01f43e313 100644 --- a/autogpt_platform/frontend/src/components/CustomNode.tsx +++ b/autogpt_platform/frontend/src/components/CustomNode.tsx @@ -28,6 +28,7 @@ import { } from "@/lib/utils"; import { Button } from "@/components/ui/button"; import { Switch } from "@/components/ui/switch"; +import { TextRenderer } from "@/components/ui/render"; import { history } from "./history"; import NodeHandle from "./NodeHandle"; import { @@ -564,9 +565,13 @@ export function CustomNode({
-          {beautifyString(
-            data.blockType?.replace(/Block$/, "") || data.title,
-          )}
+
#{id.split("-")[0]}
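The change above repeats one pattern across the UI: every place that used to interpolate a raw name now goes through `TextRenderer`. A minimal sketch of that usage, based on the props visible elsewhere in this patch (`value`, optional `truncateLengthLimit`); the wrapper component and the 30-character limit are illustrative, not part of the diff:

```tsx
import React from "react";
import { TextRenderer } from "@/components/ui/render";

// Clamp long graph/block names to a fixed length. TextRenderer returns the
// text unchanged when it fits, or the first `truncateLengthLimit` characters
// followed by "..." when it does not.
export const GraphNameCell: React.FC<{ name: string }> = ({ name }) => (
  <span title={name}>
    <TextRenderer value={name} truncateLengthLimit={30} />
  </span>
);
```

Because `TextRenderer` returns plain text rather than new markup, callers keep control of the surrounding element and only the string itself is clamped.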
diff --git a/autogpt_platform/frontend/src/components/edit/control/BlocksControl.tsx b/autogpt_platform/frontend/src/components/edit/control/BlocksControl.tsx index 6a19771ae..a4183de4d 100644 --- a/autogpt_platform/frontend/src/components/edit/control/BlocksControl.tsx +++ b/autogpt_platform/frontend/src/components/edit/control/BlocksControl.tsx @@ -3,6 +3,7 @@ import { Card, CardContent, CardHeader } from "@/components/ui/card"; import { Label } from "@/components/ui/label"; import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; +import { TextRenderer } from "@/components/ui/render"; import { ScrollArea } from "@/components/ui/scroll-area"; import { beautifyString } from "@/lib/utils"; import { @@ -180,7 +181,7 @@ export const BlocksControl: React.FC = ({ {getFilteredBlockList().map((block) => ( @@ -202,13 +203,19 @@ export const BlocksControl: React.FC = ({ className="block truncate pb-1 text-sm font-semibold" data-id={`block-name-${block.id}`} > - {beautifyString(block.name).replace(/ Block$/, "")} + - - {/* Cap description at 100 characters max */} - {block.description?.length > 100 - ? block.description.slice(0, 100) + "..." - : block.description} + +
onNameChange(e.target.value)} data-id="save-control-name-input" + maxLength={100} /> onDescriptionChange(e.target.value)} data-id="save-control-description-input" + maxLength={500} /> {agentMeta?.version && ( <> diff --git a/autogpt_platform/frontend/src/components/monitor/AgentFlowList.tsx b/autogpt_platform/frontend/src/components/monitor/AgentFlowList.tsx index 9aef109a3..e9fda65a6 100644 --- a/autogpt_platform/frontend/src/components/monitor/AgentFlowList.tsx +++ b/autogpt_platform/frontend/src/components/monitor/AgentFlowList.tsx @@ -2,6 +2,7 @@ import AutoGPTServerAPI, { GraphMeta } from "@/lib/autogpt-server-api"; import React, { useEffect, useMemo, useState } from "react"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Button } from "@/components/ui/button"; +import { TextRenderer } from "@/components/ui/render"; import Link from "next/link"; import { Dialog, @@ -94,7 +95,10 @@ export const AgentFlowList = ({ }); }} > - {template.name} + ))} @@ -162,7 +166,9 @@ export const AgentFlowList = ({ onClick={() => onSelectFlow(flow)} data-state={selectedFlow?.id == flow.id ? "selected" : null} > - {flow.name} + + + {/* */} {/* {flow.updatedAt ?? "???"} diff --git a/autogpt_platform/frontend/src/components/monitor/FlowRunInfo.tsx b/autogpt_platform/frontend/src/components/monitor/FlowRunInfo.tsx index b98c2c68b..e7d74577d 100644 --- a/autogpt_platform/frontend/src/components/monitor/FlowRunInfo.tsx +++ b/autogpt_platform/frontend/src/components/monitor/FlowRunInfo.tsx @@ -71,7 +71,7 @@ export const FlowRunInfo: React.FC< result: result.output_data?.output || undefined, })), ); - }, [api, flow.id, flow.version, flowRun.id]); + }, [api, flow.id, flowRun.id]); // Fetch graph and execution data useEffect(() => { @@ -80,7 +80,7 @@ export const FlowRunInfo: React.FC< } fetchBlockResults(); - }, [isOutputOpen, blockOutputs]); + }, [isOutputOpen, blockOutputs, fetchBlockResults]); if (flowRun.graphID != flow.id) { throw new Error( @@ -90,7 +90,7 @@ export const FlowRunInfo: React.FC< const handleStopRun = useCallback(() => { api.stopGraphExecution(flow.id, flowRun.id); - }, [flow.id, flowRun.id]); + }, [api, flow.id, flowRun.id]); return ( <> diff --git a/autogpt_platform/frontend/src/components/monitor/FlowRunsList.tsx b/autogpt_platform/frontend/src/components/monitor/FlowRunsList.tsx index ed2935556..51d16a927 100644 --- a/autogpt_platform/frontend/src/components/monitor/FlowRunsList.tsx +++ b/autogpt_platform/frontend/src/components/monitor/FlowRunsList.tsx @@ -12,6 +12,7 @@ import { } from "@/components/ui/table"; import moment from "moment/moment"; import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge"; +import { TextRenderer } from "../ui/render"; export const FlowRunsList: React.FC<{ flows: GraphMeta[]; @@ -43,7 +44,10 @@ export const FlowRunsList: React.FC<{ data-state={selectedRun?.id == run.id ? 
"selected" : null} > - {flows.find((f) => f.id == run.graphID)!.name} + f.id == run.graphID)!.name} + truncateLengthLimit={30} + /> {moment(run.startTime).format("HH:mm")} diff --git a/autogpt_platform/frontend/src/components/monitor/scheduleTable.tsx b/autogpt_platform/frontend/src/components/monitor/scheduleTable.tsx index 31740e92d..85cffba4e 100644 --- a/autogpt_platform/frontend/src/components/monitor/scheduleTable.tsx +++ b/autogpt_platform/frontend/src/components/monitor/scheduleTable.tsx @@ -30,6 +30,7 @@ import { DialogHeader, DialogTitle, } from "@/components/ui/dialog"; +import { TextRenderer } from "../ui/render"; interface SchedulesTableProps { schedules: Schedule[]; @@ -111,7 +112,7 @@ export const SchedulesTable = ({ {agents.map((agent, i) => ( - {agent.name} + ))} diff --git a/autogpt_platform/frontend/src/components/ui/render.tsx b/autogpt_platform/frontend/src/components/ui/render.tsx index 45ec142bc..ca7d6a00c 100644 --- a/autogpt_platform/frontend/src/components/ui/render.tsx +++ b/autogpt_platform/frontend/src/components/ui/render.tsx @@ -76,15 +76,14 @@ const AudioRenderer: React.FC<{ audioUrl: string }> = ({ audioUrl }) => (
); -const TextRenderer: React.FC<{ value: any; truncateLongData?: boolean }> = ({ - value, - truncateLongData, -}) => { - const maxChars = 100; +export const TextRenderer: React.FC<{ + value: any; + truncateLengthLimit?: number; +}> = ({ value, truncateLengthLimit }) => { const text = typeof value === "object" ? JSON.stringify(value, null, 2) : String(value); - return truncateLongData && text.length > maxChars - ? text.slice(0, maxChars) + "..." + return truncateLengthLimit && text.length > truncateLengthLimit + ? text.slice(0, truncateLengthLimit) + "..." : text; }; @@ -101,5 +100,10 @@ export const ContentRenderer: React.FC<{ return ; } } - return ; + return ( + + ); }; diff --git a/autogpt_platform/frontend/src/components/ui/scroll-area.tsx b/autogpt_platform/frontend/src/components/ui/scroll-area.tsx index c10b0407c..ab4dc3b8c 100644 --- a/autogpt_platform/frontend/src/components/ui/scroll-area.tsx +++ b/autogpt_platform/frontend/src/components/ui/scroll-area.tsx @@ -14,7 +14,10 @@ const ScrollArea = React.forwardRef< className={cn("relative overflow-hidden", className)} {...props} > - + {children} From 5dd151b41e8377409663e1dc61a5bdb14cf1f7b9 Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Wed, 27 Nov 2024 03:44:19 -0600 Subject: [PATCH 20/20] feat(tests): add baseline utility for integration testing from frontend ui (#8765) --- .gitignore | 2 + .../frontend/src/app/profile/page.tsx | 4 +- .../frontend/src/tests/auth.spec.ts | 29 +++-- .../frontend/src/tests/fixtures/index.ts | 107 ++++++++++++++++-- .../frontend/src/tests/pages/base.page.ts | 15 +++ .../frontend/src/tests/pages/navbar.page.ts | 51 +++++++++ .../frontend/src/tests/pages/profile.page.ts | 38 +++++++ .../frontend/src/tests/profile.spec.ts | 57 ++++++++++ 8 files changed, 279 insertions(+), 24 deletions(-) create mode 100644 autogpt_platform/frontend/src/tests/pages/base.page.ts create mode 100644 autogpt_platform/frontend/src/tests/pages/navbar.page.ts create mode 100644 autogpt_platform/frontend/src/tests/pages/profile.page.ts create mode 100644 autogpt_platform/frontend/src/tests/profile.spec.ts diff --git a/.gitignore b/.gitignore index 6590e2129..225a4b93b 100644 --- a/.gitignore +++ b/.gitignore @@ -171,3 +171,5 @@ ig* .github_access_token LICENSE.rtf autogpt_platform/backend/settings.py +/.auth +/autogpt_platform/frontend/.auth diff --git a/autogpt_platform/frontend/src/app/profile/page.tsx b/autogpt_platform/frontend/src/app/profile/page.tsx index 93d9b7708..c1df9e705 100644 --- a/autogpt_platform/frontend/src/app/profile/page.tsx +++ b/autogpt_platform/frontend/src/app/profile/page.tsx @@ -143,7 +143,9 @@ export default function PrivatePage() { return (
-

Hello {user.email}

+

+        Hello {user.email}
+
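Patch 20's page objects (`base.page.ts`, `navbar.page.ts`, `profile.page.ts`) are added as new files, so their bodies do not appear in this diff. A minimal sketch of the page-object pattern they introduce, written against Playwright's documented API — the class name mirrors the new file, but every selector and method body here is an assumption for illustration only:

```ts
// pages/profile.page.ts (sketch; the real selectors and helpers may differ)
import { Page } from "@playwright/test";

export class ProfilePage {
  constructor(protected readonly page: Page) {}

  async goto(): Promise<void> {
    // Assumes a baseURL is configured in playwright.config.ts.
    await this.page.goto("/profile");
  }

  // Reads the "Hello {user.email}" greeting rendered by app/profile/page.tsx.
  async getDisplayedEmail(): Promise<string> {
    const greeting = await this.page
      .locator("p", { hasText: "Hello" })
      .textContent();
    return (greeting ?? "").replace("Hello", "").trim();
  }
}
```

A spec such as `profile.spec.ts` can then assert on `await new ProfilePage(page).getDisplayedEmail()` instead of hard-coding selectors in the test body, which is the point of keeping page knowledge out of the specs.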