Mirror of https://github.com/Significant-Gravitas/Auto-GPT.git, synced 2025-01-08 11:57:32 +08:00

Commit 27bf859800: Merge branch 'dev' into snyk-fix-5b31706a1a40560167a7f90d6f383321

2  .gitignore (vendored)
@@ -171,3 +171,5 @@ ig*
 .github_access_token
 LICENSE.rtf
 autogpt_platform/backend/settings.py
+/.auth
+/autogpt_platform/frontend/.auth
@@ -35,7 +35,7 @@ The AutoGPT frontend is where users interact with our powerful AI automation platform

 **Monitoring and Analytics:** Keep track of your agents' performance and gain insights to continually improve your automation processes.

-[Read this guide](https://docs.agpt.co/server/new_blocks/) to learn how to build your own custom blocks.
+[Read this guide](https://docs.agpt.co/platform/new_blocks/) to learn how to build your own custom blocks.

 ### 💽 AutoGPT Server

40  autogpt_platform/autogpt_libs/poetry.lock (generated)
@@ -1324,29 +1324,29 @@ pyasn1 = ">=0.1.3"

 [[package]]
 name = "ruff"
-version = "0.7.4"
+version = "0.8.0"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
-    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
-    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
-    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
-    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
-    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
-    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
+    {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
+    {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
+    {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
+    {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
+    {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
+    {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
+    {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
 ]

 [[package]]
@@ -1750,4 +1750,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<4.0"
-content-hash = "48184ad1281689c7743b8ca23135a647dc52257d54702d88b043fe31fe27ff27"
+content-hash = "8cd9e3cd56544dc826b545b47b79fe6d20bb6ac84660770c436d1347418d4028"
@@ -19,7 +19,7 @@ supabase = "^2.10.0"

 [tool.poetry.group.dev.dependencies]
 redis = "^5.2.0"
-ruff = "^0.7.4"
+ruff = "^0.8.0"

 [build-system]
 requires = ["poetry-core"]
@@ -28,8 +28,15 @@ SUPABASE_URL=http://localhost:8000
 SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
 SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long

-# For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow for integrations to work.
-FRONTEND_BASE_URL=http://localhost:3000
+## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
+## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
+# FRONTEND_BASE_URL=http://localhost:3000
+
+## PLATFORM_BASE_URL must be set to a *publicly accessible* URL pointing to your backend
+## to use the platform's webhook-related functionality.
+## If you are developing locally, you can use something like ngrok to get a public URL
+## and tunnel it to your locally running backend.
+PLATFORM_BASE_URL=https://your-public-url-here

 ## == INTEGRATION CREDENTIALS == ##
 # Each set of server side credentials is required for the corresponding 3rd party
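PLATFORM_BASE_URL matters because the webhook provider (e.g. GitHub) must be able to reach the backend's ingress endpoint. A minimal sketch of how the ingress URL is plausibly composed from this setting — the endpoint path comes from the PR description further down; the helper name and the use of os.environ are illustrative assumptions:

import os

def webhook_ingress_url(provider: str, webhook_id: str) -> str:
    # Hypothetical helper: joins the public base URL with the ingress path
    # POST /integrations/{provider}/webhooks/{webhook_id}/ingress named in the PR body.
    base = os.environ["PLATFORM_BASE_URL"].rstrip("/")
    return f"{base}/integrations/{provider}/webhooks/{webhook_id}/ingress"

# e.g. webhook_ingress_url("github", "123")
# -> "https://your-public-url-here/integrations/github/webhooks/123/ingress"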
@@ -60,13 +60,6 @@ for block_cls in all_subclasses(Block):
     input_schema = block.input_schema.model_fields
     output_schema = block.output_schema.model_fields

-    # Prevent duplicate field name in input_schema and output_schema
-    duplicate_field_names = set(input_schema.keys()) & set(output_schema.keys())
-    if duplicate_field_names:
-        raise ValueError(
-            f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
-        )
-
     # Make sure `error` field is a string in the output schema
     if "error" in output_schema and output_schema["error"].annotation is not str:
         raise ValueError(
@@ -27,7 +27,7 @@ def get_executor_manager_client():

 @thread_cached
 def get_event_bus():
-    from backend.data.queue import RedisExecutionEventBus
+    from backend.data.execution import RedisExecutionEventBus

     return RedisExecutionEventBus()

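For context, @thread_cached memoizes this factory per thread, so each thread reuses a single RedisExecutionEventBus instance. A minimal sketch of such a decorator, sufficient for a zero-argument factory like get_event_bus — an assumption, the real backend.util implementation may differ (e.g. by caching per call arguments):

import threading
from functools import wraps

def thread_cached(func):
    # One cached result per thread; ignores call arguments, which is fine
    # for zero-argument factories.
    local = threading.local()

    @wraps(func)
    def wrapper(*args, **kwargs):
        if not hasattr(local, "value"):
            local.value = func(*args, **kwargs)
        return local.value

    return wrapper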
@@ -75,11 +75,17 @@ class ConditionBlock(Block):

         value1 = input_data.value1
         if isinstance(value1, str):
-            value1 = float(value1.strip())
+            try:
+                value1 = float(value1.strip())
+            except ValueError:
+                value1 = value1.strip()

         value2 = input_data.value2
         if isinstance(value2, str):
-            value2 = float(value2.strip())
+            try:
+                value2 = float(value2.strip())
+            except ValueError:
+                value2 = value2.strip()

         yes_value = input_data.yes_value if input_data.yes_value is not None else value1
         no_value = input_data.no_value if input_data.no_value is not None else value2
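The effect of this change: a string that parses as a number is still compared numerically, while a non-numeric string now falls back to a trimmed string comparison instead of raising ValueError. The same logic, extracted as a standalone helper for illustration (the function name is hypothetical):

def coerce_operand(value):
    # Numeric-looking strings become floats; other strings are just trimmed;
    # non-string values pass through unchanged.
    if isinstance(value, str):
        try:
            return float(value.strip())
        except ValueError:
            return value.strip()
    return value

assert coerce_operand(" 3.5 ") == 3.5
assert coerce_operand(" yes ") == "yes"
assert coerce_operand(7) == 7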
@@ -0,0 +1,43 @@
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class WordCharacterCountBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(
            description="Input text to count words and characters",
            placeholder="Enter your text here",
            advanced=False,
        )

    class Output(BlockSchema):
        word_count: int = SchemaField(description="Number of words in the input text")
        character_count: int = SchemaField(
            description="Number of characters in the input text"
        )
        error: str = SchemaField(
            description="Error message if the counting operation failed"
        )

    def __init__(self):
        super().__init__(
            id="ab2a782d-22cf-4587-8a70-55b59b3f9f90",
            description="Counts the number of words and characters in a given text.",
            categories={BlockCategory.TEXT},
            input_schema=WordCharacterCountBlock.Input,
            output_schema=WordCharacterCountBlock.Output,
            test_input={"text": "Hello, how are you?"},
            test_output=[("word_count", 4), ("character_count", 19)],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            text = input_data.text
            word_count = len(text.split())
            character_count = len(text)

            yield "word_count", word_count
            yield "character_count", character_count

        except Exception as e:
            yield "error", str(e)
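Since run() is a generator of (name, value) pairs, the declared test data can be checked by iterating it directly. A quick sketch, assuming the Input schema accepts plain keyword construction:

block = WordCharacterCountBlock()
outputs = dict(block.run(WordCharacterCountBlock.Input(text="Hello, how are you?")))
# "Hello, how are you?".split() has 4 items; the string has 19 characters.
assert outputs == {"word_count": 4, "character_count": 19}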
@@ -0,0 +1,700 @@
{
  "action": "synchronize",
  "number": 8358,
  "pull_request": {
    "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358",
    "id": 2128918491,
    "node_id": "PR_kwDOJKSTjM5-5Lfb",
    "html_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358",
    "diff_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.diff",
    "patch_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.patch",
    "issue_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358",
    "number": 8358,
    "state": "open",
    "locked": false,
    "title": "feat(platform, blocks): Webhook-triggered blocks",
    "user": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "body": "- Resolves #8352\r\n\r\n## Changes 🏗️\r\n\r\n- feat(blocks): Add GitHub Pull Request Trigger block\r\n\r\n### feat(platform): Add support for Webhook-triggered blocks\r\n- ⚠️ Add `PLATFORM_BASE_URL` setting\r\n\r\n- Add webhook config option and `BlockType.WEBHOOK` to `Block`\r\n  - Add check to `Block.__init__` to enforce type and shape of webhook event filter\r\n  - Add check to `Block.__init__` to enforce `payload` input on webhook blocks\r\n\r\n- Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system\r\n  - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id`\r\n  - Add `set_node_webhook(..)` in `backend.data.graph`\r\n\r\n- Add webhook-related endpoints:\r\n  - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads, and for all associated nodes create graph executions\r\n    - Add `Node.is_triggered_by_event_type(..)` helper method\r\n  - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook\r\n    - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations`\r\n\r\n- Add `backend.integrations.webhooks` module, including:\r\n  - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc.\r\n    - Add calls to these hooks in the graph create/update endpoints\r\n  - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads\r\n\r\n### Other improvements\r\n- fix(blocks): Allow having an input and output pin with the same name\r\n- feat(blocks): Allow hiding inputs (e.g. `payload`) with `SchemaField(hidden=True)`\r\n- feat(backend/data): Add `graph_id`, `graph_version` to `Node`; `user_id` to `GraphMeta`\r\n  - Add `Creatable` versions of `Node`, `GraphMeta` and `Graph` without these properties\r\n  - Add `graph_from_creatable(..)` helper function in `backend.data.graph`\r\n- refactor(backend/data): Make `RedisEventQueue` generic\r\n- refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode`\r\n- refactor(backend): Remove unused subgraph functionality\r\n\r\n## How it works\r\n- When a graph is created, the `on_graph_activate` and `on_node_activate` hooks are called on the graph and its nodes\r\n- If a webhook-triggered node has presets for all the relevant inputs, `on_node_activate` will get/create a suitable webhook and link it by setting `AgentGraphNode.webhook_id`\r\n  - `on_node_activate` uses `webhook_manager.get_suitable_webhook(..)`, which tries to find a suitable webhook (with matching requirements) or creates it if none exists yet\r\n- When a graph is deactivated (in favor of a newer/other version) or deleted, `on_graph_deactivate` and `on_node_deactivate` are called on the graph and its nodes to clean up webhooks that are no longer in use\r\n- When a valid webhook payload is received, two things happen:\r\n  1. It is broadcast on the Redis channel `webhooks/{webhook_id}/{event_type}`\r\n  2. Graph executions are initiated for all nodes triggered by this webhook\r\n\r\n## TODO\r\n- [ ] #8537\r\n- [x] #8538\r\n- [ ] #8357\r\n- [ ] ~~#8554~~ can be done in a follow-up PR\r\n- [ ] Test test test!\r\n- [ ] Add note on `repo` input of webhook blocks that the credentials used must have the right permissions for the given organization/repo\r\n- [x] Implement proper detection and graceful handling of webhook creation failing due to insufficient permissions. This should give a clear message to the user to e.g. \"give the app access to this organization in your settings\".\r\n- [ ] Nice-to-have: make a button on webhook blocks to trigger a ping and check its result. The API endpoints for this is already implemented.",
    "created_at": "2024-10-16T22:13:47Z",
    "updated_at": "2024-11-11T18:34:54Z",
    "closed_at": null,
    "merged_at": null,
    "merge_commit_sha": "cbfd0cdd8db52cdd5a3b7ce088fc0ab4617a652e",
    "assignee": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "assignees": [
      {
        "login": "Pwuts",
        "id": 12185583,
        "node_id": "MDQ6VXNlcjEyMTg1NTgz",
        "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Pwuts",
        "html_url": "https://github.com/Pwuts",
        "followers_url": "https://api.github.com/users/Pwuts/followers",
        "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
        "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
        "organizations_url": "https://api.github.com/users/Pwuts/orgs",
        "repos_url": "https://api.github.com/users/Pwuts/repos",
        "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Pwuts/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_reviewers": [
      {
        "login": "kcze",
        "id": 34861343,
        "node_id": "MDQ6VXNlcjM0ODYxMzQz",
        "avatar_url": "https://avatars.githubusercontent.com/u/34861343?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/kcze",
        "html_url": "https://github.com/kcze",
        "followers_url": "https://api.github.com/users/kcze/followers",
        "following_url": "https://api.github.com/users/kcze/following{/other_user}",
        "gists_url": "https://api.github.com/users/kcze/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/kcze/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/kcze/subscriptions",
        "organizations_url": "https://api.github.com/users/kcze/orgs",
        "repos_url": "https://api.github.com/users/kcze/repos",
        "events_url": "https://api.github.com/users/kcze/events{/privacy}",
        "received_events_url": "https://api.github.com/users/kcze/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_teams": [
      {
        "name": "DevOps",
        "id": 9547361,
        "node_id": "T_kwDOB8roIc4Aka5h",
        "slug": "devops",
        "description": "",
        "privacy": "closed",
        "notification_setting": "notifications_enabled",
        "url": "https://api.github.com/organizations/130738209/team/9547361",
        "html_url": "https://github.com/orgs/Significant-Gravitas/teams/devops",
        "members_url": "https://api.github.com/organizations/130738209/team/9547361/members{/member}",
        "repositories_url": "https://api.github.com/organizations/130738209/team/9547361/repos",
        "permission": "pull",
        "parent": null
      }
    ],
    "labels": [
      {
        "id": 5272676214,
        "node_id": "LA_kwDOJKSTjM8AAAABOkandg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/documentation",
        "name": "documentation",
        "color": "0075ca",
        "default": true,
        "description": "Improvements or additions to documentation"
      },
      {
        "id": 5410633769,
        "node_id": "LA_kwDOJKSTjM8AAAABQn-4KQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/size/xl",
        "name": "size/xl",
        "color": "E751DD",
        "default": false,
        "description": ""
      },
      {
        "id": 6892322271,
        "node_id": "LA_kwDOJKSTjM8AAAABmtB93w",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/Review%20effort%20[1-5]:%204",
        "name": "Review effort [1-5]: 4",
        "color": "d1bcf9",
        "default": false,
        "description": null
      },
      {
        "id": 7218433025,
        "node_id": "LA_kwDOJKSTjM8AAAABrkCMAQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/frontend",
        "name": "platform/frontend",
        "color": "033C07",
        "default": false,
        "description": "AutoGPT Platform - Front end"
      },
      {
        "id": 7219356193,
        "node_id": "LA_kwDOJKSTjM8AAAABrk6iIQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/backend",
        "name": "platform/backend",
        "color": "ededed",
        "default": false,
        "description": "AutoGPT Platform - Back end"
      },
      {
        "id": 7515330106,
        "node_id": "LA_kwDOJKSTjM8AAAABv_LWOg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/blocks",
        "name": "platform/blocks",
        "color": "eb5757",
        "default": false,
        "description": null
      }
    ],
    "milestone": null,
    "draft": false,
    "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits",
    "review_comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments",
    "review_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}",
    "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments",
    "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0",
    "head": {
      "label": "Significant-Gravitas:reinier/open-1961-implement-github-on-pull-request-block",
      "ref": "reinier/open-1961-implement-github-on-pull-request-block",
      "sha": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "homepage": "https://agpt.co",
        "size": 181894,
        "stargazers_count": 168203,
        "watchers_count": 168203,
        "language": "Python",
        "has_issues": true,
        "has_projects": true,
        "has_downloads": true,
        "has_wiki": true,
        "has_pages": false,
        "has_discussions": true,
        "forks_count": 44376,
        "mirror_url": null,
        "archived": false,
        "disabled": false,
        "open_issues_count": 189,
        "license": {
          "key": "other",
          "name": "Other",
          "spdx_id": "NOASSERTION",
          "url": null,
          "node_id": "MDc6TGljZW5zZTA="
        },
        "allow_forking": true,
        "is_template": false,
        "web_commit_signoff_required": false,
        "topics": [
          "ai",
          "artificial-intelligence",
          "autonomous-agents",
          "gpt-4",
          "openai",
          "python"
        ],
        "visibility": "public",
        "forks": 44376,
        "open_issues": 189,
        "watchers": 168203,
        "default_branch": "master",
        "allow_squash_merge": true,
        "allow_merge_commit": false,
        "allow_rebase_merge": false,
        "allow_auto_merge": true,
        "delete_branch_on_merge": true,
        "allow_update_branch": true,
        "use_squash_pr_title_as_default": true,
        "squash_merge_commit_message": "COMMIT_MESSAGES",
        "squash_merge_commit_title": "PR_TITLE",
        "merge_commit_message": "BLANK",
        "merge_commit_title": "PR_TITLE"
      }
    },
    "base": {
      "label": "Significant-Gravitas:dev",
      "ref": "dev",
      "sha": "0b5b95eff5e18c1e162d2b30b66a7be2bed1cbc2",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "homepage": "https://agpt.co",
        "size": 181894,
        "stargazers_count": 168203,
        "watchers_count": 168203,
        "language": "Python",
        "has_issues": true,
        "has_projects": true,
        "has_downloads": true,
        "has_wiki": true,
        "has_pages": false,
        "has_discussions": true,
        "forks_count": 44376,
        "mirror_url": null,
        "archived": false,
        "disabled": false,
        "open_issues_count": 189,
        "license": {
          "key": "other",
          "name": "Other",
          "spdx_id": "NOASSERTION",
          "url": null,
          "node_id": "MDc6TGljZW5zZTA="
        },
        "allow_forking": true,
        "is_template": false,
        "web_commit_signoff_required": false,
        "topics": [
          "ai",
          "artificial-intelligence",
          "autonomous-agents",
          "gpt-4",
          "openai",
          "python"
        ],
        "visibility": "public",
        "forks": 44376,
        "open_issues": 189,
        "watchers": 168203,
        "default_branch": "master",
        "allow_squash_merge": true,
        "allow_merge_commit": false,
        "allow_rebase_merge": false,
        "allow_auto_merge": true,
        "delete_branch_on_merge": true,
        "allow_update_branch": true,
        "use_squash_pr_title_as_default": true,
        "squash_merge_commit_message": "COMMIT_MESSAGES",
        "squash_merge_commit_title": "PR_TITLE",
        "merge_commit_message": "BLANK",
        "merge_commit_title": "PR_TITLE"
      }
    },
    "_links": {
      "self": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358"
      },
      "html": {
        "href": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358"
      },
      "issue": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358"
      },
      "comments": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments"
      },
      "review_comments": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments"
      },
      "review_comment": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}"
      },
      "commits": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits"
      },
      "statuses": {
        "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0"
      }
    },
    "author_association": "MEMBER",
    "auto_merge": null,
    "active_lock_reason": null,
    "merged": false,
    "mergeable": null,
    "rebaseable": null,
    "mergeable_state": "unknown",
    "merged_by": null,
    "comments": 12,
    "review_comments": 29,
    "maintainer_can_modify": false,
    "commits": 62,
    "additions": 1674,
    "deletions": 331,
    "changed_files": 36
  },
  "before": "f40aef87672203f47bbbd53f83fae0964c5624da",
  "after": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
  "repository": {
    "id": 614765452,
    "node_id": "R_kgDOJKSTjA",
    "name": "AutoGPT",
    "full_name": "Significant-Gravitas/AutoGPT",
    "private": false,
    "owner": {
      "login": "Significant-Gravitas",
      "id": 130738209,
      "node_id": "O_kgDOB8roIQ",
      "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Significant-Gravitas",
      "html_url": "https://github.com/Significant-Gravitas",
      "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
      "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
      "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
      "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
      "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
      "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
      "type": "Organization",
      "user_view_type": "public",
      "site_admin": false
    },
    "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
    "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
    "fork": false,
    "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
    "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
    "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
    "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
    "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
    "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
    "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
    "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
    "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
    "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
    "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
    "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
    "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
    "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
    "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
    "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
    "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
    "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
    "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
    "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
    "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
    "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
    "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
    "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
    "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
    "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
    "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
    "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
    "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
    "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
    "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
    "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
    "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
    "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
    "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
    "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
    "created_at": "2023-03-16T09:21:07Z",
    "updated_at": "2024-11-11T18:16:29Z",
    "pushed_at": "2024-11-11T18:34:52Z",
    "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
    "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
    "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
    "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
    "homepage": "https://agpt.co",
    "size": 181894,
    "stargazers_count": 168203,
    "watchers_count": 168203,
    "language": "Python",
    "has_issues": true,
    "has_projects": true,
    "has_downloads": true,
    "has_wiki": true,
    "has_pages": false,
    "has_discussions": true,
    "forks_count": 44376,
    "mirror_url": null,
    "archived": false,
    "disabled": false,
    "open_issues_count": 189,
    "license": {
      "key": "other",
      "name": "Other",
      "spdx_id": "NOASSERTION",
      "url": null,
      "node_id": "MDc6TGljZW5zZTA="
    },
    "allow_forking": true,
    "is_template": false,
    "web_commit_signoff_required": false,
    "topics": [
      "ai",
      "artificial-intelligence",
      "autonomous-agents",
      "gpt-4",
      "openai",
      "python"
    ],
    "visibility": "public",
    "forks": 44376,
    "open_issues": 189,
    "watchers": 168203,
    "default_branch": "master",
    "custom_properties": {}
  },
  "organization": {
    "login": "Significant-Gravitas",
    "id": 130738209,
    "node_id": "O_kgDOB8roIQ",
    "url": "https://api.github.com/orgs/Significant-Gravitas",
    "repos_url": "https://api.github.com/orgs/Significant-Gravitas/repos",
    "events_url": "https://api.github.com/orgs/Significant-Gravitas/events",
    "hooks_url": "https://api.github.com/orgs/Significant-Gravitas/hooks",
    "issues_url": "https://api.github.com/orgs/Significant-Gravitas/issues",
    "members_url": "https://api.github.com/orgs/Significant-Gravitas/members{/member}",
    "public_members_url": "https://api.github.com/orgs/Significant-Gravitas/public_members{/member}",
    "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
    "description": ""
  },
  "enterprise": {
    "id": 149607,
    "slug": "significant-gravitas",
    "name": "Significant Gravitas",
    "node_id": "E_kgDOAAJIZw",
    "avatar_url": "https://avatars.githubusercontent.com/b/149607?v=4",
    "description": "The creators of AutoGPT",
    "website_url": "discord.gg/autogpt",
    "html_url": "https://github.com/enterprises/significant-gravitas",
    "created_at": "2024-04-18T17:43:53Z",
    "updated_at": "2024-10-23T16:59:55Z"
  },
  "sender": {
    "login": "Pwuts",
    "id": 12185583,
    "node_id": "MDQ6VXNlcjEyMTg1NTgz",
    "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
    "gravatar_id": "",
    "url": "https://api.github.com/users/Pwuts",
    "html_url": "https://github.com/Pwuts",
    "followers_url": "https://api.github.com/users/Pwuts/followers",
    "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
    "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
    "organizations_url": "https://api.github.com/users/Pwuts/orgs",
    "repos_url": "https://api.github.com/users/Pwuts/repos",
    "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
    "received_events_url": "https://api.github.com/users/Pwuts/received_events",
    "type": "User",
    "user_view_type": "public",
    "site_admin": false
  }
}
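The PR description embedded in this payload documents what happens when a valid webhook payload arrives: it is broadcast on the Redis channel webhooks/{webhook_id}/{event_type}, and graph executions are initiated for all nodes triggered by that webhook. A minimal sketch of that ingress flow under those two documented steps — the function signature and the subscription registry are illustrative, not the actual backend API:

import json

def handle_webhook_ingress(redis_client, webhook_subscriptions, webhook_id, payload):
    event_type = payload["action"]  # e.g. "synchronize" for this example payload
    # 1. Broadcast the event on Redis pub/sub (channel format from the PR body).
    redis_client.publish(f"webhooks/{webhook_id}/{event_type}", json.dumps(payload))
    # 2. Initiate a graph execution for every node subscribed to this webhook
    #    and event type (webhook_subscriptions is a hypothetical registry).
    for start_execution in webhook_subscriptions.get((webhook_id, event_type), []):
        start_execution(payload)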
@@ -234,9 +234,12 @@ class GithubReadIssueBlock(Block):
             credentials,
             input_data.issue_url,
         )
-        yield "title", title
-        yield "body", body
-        yield "user", user
+        if title:
+            yield "title", title
+        if body:
+            yield "body", body
+        if user:
+            yield "user", user


 class GithubListIssuesBlock(Block):
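Yielding only truthy outputs means downstream nodes wired to e.g. the body pin are simply not triggered when an issue has no body, rather than receiving an empty value. In miniature:

def emit(title, body, user):
    # Only produce output pins that actually carry data.
    if title:
        yield "title", title
    if body:
        yield "body", body
    if user:
        yield "user", user

assert dict(emit("Bug report", "", "Pwuts")) == {"title": "Bug report", "user": "Pwuts"}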
156  autogpt_platform/backend/backend/blocks/github/triggers.py (new file)
@ -0,0 +1,156 @@
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCategory,
|
||||
BlockOutput,
|
||||
BlockSchema,
|
||||
BlockWebhookConfig,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
from ._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
GithubCredentialsField,
|
||||
GithubCredentialsInput,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# --8<-- [start:GithubTriggerExample]
|
||||
class GitHubTriggerBase:
|
||||
class Input(BlockSchema):
|
||||
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
|
||||
repo: str = SchemaField(
|
||||
description=(
|
||||
"Repository to subscribe to.\n\n"
|
||||
"**Note:** Make sure your GitHub credentials have permissions "
|
||||
"to create webhooks on this repo."
|
||||
),
|
||||
placeholder="{owner}/{repo}",
|
||||
)
|
||||
# --8<-- [start:example-payload-field]
|
||||
payload: dict = SchemaField(hidden=True, default={})
|
||||
# --8<-- [end:example-payload-field]
|
||||
|
||||
class Output(BlockSchema):
|
||||
payload: dict = SchemaField(
|
||||
description="The complete webhook payload that was received from GitHub. "
|
||||
"Includes information about the affected resource (e.g. pull request), "
|
||||
"the event, and the user who triggered the event."
|
||||
)
|
||||
triggered_by_user: dict = SchemaField(
|
||||
description="Object representing the GitHub user who triggered the event"
|
||||
)
|
||||
error: str = SchemaField(
|
||||
description="Error message if the payload could not be processed"
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
yield "payload", input_data.payload
|
||||
yield "triggered_by_user", input_data.payload["sender"]
|
||||
|
||||
|
||||
class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
|
||||
EXAMPLE_PAYLOAD_FILE = (
|
||||
Path(__file__).parent / "example_payloads" / "pull_request.synchronize.json"
|
||||
)
|
||||
|
||||
# --8<-- [start:example-event-filter]
|
||||
class Input(GitHubTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request
|
||||
"""
|
||||
|
||||
opened: bool = False
|
||||
edited: bool = False
|
||||
closed: bool = False
|
||||
reopened: bool = False
|
||||
synchronize: bool = False
|
||||
assigned: bool = False
|
||||
unassigned: bool = False
|
||||
labeled: bool = False
|
||||
unlabeled: bool = False
|
||||
converted_to_draft: bool = False
|
||||
locked: bool = False
|
||||
unlocked: bool = False
|
||||
enqueued: bool = False
|
||||
dequeued: bool = False
|
||||
milestoned: bool = False
|
||||
demilestoned: bool = False
|
||||
ready_for_review: bool = False
|
||||
review_requested: bool = False
|
||||
review_request_removed: bool = False
|
||||
auto_merge_enabled: bool = False
|
||||
auto_merge_disabled: bool = False
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events", description="The events to subscribe to"
|
||||
)
|
||||
# --8<-- [end:example-event-filter]
|
||||
|
||||
class Output(GitHubTriggerBase.Output):
|
||||
event: str = SchemaField(
|
||||
description="The PR event that triggered the webhook (e.g. 'opened')"
|
||||
)
|
||||
number: int = SchemaField(description="The number of the affected pull request")
|
||||
pull_request: dict = SchemaField(
|
||||
description="Object representing the affected pull request"
|
||||
)
|
||||
pull_request_url: str = SchemaField(
|
||||
description="The URL of the affected pull request"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
from backend.integrations.webhooks.github import GithubWebhookType
|
||||
|
||||
example_payload = json.loads(self.EXAMPLE_PAYLOAD_FILE.read_text())
|
||||
|
||||
super().__init__(
|
||||
id="6c60ec01-8128-419e-988f-96a063ee2fea",
|
||||
description="This block triggers on pull request events and outputs the event type and payload.",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
|
||||
input_schema=GithubPullRequestTriggerBlock.Input,
|
||||
output_schema=GithubPullRequestTriggerBlock.Output,
|
||||
# --8<-- [start:example-webhook_config]
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider="github",
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
event_format="pull_request.{event}",
|
||||
),
|
||||
# --8<-- [end:example-webhook_config]
|
||||
test_input={
|
||||
"repo": "Significant-Gravitas/AutoGPT",
|
||||
"events": {"opened": True, "synchronize": True},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"payload": example_payload,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("payload", example_payload),
|
||||
("triggered_by_user", example_payload["sender"]),
|
||||
("event", example_payload["action"]),
|
||||
("number", example_payload["number"]),
|
||||
("pull_request", example_payload["pull_request"]),
|
||||
("pull_request_url", example_payload["pull_request"]["html_url"]),
|
||||
],
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
yield from super().run(input_data, **kwargs)
|
||||
yield "event", input_data.payload["action"]
|
||||
yield "number", input_data.payload["number"]
|
||||
yield "pull_request", input_data.payload["pull_request"]
|
||||
yield "pull_request_url", input_data.payload["pull_request"]["html_url"]
|
||||
|
||||
|
||||
# --8<-- [end:GithubTriggerExample]
|
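The `events` input above is an all-boolean model; each enabled flag is expanded through the block's `event_format` into one provider event string. A minimal sketch of that expansion, assuming the filter arrives as a plain dict of booleans (as it does in a node's `input_default`; values here are illustrative):

# Sketch: expanding an event filter into provider event strings.
event_filter = {"opened": True, "synchronize": True, "closed": False}
event_format = "pull_request.{event}"
subscribed_events = [
    event_format.format(event=name)
    for name, enabled in event_filter.items()
    if enabled
]
assert subscribed_events == ["pull_request.opened", "pull_request.synchronize"]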
@ -20,9 +20,12 @@ from prisma.models import AgentBlock
from pydantic import BaseModel

from backend.util import json
from backend.util.settings import Config

from .model import CREDENTIALS_FIELD_NAME, ContributorDetails, CredentialsMetaInput

app_config = Config()

BlockData = tuple[str, Any]  # Input & Output data should be a tuple of (name, data).
BlockInput = dict[str, Any]  # Input: 1 input pin consumes 1 data.
BlockOutput = Generator[BlockData, None, None]  # Output: 1 output pin produces n data.
@ -34,6 +37,7 @@ class BlockType(Enum):
    INPUT = "Input"
    OUTPUT = "Output"
    NOTE = "Note"
    WEBHOOK = "Webhook"
    AGENT = "Agent"


@ -94,15 +98,7 @@ class BlockSchema(BaseModel):

    @classmethod
    def validate_data(cls, data: BlockInput) -> str | None:
        """
        Validate the data against the schema.
        Returns the validation error message if the data does not match the schema.
        """
        try:
            jsonschema.validate(data, cls.jsonschema())
            return None
        except jsonschema.ValidationError as e:
            return str(e)
        return json.validate_with_jsonschema(schema=cls.jsonschema(), data=data)

    @classmethod
    def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
@ -185,6 +181,41 @@ class EmptySchema(BlockSchema):
    pass


# --8<-- [start:BlockWebhookConfig]
class BlockWebhookConfig(BaseModel):
    provider: str
    """The service provider that the webhook connects to"""

    webhook_type: str
    """
    Identifier for the webhook type. E.g. GitHub has repo and organization level hooks.

    Only for use in the corresponding `WebhooksManager`.
    """

    resource_format: str
    """
    Template string for the resource that a block instance subscribes to.
    Fields will be filled from the block's inputs (except `payload`).

    Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented)

    Only for use in the corresponding `WebhooksManager`.
    """

    event_filter_input: str
    """Name of the block's event filter input."""

    event_format: str = "{event}"
    """
    Template string for the event(s) that a block instance subscribes to.
    Applied individually to each event selected in the event filter input.

    Example: `"pull_request.{event}"` -> `"pull_request.opened"`
    """
# --8<-- [end:BlockWebhookConfig]


class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
    def __init__(
        self,
@ -201,6 +232,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
        disabled: bool = False,
        static_output: bool = False,
        block_type: BlockType = BlockType.STANDARD,
        webhook_config: Optional[BlockWebhookConfig] = None,
    ):
        """
        Initialize the block with the given schema.
@ -231,9 +263,38 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
        self.contributors = contributors or set()
        self.disabled = disabled
        self.static_output = static_output
        self.block_type = block_type
        self.block_type = block_type if not webhook_config else BlockType.WEBHOOK
        self.webhook_config = webhook_config
        self.execution_stats = {}

        if self.webhook_config:
            # Enforce shape of webhook event filter
            event_filter_field = self.input_schema.model_fields[
                self.webhook_config.event_filter_input
            ]
            if not (
                isinstance(event_filter_field.annotation, type)
                and issubclass(event_filter_field.annotation, BaseModel)
                and all(
                    field.annotation is bool
                    for field in event_filter_field.annotation.model_fields.values()
                )
            ):
                raise NotImplementedError(
                    f"{self.name} has an invalid webhook event selector: "
                    "field must be a BaseModel and all its fields must be boolean"
                )

            # Enforce presence of 'payload' input
            if "payload" not in self.input_schema.model_fields:
                raise TypeError(
                    f"{self.name} is webhook-triggered but has no 'payload' input"
                )

            # Disable webhook-triggered block if webhook functionality not available
            if not app_config.platform_base_url:
                self.disabled = True

    @classmethod
    def create(cls: Type["Block"]) -> "Block":
        return cls()
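`resource_format` and `event_format` above are plain `str.format` templates; the resource template is filled from the block's input values by the corresponding `WebhooksManager`. A hedged sketch of the resolution, with illustrative values (`inputs` stands in for a node's `input_default`):

# Sketch only: how the BlockWebhookConfig templates could be resolved.
inputs = {"repo": "Significant-Gravitas/AutoGPT"}
resource = "{repo}".format(**inputs)  # -> "Significant-Gravitas/AutoGPT"
event = "pull_request.{event}".format(event="opened")  # -> "pull_request.opened"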
@ -11,6 +11,8 @@ from backend.data.block_cost_config import BLOCK_COSTS
from backend.data.cost import BlockCost, BlockCostType
from backend.util.settings import Config

config = Config()


class UserCreditBase(ABC):
    def __init__(self, num_user_credits_refill: int):
@ -202,7 +204,6 @@ class DisabledUserCredit(UserCreditBase):


def get_user_credit_model() -> UserCreditBase:
    config = Config()
    if config.enable_credit.lower() == "true":
        return UserCredit(config.num_user_credits_refill)
    else:
@ -1,7 +1,7 @@
from collections import defaultdict
from datetime import datetime, timezone
from multiprocessing import Manager
from typing import Any, Generic, TypeVar
from typing import Any, AsyncGenerator, Generator, Generic, TypeVar

from prisma.enums import AgentExecutionStatus
from prisma.models import (
@ -14,7 +14,9 @@ from pydantic import BaseModel

from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
from backend.data.queue import AsyncRedisEventBus, RedisEventBus
from backend.util import json, mock
from backend.util.settings import Config


class GraphExecution(BaseModel):
@ -271,7 +273,6 @@ async def update_graph_execution_stats(
    graph_exec_id: str,
    stats: dict[str, Any],
) -> ExecutionResult:

    status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
    res = await AgentGraphExecution.prisma().update(
        where={"id": graph_exec_id},
@ -471,3 +472,42 @@ async def get_incomplete_executions(
        include=EXECUTION_RESULT_INCLUDE,
    )
    return [ExecutionResult.from_db(execution) for execution in executions]


# --------------------- Event Bus --------------------- #

config = Config()


class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    def publish(self, res: ExecutionResult):
        self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")

    def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> Generator[ExecutionResult, None, None]:
        for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
            yield execution_result


class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    async def publish(self, res: ExecutionResult):
        await self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")

    async def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> AsyncGenerator[ExecutionResult, None]:
        async for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
            yield execution_result
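The channel key passed to `publish_event`/`listen_events` is `"{graph_id}/{graph_exec_id}"`, and the `"*"` defaults make `listen` a pattern subscription. A short usage sketch, assuming a configured Redis connection; `handle` is a hypothetical callback:

# Sketch: publish to a concrete channel, listen with a wildcard.
bus = RedisExecutionEventBus()
# bus.publish(result)                      # channel "<bus_name>/g1/e42"
# for res in bus.listen(graph_id="g1"):    # subscribes to "<bus_name>/g1/*"
#     handle(res)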
@ -3,7 +3,7 @@ import logging
import uuid
from collections import defaultdict
from datetime import datetime, timezone
from typing import Any, Literal, Type
from typing import Any, Literal, Optional, Type

import prisma
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
@ -12,12 +12,14 @@ from pydantic.fields import computed_field

from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
from backend.data.block import BlockInput, BlockType, get_block, get_blocks
from backend.data.db import BaseDbModel, transaction
from backend.data.execution import ExecutionStatus
from backend.data.includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from backend.util import json

from .block import BlockInput, BlockType, get_block, get_blocks
from .db import BaseDbModel, transaction
from .execution import ExecutionStatus
from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from .integrations import Webhook

logger = logging.getLogger(__name__)


@ -50,20 +52,51 @@ class Node(BaseDbModel):
    input_links: list[Link] = []
    output_links: list[Link] = []

    webhook_id: Optional[str] = None


class NodeModel(Node):
    graph_id: str
    graph_version: int

    webhook: Optional[Webhook] = None

    @staticmethod
    def from_db(node: AgentNode):
        if not node.AgentBlock:
            raise ValueError(f"Invalid node {node.id}, invalid AgentBlock.")
        obj = Node(
        obj = NodeModel(
            id=node.id,
            block_id=node.AgentBlock.id,
            input_default=json.loads(node.constantInput, target_type=dict[str, Any]),
            metadata=json.loads(node.metadata, target_type=dict[str, Any]),
            graph_id=node.agentGraphId,
            graph_version=node.agentGraphVersion,
            webhook_id=node.webhookId,
            webhook=Webhook.from_db(node.Webhook) if node.Webhook else None,
        )
        obj.input_links = [Link.from_db(link) for link in node.Input or []]
        obj.output_links = [Link.from_db(link) for link in node.Output or []]
        return obj

    def is_triggered_by_event_type(self, event_type: str) -> bool:
        if not (block := get_block(self.block_id)):
            raise ValueError(f"Block #{self.block_id} not found for node #{self.id}")
        if not block.webhook_config:
            raise TypeError("This method can't be used on non-webhook blocks")
        event_filter = self.input_default.get(block.webhook_config.event_filter_input)
        if not event_filter:
            raise ValueError(f"Event filter is not configured on node #{self.id}")
        return event_type in [
            block.webhook_config.event_format.format(event=k)
            for k in event_filter
            if event_filter[k] is True
        ]


# Fix 2-way reference Node <-> Webhook
Webhook.model_rebuild()


class GraphExecution(BaseDbModel):
    execution_id: str
@ -110,33 +143,6 @@ class Graph(BaseDbModel):
    nodes: list[Node] = []
    links: list[Link] = []

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }

    @computed_field
    @property
    def input_schema(self) -> dict[str, Any]:
@ -165,6 +171,38 @@ class Graph(BaseDbModel):
        ],
    )

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }


class GraphModel(Graph):
    user_id: str
    nodes: list[NodeModel] = []  # type: ignore

    @property
    def starting_nodes(self) -> list[Node]:
        outbound_nodes = {link.sink_id for link in self.links}
@ -291,36 +329,39 @@ class Graph(BaseDbModel):
            GraphExecution.from_db(execution)
            for execution in graph.AgentGraphExecution or []
        ]
        nodes = graph.AgentNodes or []

        return Graph(
        return GraphModel(
            id=graph.id,
            user_id=graph.userId,
            version=graph.version,
            is_active=graph.isActive,
            is_template=graph.isTemplate,
            name=graph.name or "",
            description=graph.description or "",
            executions=executions,
            nodes=[Graph._process_node(node, hide_credentials) for node in nodes],
            nodes=[
                GraphModel._process_node(node, hide_credentials)
                for node in graph.AgentNodes or []
            ],
            links=list(
                {
                    Link.from_db(link)
                    for node in nodes
                    for node in graph.AgentNodes or []
                    for link in (node.Input or []) + (node.Output or [])
                }
            ),
        )

    @staticmethod
    def _process_node(node: AgentNode, hide_credentials: bool) -> Node:
        node_dict = node.model_dump()
    def _process_node(node: AgentNode, hide_credentials: bool) -> NodeModel:
        node_dict = {field: getattr(node, field) for field in node.model_fields}
        if hide_credentials and "constantInput" in node_dict:
            constant_input = json.loads(
                node_dict["constantInput"], target_type=dict[str, Any]
            )
            constant_input = Graph._hide_credentials_in_input(constant_input)
            constant_input = GraphModel._hide_credentials_in_input(constant_input)
            node_dict["constantInput"] = json.dumps(constant_input)
        return Node.from_db(AgentNode(**node_dict))
        return NodeModel.from_db(AgentNode(**node_dict))

    @staticmethod
    def _hide_credentials_in_input(input_data: dict[str, Any]) -> dict[str, Any]:
@ -328,7 +369,7 @@ class Graph(BaseDbModel):
        result = {}
        for key, value in input_data.items():
            if isinstance(value, dict):
                result[key] = Graph._hide_credentials_in_input(value)
                result[key] = GraphModel._hide_credentials_in_input(value)
            elif isinstance(value, str) and any(
                sensitive_key in key.lower() for sensitive_key in sensitive_keys
            ):
@ -339,22 +380,37 @@ class Graph(BaseDbModel):
        return result


# --------------------- Model functions --------------------- #
# --------------------- CRUD functions --------------------- #


async def get_node(node_id: str) -> Node:
async def get_node(node_id: str) -> NodeModel:
    node = await AgentNode.prisma().find_unique_or_raise(
        where={"id": node_id},
        include=AGENT_NODE_INCLUDE,
    )
    return Node.from_db(node)
    return NodeModel.from_db(node)


async def set_node_webhook(node_id: str, webhook_id: str | None) -> NodeModel:
    node = await AgentNode.prisma().update(
        where={"id": node_id},
        data=(
            {"Webhook": {"connect": {"id": webhook_id}}}
            if webhook_id
            else {"Webhook": {"disconnect": True}}
        ),
        include=AGENT_NODE_INCLUDE,
    )
    if not node:
        raise ValueError(f"Node #{node_id} not found")
    return NodeModel.from_db(node)


async def get_graphs(
    user_id: str,
    include_executions: bool = False,
    filter_by: Literal["active", "template"] | None = "active",
) -> list[Graph]:
) -> list[GraphModel]:
    """
    Retrieves graph metadata objects.
    Default behaviour is to get all currently active graphs.
@ -365,7 +421,7 @@ async def get_graphs(
        user_id: The ID of the user that owns the graph.

    Returns:
        list[Graph]: A list of objects representing the retrieved graph metadata.
        list[GraphModel]: A list of objects representing the retrieved graphs.
    """
    where_clause: AgentGraphWhereInput = {}

@ -386,7 +442,7 @@ async def get_graphs(
        include=graph_include,
    )

    return [Graph.from_db(graph) for graph in graphs]
    return [GraphModel.from_db(graph) for graph in graphs]


async def get_graph(
@ -395,7 +451,7 @@ async def get_graph(
    template: bool = False,
    user_id: str | None = None,
    hide_credentials: bool = False,
) -> Graph | None:
) -> GraphModel | None:
    """
    Retrieves a graph from the DB.
    Defaults to the version with `is_active` if `version` is not passed,
@ -420,38 +476,35 @@ async def get_graph(
        include=AGENT_GRAPH_INCLUDE,
        order={"version": "desc"},
    )
    return Graph.from_db(graph, hide_credentials) if graph else None
    return GraphModel.from_db(graph, hide_credentials) if graph else None


async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None:
    # Check if the graph belongs to the user
    graph = await AgentGraph.prisma().find_first(
    # Activate the requested version if it exists and is owned by the user.
    updated_count = await AgentGraph.prisma().update_many(
        data={"isActive": True},
        where={
            "id": graph_id,
            "version": version,
            "userId": user_id,
        }
    )
    if not graph:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    updated_graph = await AgentGraph.prisma().update(
        data={"isActive": True},
        where={
            "graphVersionId": {"id": graph_id, "version": version},
        },
    )
    if not updated_graph:
        raise Exception(f"Graph #{graph_id} v{version} not found")
    if updated_count == 0:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    # Deactivate all other versions
    # Deactivate all other versions.
    await AgentGraph.prisma().update_many(
        data={"isActive": False},
        where={"id": graph_id, "version": {"not": version}, "userId": user_id},
        where={
            "id": graph_id,
            "version": {"not": version},
            "userId": user_id,
            "isActive": True,
        },
    )


async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]:
async def get_graph_all_versions(graph_id: str, user_id: str) -> list[GraphModel]:
    graph_versions = await AgentGraph.prisma().find_many(
        where={"id": graph_id, "userId": user_id},
        order={"version": "desc"},
@ -461,7 +514,7 @@ async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]:
    if not graph_versions:
        return []

    return [Graph.from_db(graph) for graph in graph_versions]
    return [GraphModel.from_db(graph) for graph in graph_versions]


async def delete_graph(graph_id: str, user_id: str) -> int:
@ -473,7 +526,7 @@ async def delete_graph(graph_id: str, user_id: str) -> int:
    return entries_count


async def create_graph(graph: Graph, user_id: str) -> Graph:
async def create_graph(graph: Graph, user_id: str) -> GraphModel:
    async with transaction() as tx:
        await __create_graph(tx, graph, user_id)

@ -534,6 +587,32 @@ async def __create_graph(tx, graph: Graph, user_id: str):
# ------------------------ UTILITIES ------------------------ #


def make_graph_model(creatable_graph: Graph, user_id: str) -> GraphModel:
    """
    Convert a Graph to a GraphModel, setting graph_id and graph_version on all nodes.

    Args:
        creatable_graph (Graph): The creatable graph to convert.
        user_id (str): The ID of the user creating the graph.

    Returns:
        GraphModel: The converted Graph object.
    """
    # Create a new Graph object, inheriting properties from CreatableGraph
    return GraphModel(
        **creatable_graph.model_dump(exclude={"nodes"}),
        user_id=user_id,
        nodes=[
            NodeModel(
                **creatable_node.model_dump(),
                graph_id=creatable_graph.id,
                graph_version=creatable_graph.version,
            )
            for creatable_node in creatable_graph.nodes
        ],
    )


async def fix_llm_provider_credentials():
    """Fix node credentials with provider `llm`"""
    from autogpt_libs.supabase_integration_credentials_store import (
@ -547,16 +626,14 @@ async def fix_llm_provider_credentials():

    broken_nodes = await prisma.get_client().query_raw(
        """
        SELECT "User".id user_id,
        SELECT graph."userId" user_id,
               node.id node_id,
               node."constantInput" node_preset_input
        FROM platform."AgentNode" node
        LEFT JOIN platform."AgentGraph" graph
        ON node."agentGraphId" = graph.id
        LEFT JOIN platform."User" "User"
        ON graph."userId" = "User".id
        WHERE node."constantInput"::jsonb->'credentials'->>'provider' = 'llm'
        ORDER BY user_id;
        ORDER BY graph."userId";
        """
    )
    logger.info(f"Fixing LLM credential inputs on {len(broken_nodes)} nodes")
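`Webhook.model_rebuild()` above resolves the string annotation `"NodeModel"` that `Webhook.attached_nodes` uses to avoid a circular import. A self-contained sketch of the same Pydantic v2 pattern, with illustrative model names:

from pydantic import BaseModel

class Parent(BaseModel):
    child: "Child | None" = None  # forward reference, resolved later

class Child(BaseModel):
    value: int = 0

Parent.model_rebuild()  # resolve the deferred "Child" annotation
print(Parent(child=Child(value=1)))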
@ -3,6 +3,7 @@ import prisma

AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = {
    "Input": True,
    "Output": True,
    "Webhook": True,
    "AgentBlock": True,
}

@ -27,3 +28,7 @@ GRAPH_EXECUTION_INCLUDE: prisma.types.AgentGraphExecutionInclude = {
        }
    }
}

INTEGRATION_WEBHOOK_INCLUDE: prisma.types.IntegrationWebhookInclude = {
    "AgentNodes": {"include": AGENT_NODE_INCLUDE}  # type: ignore
}
168
autogpt_platform/backend/backend/data/integrations.py
Normal file
@ -0,0 +1,168 @@
import logging
from typing import TYPE_CHECKING, AsyncGenerator, Optional

from prisma import Json
from prisma.models import IntegrationWebhook
from pydantic import Field

from backend.data.includes import INTEGRATION_WEBHOOK_INCLUDE
from backend.data.queue import AsyncRedisEventBus

from .db import BaseDbModel

if TYPE_CHECKING:
    from .graph import NodeModel

logger = logging.getLogger(__name__)


class Webhook(BaseDbModel):
    user_id: str
    provider: str
    credentials_id: str
    webhook_type: str
    resource: str
    events: list[str]
    config: dict = Field(default_factory=dict)
    secret: str

    provider_webhook_id: str

    attached_nodes: Optional[list["NodeModel"]] = None

    @staticmethod
    def from_db(webhook: IntegrationWebhook):
        from .graph import NodeModel

        return Webhook(
            id=webhook.id,
            user_id=webhook.userId,
            provider=webhook.provider,
            credentials_id=webhook.credentialsId,
            webhook_type=webhook.webhookType,
            resource=webhook.resource,
            events=webhook.events,
            config=dict(webhook.config),
            secret=webhook.secret,
            provider_webhook_id=webhook.providerWebhookId,
            attached_nodes=(
                [NodeModel.from_db(node) for node in webhook.AgentNodes]
                if webhook.AgentNodes is not None
                else None
            ),
        )


# --------------------- CRUD functions --------------------- #


async def create_webhook(webhook: Webhook) -> Webhook:
    created_webhook = await IntegrationWebhook.prisma().create(
        data={
            "id": webhook.id,
            "userId": webhook.user_id,
            "provider": webhook.provider,
            "credentialsId": webhook.credentials_id,
            "webhookType": webhook.webhook_type,
            "resource": webhook.resource,
            "events": webhook.events,
            "config": Json(webhook.config),
            "secret": webhook.secret,
            "providerWebhookId": webhook.provider_webhook_id,
        }
    )
    return Webhook.from_db(created_webhook)


async def get_webhook(webhook_id: str) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_unique_or_raise(
        where={"id": webhook_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook)


async def get_all_webhooks(credentials_id: str) -> list[Webhook]:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhooks = await IntegrationWebhook.prisma().find_many(
        where={"credentialsId": credentials_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return [Webhook.from_db(webhook) for webhook in webhooks]


async def find_webhook(
    credentials_id: str, webhook_type: str, resource: str, events: list[str]
) -> Webhook | None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_first(
        where={
            "credentialsId": credentials_id,
            "webhookType": webhook_type,
            "resource": resource,
            "events": {"has_every": events},
        },
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook) if webhook else None


async def update_webhook_config(webhook_id: str, updated_config: dict) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    _updated_webhook = await IntegrationWebhook.prisma().update(
        where={"id": webhook_id},
        data={"config": Json(updated_config)},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    if _updated_webhook is None:
        raise ValueError(f"Webhook #{webhook_id} not found")
    return Webhook.from_db(_updated_webhook)


async def delete_webhook(webhook_id: str) -> None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    deleted = await IntegrationWebhook.prisma().delete(where={"id": webhook_id})
    if not deleted:
        raise ValueError(f"Webhook #{webhook_id} not found")


# --------------------- WEBHOOK EVENTS --------------------- #


class WebhookEvent(BaseDbModel):
    provider: str
    webhook_id: str
    event_type: str
    payload: dict


class WebhookEventBus(AsyncRedisEventBus[WebhookEvent]):
    Model = WebhookEvent

    @property
    def event_bus_name(self) -> str:
        return "webhooks"

    async def publish(self, event: WebhookEvent):
        await self.publish_event(event, f"{event.webhook_id}/{event.event_type}")

    async def listen(
        self, webhook_id: str, event_type: Optional[str] = None
    ) -> AsyncGenerator[WebhookEvent, None]:
        async for event in self.listen_events(f"{webhook_id}/{event_type or '*'}"):
            yield event


event_bus = WebhookEventBus()


async def publish_webhook_event(event: WebhookEvent):
    await event_bus.publish(event)


async def listen_for_webhook_event(
    webhook_id: str, event_type: Optional[str] = None
) -> WebhookEvent | None:
    async for event in event_bus.listen(webhook_id, event_type):
        return event  # Only one event is expected
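`listen_for_webhook_event` returns after the first matching event, which suits one-shot waits such as confirming a provider ping. A hedged usage sketch; the timeout handling is an assumption, not part of this module:

import asyncio

async def wait_for_ping(webhook_id: str, timeout: float = 10.0):
    # Returns the ping payload, or None if nothing arrives in time.
    try:
        event = await asyncio.wait_for(
            listen_for_webhook_event(webhook_id, event_type="ping"), timeout
        )
    except asyncio.TimeoutError:
        return None
    return event.payload if event else None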
@ -113,6 +113,7 @@ def SchemaField(
    advanced: Optional[bool] = None,
    secret: bool = False,
    exclude: bool = False,
    hidden: Optional[bool] = None,
    **kwargs,
) -> T:
    json_extra = {
@ -121,6 +122,7 @@ def SchemaField(
            "placeholder": placeholder,
            "secret": secret,
            "advanced": advanced,
            "hidden": hidden,
        }.items()
        if v is not None
    }
@ -9,11 +9,8 @@ from redis.asyncio.client import PubSub as AsyncPubSub
from redis.client import PubSub

from backend.data import redis
from backend.data.execution import ExecutionResult
from backend.util.settings import Config

logger = logging.getLogger(__name__)
config = Config()


class DateTimeEncoder(json.JSONEncoder):
@ -36,7 +33,7 @@ class BaseRedisEventBus(Generic[M], ABC):

    def _serialize_message(self, item: M, channel_key: str) -> tuple[str, str]:
        message = json.dumps(item.model_dump(), cls=DateTimeEncoder)
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        logger.info(f"[{channel_name}] Publishing an event to Redis {message}")
        return message, channel_name

@ -54,7 +51,7 @@ class BaseRedisEventBus(Generic[M], ABC):
    def _subscribe(
        self, connection: redis.Redis | redis.AsyncRedis, channel_key: str
    ) -> tuple[PubSub | AsyncPubSub, str]:
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        pubsub = connection.pubsub()
        return pubsub, channel_name

@ -108,37 +105,3 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC):
        async for message in pubsub.listen():
            if event := self._deserialize_message(message, channel_key):
                yield event


class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    def publish(self, res: ExecutionResult):
        self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> Generator[ExecutionResult, None, None]:
        for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result


class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    async def publish(self, res: ExecutionResult):
        await self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    async def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> AsyncGenerator[ExecutionResult, None]:
        async for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result
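Note the separator change from `-` to `/` in the channel names, which keeps keys like `{graph_id}/{graph_exec_id}` unambiguous under glob-style pattern subscriptions. A tiny sketch of the matching behaviour (the bus name is illustrative; Redis glob patterns behave like fnmatch for `*` here):

from fnmatch import fnmatch

assert fnmatch("execution_events/g1/e42", "execution_events/g1/*")
assert not fnmatch("execution_events/g2/e42", "execution_events/g1/*")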
@ -4,6 +4,7 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, ca
from backend.data.credit import get_user_credit_model
from backend.data.execution import (
    ExecutionResult,
    RedisExecutionEventBus,
    create_graph_execution,
    get_execution_results,
    get_incomplete_executions,
@ -15,18 +16,18 @@ from backend.data.execution import (
    upsert_execution_output,
)
from backend.data.graph import get_graph, get_node
from backend.data.queue import RedisExecutionEventBus
from backend.data.user import (
    get_user_integrations,
    get_user_metadata,
    update_user_integrations,
    update_user_metadata,
)
from backend.util.service import AppService, expose
from backend.util.service import AppService, expose, register_pydantic_serializers
from backend.util.settings import Config

P = ParamSpec("P")
R = TypeVar("R")
config = Config()


class DatabaseManager(AppService):
@ -38,7 +39,7 @@ class DatabaseManager(AppService):

    @classmethod
    def get_port(cls) -> int:
        return Config().database_api_port
        return config.database_api_port

    @expose
    def send_execution_update(self, execution_result: ExecutionResult):
@ -55,6 +56,9 @@ class DatabaseManager(AppService):
            res = self.run_and_wait(coroutine)
            return res

        # Register serializers for annotations on bare function
        register_pydantic_serializers(f)

        return wrapper

    # Executions
@ -18,6 +18,7 @@ if TYPE_CHECKING:

from autogpt_libs.utils.cache import thread_cached

from backend.blocks.agent import AgentExecutorBlock
from backend.data import redis
from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
from backend.data.execution import (
@ -29,7 +30,7 @@ from backend.data.execution import (
    merge_execution_input,
    parse_execution_output,
)
from backend.data.graph import Graph, Link, Node
from backend.data.graph import GraphModel, Link, Node
from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
@ -135,7 +136,6 @@ def execute_node(
        logger.error(f"Block {node.block_id} not found.")
        return

    # Sanity check: validate the execution input.
    log_metadata = LogMetadata(
        user_id=user_id,
        graph_eid=graph_exec_id,
@ -144,11 +144,20 @@ def execute_node(
        node_id=node_id,
        block_name=node_block.name,
    )

    # Sanity check: validate the execution input.
    input_data, error = validate_exec(node, data.data, resolve_input=False)
    if input_data is None:
        log_metadata.error(f"Skip execution, input validation error: {error}")
        db_client.upsert_execution_output(node_exec_id, "error", error)
        update_execution(ExecutionStatus.FAILED)
        return

    # Re-shape the input data for agent block.
    # AgentExecutorBlock specially separates the node input_data & its input_default.
    if isinstance(node_block, AgentExecutorBlock):
        input_data = {**node.input_default, "data": input_data}

    # Execute the node
    input_data_str = json.dumps(input_data)
    input_size = len(input_data_str)
@ -177,7 +186,7 @@ def execute_node(
            input_data, **extra_exec_kwargs
        ):
            output_size += len(json.dumps(output_data))
            log_metadata.info("Node produced output", output_name=output_data)
            log_metadata.info("Node produced output", **{output_name: output_data})
            db_client.upsert_execution_output(node_exec_id, output_name, output_data)

            for execution in _enqueue_next_nodes(
@ -244,7 +253,6 @@ def _enqueue_next_nodes(
    graph_id: str,
    log_metadata: LogMetadata,
) -> list[NodeExecution]:

    def add_enqueued_execution(
        node_exec_id: str, node_id: str, data: BlockInput
    ) -> NodeExecution:
@ -376,31 +384,46 @@ def validate_exec(
    if not node_block:
        return None, f"Block for {node.block_id} not found."

    error_prefix = f"Input data missing for {node_block.name}:"
    if isinstance(node_block, AgentExecutorBlock):
        # Validate the execution metadata for the agent executor block.
        try:
            exec_data = AgentExecutorBlock.Input(**node.input_default)
        except Exception as e:
            return None, f"Input data doesn't match {node_block.name}: {str(e)}"

        # Validate input
        input_schema = exec_data.input_schema
        required_fields = set(input_schema["required"])
        input_default = exec_data.data
    else:
        # Convert non-matching data types to the expected input schema.
        for name, data_type in node_block.input_schema.__annotations__.items():
            if (value := data.get(name)) and (type(value) is not data_type):
                data[name] = convert(value, data_type)

        # Validate input
        input_schema = node_block.input_schema.jsonschema()
        required_fields = node_block.input_schema.get_required_fields()
        input_default = node.input_default

    # Input data (without default values) should contain all required fields.
    error_prefix = f"Input data missing or mismatch for `{node_block.name}`:"
    input_fields_from_nodes = {link.sink_name for link in node.input_links}
    if not input_fields_from_nodes.issubset(data):
        return None, f"{error_prefix} {input_fields_from_nodes - set(data)}"

    # Merge input data with default values and resolve dynamic dict/list/object pins.
    data = {**node.input_default, **data}
    data = {**input_default, **data}
    if resolve_input:
        data = merge_execution_input(data)

    # Input data post-merge should contain all required fields from the schema.
    input_fields_from_schema = node_block.input_schema.get_required_fields()
    if not input_fields_from_schema.issubset(data):
        return None, f"{error_prefix} {input_fields_from_schema - set(data)}"

    # Convert non-matching data types to the expected input schema.
    for name, data_type in node_block.input_schema.__annotations__.items():
        if (value := data.get(name)) and (type(value) is not data_type):
            data[name] = convert(value, data_type)
    if not required_fields.issubset(data):
        return None, f"{error_prefix} {required_fields - set(data)}"

    # Last validation: Validate the input values against the schema.
    if error := node_block.input_schema.validate_data(data):
        error_message = f"Input data doesn't match {node_block.name}: {error}"
    if error := json.validate_with_jsonschema(schema=input_schema, data=data):
        error_message = f"{error_prefix} {error}"
        logger.error(error_message)
        return None, error_message

@ -689,7 +712,6 @@ class Executor:


class ExecutionManager(AppService):

    def __init__(self):
        super().__init__()
        self.use_redis = True
@ -751,7 +773,7 @@ class ExecutionManager(AppService):
        user_id: str,
        graph_version: int | None = None,
    ) -> GraphExecution:
        graph: Graph | None = self.db_client.get_graph(
        graph: GraphModel | None = self.db_client.get_graph(
            graph_id=graph_id, user_id=user_id, version=graph_version
        )
        if not graph:
@ -775,6 +797,15 @@ class ExecutionManager(AppService):
            if name and name in data:
                input_data = {"value": data[name]}

            # Extract webhook payload and assign it to the input pin
            webhook_payload_key = f"webhook_{node.webhook_id}_payload"
            if (
                block.block_type == BlockType.WEBHOOK
                and node.webhook_id
                and webhook_payload_key in data
            ):
                input_data = {"payload": data[webhook_payload_key]}

            input_data, error = validate_exec(node, input_data)
            if input_data is None:
                raise ValueError(error)
@ -852,7 +883,7 @@ class ExecutionManager(AppService):
            )
            self.db_client.send_execution_update(exec_update)

    def _validate_node_input_credentials(self, graph: Graph, user_id: str):
    def _validate_node_input_credentials(self, graph: GraphModel, user_id: str):
        """Checks all credentials for all nodes of the graph"""

        for node in graph.nodes:
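For a webhook-triggered node, the graph-execution input is expected to carry the payload under a key derived from the node's webhook ID, as read above. A sketch of that shape, with illustrative values (the key format is taken directly from the code):

webhook_id = "6b9fc4f3-example"  # illustrative
data = {f"webhook_{webhook_id}_payload": {"action": "opened", "number": 123}}
input_data = {"payload": data[f"webhook_{webhook_id}_payload"]}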
@ -38,6 +38,7 @@ def _extract_schema_from_url(database_url) -> tuple[str, str]:


logger = logging.getLogger(__name__)
config = Config()


def log(msg, **kwargs):
@ -96,7 +97,7 @@ class ExecutionScheduler(AppService):

    @classmethod
    def get_port(cls) -> int:
        return Config().execution_scheduler_port
        return config.execution_scheduler_port

    @property
    @thread_cached
@ -11,6 +11,7 @@ from redis.lock import Lock as RedisLock

from backend.data import redis
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Settings

logger = logging.getLogger(__name__)
@ -157,12 +158,14 @@ def _get_provider_oauth_handler(provider_name: str) -> BaseOAuthHandler:
    client_id = getattr(settings.secrets, f"{provider_name}_client_id")
    client_secret = getattr(settings.secrets, f"{provider_name}_client_secret")
    if not (client_id and client_secret):
        raise Exception(  # TODO: ConfigError
        raise MissingConfigError(
            f"Integration with provider '{provider_name}' is not configured",
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url
    frontend_base_url = (
        settings.config.frontend_base_url or settings.config.platform_base_url
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
@ -0,0 +1,7 @@
from enum import Enum


class ProviderName(str, Enum):
    GITHUB = "github"
    GOOGLE = "google"
    NOTION = "notion"
@ -0,0 +1,17 @@
from typing import TYPE_CHECKING

from .github import GithubWebhooksManager

if TYPE_CHECKING:
    from .base import BaseWebhooksManager

# --8<-- [start:WEBHOOK_MANAGERS_BY_NAME]
WEBHOOK_MANAGERS_BY_NAME: dict[str, type["BaseWebhooksManager"]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GithubWebhooksManager,
    ]
}
# --8<-- [end:WEBHOOK_MANAGERS_BY_NAME]

__all__ = ["WEBHOOK_MANAGERS_BY_NAME"]
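Supporting another provider would mean appending its manager class to the list above; a purely hypothetical sketch (no such module exists in this commit):

# from .gitlab import GitlabWebhooksManager  # hypothetical
# WEBHOOK_MANAGERS_BY_NAME would then map both providers:
#   {"github": GithubWebhooksManager, "gitlab": GitlabWebhooksManager}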
163
autogpt_platform/backend/backend/integrations/webhooks/base.py
Normal file
@ -0,0 +1,163 @@
import logging
import secrets
from abc import ABC, abstractmethod
from typing import ClassVar, Generic, TypeVar
from uuid import uuid4

from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import Request
from strenum import StrEnum

from backend.data import integrations
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Config

logger = logging.getLogger(__name__)
app_config = Config()

WT = TypeVar("WT", bound=StrEnum)


class BaseWebhooksManager(ABC, Generic[WT]):
    # --8<-- [start:BaseWebhooksManager1]
    PROVIDER_NAME: ClassVar[str]
    # --8<-- [end:BaseWebhooksManager1]

    WebhookType: WT

    async def get_suitable_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        if not app_config.platform_base_url:
            raise MissingConfigError(
                "PLATFORM_BASE_URL must be set to use Webhook functionality"
            )

        if webhook := await integrations.find_webhook(
            credentials.id, webhook_type, resource, events
        ):
            return webhook
        return await self._create_webhook(
            user_id, credentials, webhook_type, resource, events
        )

    async def prune_webhook_if_dangling(
        self, webhook_id: str, credentials: Credentials
    ) -> bool:
        webhook = await integrations.get_webhook(webhook_id)
        if webhook.attached_nodes is None:
            raise ValueError("Error retrieving webhook including attached nodes")
        if webhook.attached_nodes:
            # Don't prune webhook if in use
            return False

        await self._deregister_webhook(webhook, credentials)
        await integrations.delete_webhook(webhook.id)
        return True

    # --8<-- [start:BaseWebhooksManager3]
    @classmethod
    @abstractmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        """
        Validates an incoming webhook request and returns its payload and type.

        Params:
            webhook: Object representing the configured webhook and its properties in our system.
            request: Incoming FastAPI `Request`

        Returns:
            dict: The validated payload
            str: The event type associated with the payload
        """

    # --8<-- [end:BaseWebhooksManager3]

    # --8<-- [start:BaseWebhooksManager5]
    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        """
        Triggers a ping to the given webhook.

        Raises:
            NotImplementedError: if the provider doesn't support pinging
        """
        # --8<-- [end:BaseWebhooksManager5]
        raise NotImplementedError(f"{self.__class__.__name__} doesn't support pinging")

    # --8<-- [start:BaseWebhooksManager2]
    @abstractmethod
    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        """
        Registers a new webhook with the provider.

        Params:
            credentials: The credentials with which to create the webhook
            webhook_type: The provider-specific webhook type to create
            resource: The resource to receive events for
            events: The events to subscribe to
            ingress_url: The ingress URL for webhook payloads
            secret: Secret used to verify webhook payloads

        Returns:
            str: Webhook ID assigned by the provider
            config: Provider-specific configuration for the webhook
        """
        ...

    # --8<-- [end:BaseWebhooksManager2]

    # --8<-- [start:BaseWebhooksManager4]
    @abstractmethod
    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None: ...

    # --8<-- [end:BaseWebhooksManager4]

    async def _create_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        id = str(uuid4())
        secret = secrets.token_hex(32)
        provider_name = self.PROVIDER_NAME
        ingress_url = (
            f"{app_config.platform_base_url}/api/integrations/{provider_name}"
            f"/webhooks/{id}/ingress"
        )
        provider_webhook_id, config = await self._register_webhook(
            credentials, webhook_type, resource, events, ingress_url, secret
        )
        return await integrations.create_webhook(
            integrations.Webhook(
                id=id,
                user_id=user_id,
                provider=provider_name,
                credentials_id=credentials.id,
                webhook_type=webhook_type,
                resource=resource,
                events=events,
                provider_webhook_id=provider_webhook_id,
                config=config,
                secret=secret,
            )
        )
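A minimal sketch of the surface a concrete subclass of `BaseWebhooksManager` must provide, using hypothetical `Example*` names; the signatures follow the abstract methods above:

from strenum import StrEnum

class ExampleWebhookType(StrEnum):
    RESOURCE = "resource"

class ExampleWebhooksManager(BaseWebhooksManager):
    PROVIDER_NAME = "example"
    WebhookType = ExampleWebhookType

    @classmethod
    async def validate_payload(cls, webhook, request) -> tuple[dict, str]:
        payload = await request.json()
        return payload, payload.get("event_type", "ping")

    async def _register_webhook(
        self, credentials, webhook_type, resource, events, ingress_url, secret
    ) -> tuple[str, dict]:
        return "provider-assigned-id", {}  # no-op provider, for illustration

    async def _deregister_webhook(self, webhook, credentials) -> None:
        pass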
175
autogpt_platform/backend/backend/integrations/webhooks/github.py
Normal file
@ -0,0 +1,175 @@
import hashlib
import hmac
import logging

import requests
from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import HTTPException, Request
from strenum import StrEnum

from backend.data import integrations

from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


# --8<-- [start:GithubWebhooksManager]
class GithubWebhookType(StrEnum):
    REPO = "repo"


class GithubWebhooksManager(BaseWebhooksManager):
    PROVIDER_NAME = "github"

    WebhookType = GithubWebhookType

    GITHUB_API_URL = "https://api.github.com"
    GITHUB_API_DEFAULT_HEADERS = {"Accept": "application/vnd.github.v3+json"}

    @classmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        if not (event_type := request.headers.get("X-GitHub-Event")):
            raise HTTPException(
                status_code=400, detail="X-GitHub-Event header is missing!"
            )

        if not (signature_header := request.headers.get("X-Hub-Signature-256")):
            raise HTTPException(
                status_code=403, detail="X-Hub-Signature-256 header is missing!"
            )

        payload_body = await request.body()
        hash_object = hmac.new(
            webhook.secret.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256
        )
        expected_signature = "sha256=" + hash_object.hexdigest()

        if not hmac.compare_digest(expected_signature, signature_header):
            raise HTTPException(
                status_code=403, detail="Request signatures didn't match!"
            )

        payload = await request.json()
        if action := payload.get("action"):
            event_type += f".{action}"

        return payload, event_type

    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": f"Bearer {webhook.config.get('access_token')}",
        }

        repo, github_hook_id = webhook.resource, webhook.provider_webhook_id
        ping_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{github_hook_id}/pings"

        response = requests.post(ping_url, headers=headers)

        if response.status_code != 204:
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to ping GitHub webhook: {error_msg}")

    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: GithubWebhookType,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        if webhook_type == self.WebhookType.REPO and resource.count("/") > 1:
            raise ValueError("Invalid repo format: expected 'owner/repo'")

        # Extract main event, e.g. `pull_request.opened` -> `pull_request`
        github_events = list({event.split(".")[0] for event in events})

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }
        webhook_data = {
            "name": "web",
            "active": True,
            "events": github_events,
            "config": {
                "url": ingress_url,
                "content_type": "json",
                "insecure_ssl": "0",
                "secret": secret,
            },
        }

        response = requests.post(
            f"{self.GITHUB_API_URL}/repos/{resource}/hooks",
            headers=headers,
            json=webhook_data,
        )

        if response.status_code != 201:
            error_msg = extract_github_error_msg(response)
            if "not found" in error_msg.lower():
                error_msg = (
                    f"{error_msg} "
                    "(Make sure the GitHub account or API key has 'repo' or "
                    f"webhook create permissions to '{resource}')"
                )
            raise ValueError(f"Failed to create GitHub webhook: {error_msg}")

        webhook_id = response.json()["id"]
        config = response.json()["config"]

        return str(webhook_id), config

    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None:
        webhook_type = self.WebhookType(webhook.webhook_type)
        if webhook.credentials_id != credentials.id:
            raise ValueError(
                f"Webhook #{webhook.id} does not belong to credentials {credentials.id}"
            )

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }

        if webhook_type == self.WebhookType.REPO:
            repo = webhook.resource
            delete_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{webhook.provider_webhook_id}"  # noqa
        else:
            raise NotImplementedError(
                f"Unsupported webhook type '{webhook.webhook_type}'"
            )

        response = requests.delete(delete_url, headers=headers)

        if response.status_code not in [204, 404]:
            # 204 means successful deletion, 404 means the webhook was already deleted
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to delete GitHub webhook: {error_msg}")

        # If we reach here, the webhook was successfully deleted or didn't exist


# --8<-- [end:GithubWebhooksManager]


def extract_github_error_msg(response: requests.Response) -> str:
    error_msgs = []
    resp = response.json()
    if resp.get("message"):
        error_msgs.append(resp["message"])
    if resp.get("errors"):
        error_msgs.extend(f"* {err.get('message', err)}" for err in resp["errors"])
    if resp.get("error"):
        if isinstance(resp["error"], dict):
            error_msgs.append(resp["error"].get("message", resp["error"]))
        else:
            error_msgs.append(resp["error"])
    return "\n".join(error_msgs)
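The signature check in `validate_payload` above follows GitHub's webhook scheme: HMAC-SHA256 over the raw request body with the shared secret, hex-encoded and prefixed with `sha256=`. A standalone sketch with illustrative values:

import hashlib
import hmac

secret = b"webhook-secret"      # illustrative shared secret
body = b'{"action": "opened"}'  # raw request body
expected = "sha256=" + hmac.new(secret, msg=body, digestmod=hashlib.sha256).hexdigest()
# GitHub sends this value in the X-Hub-Signature-256 header;
# hmac.compare_digest(expected, received) resists timing attacks.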
@ -0,0 +1,198 @@
import logging
from typing import TYPE_CHECKING, Callable, Optional, cast

from backend.data.block import get_block
from backend.data.graph import set_node_webhook
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME

if TYPE_CHECKING:
    from autogpt_libs.supabase_integration_credentials_store.types import Credentials

    from backend.data.graph import GraphModel, NodeModel

    from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


async def on_graph_activate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is activated/created.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_activate` on all nodes in this graph.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    # Compare nodes in new_graph_version with previous_graph_version
    updated_nodes = []
    for new_node in graph.nodes:
        node_credentials = None
        if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                raise ValueError(
                    f"Node #{new_node.id} updated with non-existent "
                    f"credentials #{creds_meta['id']}"
                )

        updated_node = await on_node_activate(
            graph.user_id, new_node, credentials=node_credentials
        )
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_graph_deactivate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is deactivated/deleted.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_deactivate` on all nodes in `graph`.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    updated_nodes = []
    for node in graph.nodes:
        node_credentials = None
        if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                logger.error(
                    f"Node #{node.id} referenced non-existent "
                    f"credentials #{creds_meta['id']}"
                )

        updated_node = await on_node_deactivate(node, credentials=node_credentials)
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_node_activate(
    user_id: str,
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
) -> "NodeModel":
    """Hook to be called when the node is activated/created"""

    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    logger.debug(
        f"Activating webhook node #{node.id} with config {block.webhook_config}"
    )

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    try:
        resource = block.webhook_config.resource_format.format(**node.input_default)
    except KeyError:
        resource = None
    logger.debug(
        f"Constructed resource string {resource} from input {node.input_default}"
    )

    event_filter_input_name = block.webhook_config.event_filter_input
    has_everything_for_webhook = (
        resource is not None
        and CREDENTIALS_FIELD_NAME in node.input_default
        and event_filter_input_name in node.input_default
        and any(is_on for is_on in node.input_default[event_filter_input_name].values())
    )

    if has_everything_for_webhook and resource:
        logger.debug(f"Node #{node.id} has everything for a webhook!")
        if not credentials:
            credentials_meta = node.input_default[CREDENTIALS_FIELD_NAME]
            raise ValueError(
                f"Cannot set up webhook for node #{node.id}: "
                f"credentials #{credentials_meta['id']} not available"
            )

        # Shape of the event filter is enforced in Block.__init__
        event_filter = cast(dict, node.input_default[event_filter_input_name])
        events = [
            block.webhook_config.event_format.format(event=event)
            for event, enabled in event_filter.items()
            if enabled is True
        ]
        logger.debug(f"Webhook events to subscribe to: {', '.join(events)}")

        # Find/make and attach a suitable webhook to the node
        new_webhook = await webhooks_manager.get_suitable_webhook(
            user_id,
            credentials,
            block.webhook_config.webhook_type,
            resource,
            events,
        )
        logger.debug(f"Acquired webhook: {new_webhook}")
        return await set_node_webhook(node.id, new_webhook.id)

    return node


async def on_node_deactivate(
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
    webhooks_manager: Optional["BaseWebhooksManager"] = None,
) -> "NodeModel":
    """Hook to be called when node is deactivated/deleted"""

    logger.debug(f"Deactivating node #{node.id}")
    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    if node.webhook_id:
        logger.debug(f"Node #{node.id} has webhook_id {node.webhook_id}")
        if not node.webhook:
            logger.error(f"Node #{node.id} has webhook_id but no webhook object")
            raise ValueError("node.webhook not included")

        # Detach webhook from node
        logger.debug(f"Detaching webhook from node #{node.id}")
        updated_node = await set_node_webhook(node.id, None)

        # Prune and deregister the webhook if it is no longer used anywhere
        logger.debug("Pruning and deregistering webhook if dangling")
        webhook = node.webhook
        if credentials:
            logger.debug(f"Pruning webhook #{webhook.id} with credentials")
            await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials)
        else:
            logger.warning(
                f"Cannot deregister webhook #{webhook.id}: credentials "
                f"#{webhook.credentials_id} not available "
                f"({webhook.provider} webhook ID: {webhook.provider_webhook_id})"
            )
        return updated_node

    logger.debug(f"Node #{node.id} has no webhook_id, returning")
    return node
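
A minimal sketch of how a caller is expected to wire these hooks together when swapping the active graph version, assuming an IntegrationCredentialsManager-style lookup (the names mirror the router changes later in this commit):

async def swap_active_version(user_id, old_graph, new_graph, creds_manager):
    # credentials_id -> Credentials | None, the shape both hooks expect
    def get_credentials(credentials_id: str):
        return creds_manager.get(user_id, credentials_id)

    # Activate the new version first so webhook coverage stays continuous
    new_graph = await on_graph_activate(new_graph, get_credentials=get_credentials)
    # ...mark new_graph as the active version in the DB here...
    await on_graph_deactivate(old_graph, get_credentials=get_credentials)
    return new_graph
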
@ -10,8 +10,20 @@ from autogpt_libs.supabase_integration_credentials_store.types import (
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel, Field, SecretStr

from backend.data.graph import set_node_webhook
from backend.data.integrations import (
    WebhookEvent,
    get_all_webhooks,
    get_webhook,
    listen_for_webhook_event,
    publish_webhook_event,
)
from backend.executor.manager import ExecutionManager
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
from backend.util.exceptions import NeedConfirmation
from backend.util.service import get_service_client
from backend.util.settings import Settings

from ..utils import get_user_id
@ -53,6 +65,7 @@ def login(

class CredentialsMetaResponse(BaseModel):
    id: str
    provider: str
    type: CredentialsType
    title: str | None
    scopes: list[str] | None
@ -107,6 +120,7 @@ def callback(
    )
    return CredentialsMetaResponse(
        id=credentials.id,
        provider=credentials.provider,
        type=credentials.type,
        title=credentials.title,
        scopes=credentials.scopes,
@ -114,8 +128,26 @@ def callback(
    )


@router.get("/{provider}/credentials")
@router.get("/credentials")
def list_credentials(
    user_id: Annotated[str, Depends(get_user_id)],
) -> list[CredentialsMetaResponse]:
    credentials = creds_manager.store.get_all_creds(user_id)
    return [
        CredentialsMetaResponse(
            id=cred.id,
            provider=cred.provider,
            type=cred.type,
            title=cred.title,
            scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None,
            username=cred.username if isinstance(cred, OAuth2Credentials) else None,
        )
        for cred in credentials
    ]


@router.get("/{provider}/credentials")
def list_credentials_by_provider(
    provider: Annotated[str, Path(title="The provider to list credentials for")],
    user_id: Annotated[str, Depends(get_user_id)],
) -> list[CredentialsMetaResponse]:
@ -123,6 +155,7 @@ def list_credentials(
    return [
        CredentialsMetaResponse(
            id=cred.id,
            provider=cred.provider,
            type=cred.type,
            title=cred.title,
            scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None,
@ -183,13 +216,22 @@ class CredentialsDeletionResponse(BaseModel):
    )


class CredentialsDeletionNeedsConfirmationResponse(BaseModel):
    deleted: Literal[False] = False
    need_confirmation: Literal[True] = True
    message: str


@router.delete("/{provider}/credentials/{cred_id}")
def delete_credentials(
async def delete_credentials(
    request: Request,
    provider: Annotated[str, Path(title="The provider to delete credentials for")],
    cred_id: Annotated[str, Path(title="The ID of the credentials to delete")],
    user_id: Annotated[str, Depends(get_user_id)],
) -> CredentialsDeletionResponse:
    force: Annotated[
        bool, Query(title="Whether to proceed if any linked webhooks are still in use")
    ] = False,
) -> CredentialsDeletionResponse | CredentialsDeletionNeedsConfirmationResponse:
    creds = creds_manager.store.get_creds_by_id(user_id, cred_id)
    if not creds:
        raise HTTPException(status_code=404, detail="Credentials not found")
@ -198,6 +240,11 @@ def delete_credentials(
            status_code=404, detail="Credentials do not match the specified provider"
        )

    try:
        await remove_all_webhooks_for_credentials(creds, force)
    except NeedConfirmation as e:
        return CredentialsDeletionNeedsConfirmationResponse(message=str(e))

    creds_manager.delete(user_id, cred_id)

    tokens_revoked = None
@ -208,7 +255,98 @@ def delete_credentials(
    return CredentialsDeletionResponse(revoked=tokens_revoked)

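The two-step deletion handshake above is easiest to see from the client side; a hedged sketch using httpx (the route prefix follows the /api/integrations mounting elsewhere in this commit):

import httpx


async def delete_credentials_interactively(
    client: httpx.AsyncClient, provider: str, cred_id: str
) -> dict:
    # First attempt: the server refuses if linked webhooks are still in use
    resp = await client.delete(f"/api/integrations/{provider}/credentials/{cred_id}")
    body = resp.json()
    if body.get("need_confirmation"):
        # After the user confirms, retry with force=true
        resp = await client.delete(
            f"/api/integrations/{provider}/credentials/{cred_id}",
            params={"force": True},
        )
        body = resp.json()
    return body  # e.g. {"deleted": True, "revoked": ...} on success
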
# -------- UTILITIES --------- #
# ------------------------- WEBHOOK STUFF -------------------------- #


# ⚠️ Note
# No user auth check because this endpoint is for webhook ingress and relies on
# validation by the provider-specific `WebhooksManager`.
@router.post("/{provider}/webhooks/{webhook_id}/ingress")
async def webhook_ingress_generic(
    request: Request,
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
):
    logger.debug(f"Received {provider} webhook ingress for ID {webhook_id}")
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)
    logger.debug(f"Webhook #{webhook_id}: {webhook}")
    payload, event_type = await webhook_manager.validate_payload(webhook, request)
    logger.debug(f"Validated {provider} {event_type} event with payload {payload}")

    webhook_event = WebhookEvent(
        provider=provider,
        webhook_id=webhook_id,
        event_type=event_type,
        payload=payload,
    )
    await publish_webhook_event(webhook_event)
    logger.debug(f"Webhook event published: {webhook_event}")

    if not webhook.attached_nodes:
        return

    executor = get_service_client(ExecutionManager)
    for node in webhook.attached_nodes:
        logger.debug(f"Webhook-attached node: {node}")
        if not node.is_triggered_by_event_type(event_type):
            logger.debug(f"Node #{node.id} doesn't trigger on event {event_type}")
            continue
        logger.debug(f"Executing graph #{node.graph_id} node #{node.id}")
        executor.add_execution(
            node.graph_id,
            data={f"webhook_{webhook_id}_payload": payload},
            user_id=webhook.user_id,
        )


@router.post("/{provider}/webhooks/{webhook_id}/ping")
async def webhook_ping(
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
    user_id: Annotated[str, Depends(get_user_id)],  # require auth
):
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)

    await webhook_manager.trigger_ping(webhook)
    if not await listen_for_webhook_event(webhook_id, event_type="ping"):
        raise HTTPException(status_code=500, detail="Webhook ping event not received")

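For manual verification, the ping endpoint can be exercised like this (a sketch assuming an authenticated httpx.AsyncClient; the endpoint returns a 500 if the ping event never arrives):

import httpx


async def ping_webhook(
    client: httpx.AsyncClient, provider: str, webhook_id: str
) -> bool:
    resp = await client.post(
        f"/api/integrations/{provider}/webhooks/{webhook_id}/ping"
    )
    return resp.status_code == 200
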
# --------------------------- UTILITIES ---------------------------- #


async def remove_all_webhooks_for_credentials(
    credentials: Credentials, force: bool = False
) -> None:
    """
    Remove and deregister all webhooks that were registered using the given credentials.

    Params:
        credentials: The credentials for which to remove the associated webhooks.
        force: Whether to proceed if any of the webhooks are still in use.

    Raises:
        NeedConfirmation: If any of the webhooks are still in use and `force` is `False`
    """
    webhooks = await get_all_webhooks(credentials.id)
    if any(w.attached_nodes for w in webhooks) and not force:
        raise NeedConfirmation(
            "Some webhooks linked to these credentials are still in use by an agent"
        )
    for webhook in webhooks:
        # Unlink all nodes
        for node in webhook.attached_nodes or []:
            await set_node_webhook(node.id, None)

        # Prune the webhook
        webhook_manager = WEBHOOK_MANAGERS_BY_NAME[credentials.provider]()
        success = await webhook_manager.prune_webhook_if_dangling(
            webhook.id, credentials
        )
        if not success:
            logger.warning(f"Webhook #{webhook.id} failed to prune")


def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
@ -226,7 +364,11 @@ def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHa
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url or str(req.base_url)
    frontend_base_url = (
        settings.config.frontend_base_url
        or settings.config.platform_base_url
        or str(req.base_url)
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
@ -29,21 +29,6 @@ async def lifespan_context(app: fastapi.FastAPI):
    await backend.data.db.disconnect()


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


docs_url = (
    "/docs"
    if settings.config.app_env == backend.util.settings.AppEnvironment.LOCAL
@ -62,8 +47,24 @@ app = fastapi.FastAPI(
    docs_url=docs_url,
)


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


app.add_exception_handler(ValueError, handle_internal_http_error(400))
app.add_exception_handler(500, handle_internal_http_error(500))
app.add_exception_handler(Exception, handle_internal_http_error(500))
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"])


@ -91,9 +92,7 @@ class AgentServer(backend.util.service.AppProcess):
    async def test_execute_graph(
        graph_id: str, node_input: dict[typing.Any, typing.Any], user_id: str
    ):
        return await backend.server.routers.v1.execute_graph(
            graph_id, node_input, user_id
        )
        return backend.server.routers.v1.execute_graph(graph_id, node_input, user_id)

    @staticmethod
    async def test_create_graph(
@ -1,7 +1,7 @@
import asyncio
import logging
from collections import defaultdict
from typing import Annotated, Any, List
from typing import TYPE_CHECKING, Annotated, Any, Sequence

import pydantic
from autogpt_libs.auth.middleware import auth_middleware
@ -30,6 +30,11 @@ from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.credit import get_block_costs, get_user_credit_model
from backend.data.user import get_or_create_user
from backend.executor import ExecutionManager, ExecutionScheduler, scheduler
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.webhooks.graph_lifecycle_hooks import (
    on_graph_activate,
    on_graph_deactivate,
)
from backend.server.model import (
    CreateAPIKeyRequest,
    CreateAPIKeyResponse,
@ -41,6 +46,9 @@ from backend.server.utils import get_user_id
from backend.util.service import get_service_client
from backend.util.settings import Settings

if TYPE_CHECKING:
    from autogpt_libs.supabase_integration_credentials_store.types import Credentials


@thread_cached
def execution_manager_client() -> ExecutionManager:
@ -54,6 +62,7 @@ def execution_scheduler_client() -> ExecutionScheduler:

settings = Settings()
logger = logging.getLogger(__name__)
integration_creds_manager = IntegrationCredentialsManager()


_user_credit_model = get_user_credit_model()
@ -62,14 +71,10 @@ _user_credit_model = get_user_credit_model()
v1_router = APIRouter(prefix="/api")


v1_router.dependencies.append(Depends(auth_middleware))


v1_router.include_router(
    backend.server.integrations.router.router,
    prefix="/integrations",
    tags=["integrations"],
    dependencies=[Depends(auth_middleware)],
)

v1_router.include_router(
@ -97,13 +102,17 @@ async def get_or_create_user_route(user_data: dict = Depends(auth_middleware)):


@v1_router.get(path="/blocks", tags=["blocks"], dependencies=[Depends(auth_middleware)])
def get_graph_blocks() -> list[dict[Any, Any]]:
def get_graph_blocks() -> Sequence[dict[Any, Any]]:
    blocks = [block() for block in backend.data.block.get_blocks().values()]
    costs = get_block_costs()
    return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks]


@v1_router.post(path="/blocks/{block_id}/execute", tags=["blocks"])
@v1_router.post(
    path="/blocks/{block_id}/execute",
    tags=["blocks"],
    dependencies=[Depends(auth_middleware)],
)
def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput:
    obj = backend.data.block.get_block(block_id)
    if not obj:
@ -141,7 +150,7 @@ class DeleteGraphResponse(TypedDict):
async def get_graphs(
    user_id: Annotated[str, Depends(get_user_id)],
    with_runs: bool = False,
) -> list[graph_db.Graph]:
) -> Sequence[graph_db.Graph]:
    return await graph_db.get_graphs(
        include_executions=with_runs, filter_by="active", user_id=user_id
    )
@ -181,13 +190,61 @@ async def get_graph(
)
async def get_graph_all_versions(
    graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> list[graph_db.Graph]:
) -> Sequence[graph_db.Graph]:
    graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
    if not graphs:
        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
    return graphs


@v1_router.post(
    path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
)
async def create_new_graph(
    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
) -> graph_db.Graph:
    return await do_create_graph(create_graph, is_template=False, user_id=user_id)


async def do_create_graph(
    create_graph: CreateGraph,
    is_template: bool,
    # user_id doesn't have to be annotated like on other endpoints,
    # because create_graph isn't used directly as an endpoint
    user_id: str,
) -> graph_db.Graph:
    if create_graph.graph:
        graph = graph_db.make_graph_model(create_graph.graph, user_id)
    elif create_graph.template_id:
        # Create a new graph from a template
        graph = await graph_db.get_graph(
            create_graph.template_id,
            create_graph.template_version,
            template=True,
            user_id=user_id,
        )
        if not graph:
            raise HTTPException(
                400, detail=f"Template #{create_graph.template_id} not found"
            )
        graph.version = 1
    else:
        raise HTTPException(
            status_code=400, detail="Either graph or template_id must be provided."
        )

    graph.is_template = is_template
    graph.is_active = not is_template
    graph.reassign_ids(user_id=user_id, reassign_graph_id=True)

    graph = await graph_db.create_graph(graph, user_id=user_id)
    graph = await on_graph_activate(
        graph,
        get_credentials=lambda id: integration_creds_manager.get(user_id, id),
    )
    return graph


@v1_router.delete(
    path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
)
@ -224,33 +281,41 @@ async def update_graph(
    latest_version_graph = next(
        v for v in existing_versions if v.version == latest_version_number
    )
    current_active_version = next((v for v in existing_versions if v.is_active), None)
    if latest_version_graph.is_template != graph.is_template:
        raise HTTPException(
            400, detail="Changing is_template on an existing graph is forbidden"
        )
    graph.is_active = not graph.is_template
    graph = graph_db.make_graph_model(graph, user_id)
    graph.reassign_ids(user_id=user_id)

    new_graph_version = await graph_db.create_graph(graph, user_id=user_id)

    if new_graph_version.is_active:

        def get_credentials(credentials_id: str) -> "Credentials | None":
            return integration_creds_manager.get(user_id, credentials_id)

        # Handle activation of the new graph first to ensure continuity
        new_graph_version = await on_graph_activate(
            new_graph_version,
            get_credentials=get_credentials,
        )
        # Ensure new version is the only active version
        await graph_db.set_graph_active_version(
            graph_id=graph_id, version=new_graph_version.version, user_id=user_id
        )
        if current_active_version:
            # Handle deactivation of the previously active version
            await on_graph_deactivate(
                current_active_version,
                get_credentials=get_credentials,
            )

    return new_graph_version


@v1_router.post(
    path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
)
async def create_new_graph(
    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
) -> graph_db.Graph:
    return await do_create_graph(create_graph, is_template=False, user_id=user_id)


@v1_router.put(
    path="/graphs/{graph_id}/versions/active",
    tags=["graphs"],
@ -262,13 +327,34 @@ async def set_graph_active_version(
    user_id: Annotated[str, Depends(get_user_id)],
):
    new_active_version = request_body.active_graph_version
    if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id):
    new_active_graph = await graph_db.get_graph(
        graph_id, new_active_version, user_id=user_id
    )
    if not new_active_graph:
        raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found")

    current_active_graph = await graph_db.get_graph(graph_id, user_id=user_id)

    def get_credentials(credentials_id: str) -> "Credentials | None":
        return integration_creds_manager.get(user_id, credentials_id)

    # Handle activation of the new graph first to ensure continuity
    await on_graph_activate(
        new_active_graph,
        get_credentials=get_credentials,
    )
    # Ensure new version is the only active version
    await graph_db.set_graph_active_version(
        graph_id=graph_id,
        version=request_body.active_graph_version,
        version=new_active_version,
        user_id=user_id,
    )
    if current_active_graph and current_active_graph.version != new_active_version:
        # Handle deactivation of the previously active version
        await on_graph_deactivate(
            current_active_graph,
            get_credentials=get_credentials,
        )


@v1_router.post(
@ -276,7 +362,7 @@ async def set_graph_active_version(
    tags=["graphs"],
    dependencies=[Depends(auth_middleware)],
)
async def execute_graph(
def execute_graph(
    graph_id: str,
    node_input: dict[Any, Any],
    user_id: Annotated[str, Depends(get_user_id)],
@ -298,7 +384,7 @@ async def execute_graph(
)
async def stop_graph_run(
    graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> list[execution_db.ExecutionResult]:
) -> Sequence[execution_db.ExecutionResult]:
    if not await execution_db.get_graph_execution(graph_exec_id, user_id):
        raise HTTPException(404, detail=f"Agent execution #{graph_exec_id} not found")

@ -319,7 +405,7 @@ async def list_graph_runs(
    graph_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
    graph_version: int | None = None,
) -> list[str]:
) -> Sequence[str]:
    graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id)
    if not graph:
        rev = "" if graph_version is None else f" v{graph_version}"
@ -339,7 +425,7 @@ async def get_graph_run_node_execution_results(
    graph_id: str,
    graph_exec_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
) -> list[execution_db.ExecutionResult]:
) -> Sequence[execution_db.ExecutionResult]:
    graph = await graph_db.get_graph(graph_id, user_id=user_id)
    if not graph:
        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
@ -378,7 +464,7 @@ async def get_graph_run_status(
)
async def get_templates(
    user_id: Annotated[str, Depends(get_user_id)]
) -> list[graph_db.Graph]:
) -> Sequence[graph_db.Graph]:
    return await graph_db.get_graphs(filter_by="template", user_id=user_id)


@ -394,40 +480,6 @@ async def get_template(graph_id: str, version: int | None = None) -> graph_db.Gr
    return graph


async def do_create_graph(
    create_graph: CreateGraph,
    is_template: bool,
    # user_id doesn't have to be annotated like on other endpoints,
    # because create_graph isn't used directly as an endpoint
    user_id: str,
) -> graph_db.Graph:
    if create_graph.graph:
        graph = create_graph.graph
    elif create_graph.template_id:
        # Create a new graph from a template
        graph = await graph_db.get_graph(
            create_graph.template_id,
            create_graph.template_version,
            template=True,
            user_id=user_id,
        )
        if not graph:
            raise HTTPException(
                400, detail=f"Template #{create_graph.template_id} not found"
            )
        graph.version = 1
    else:
        raise HTTPException(
            status_code=400, detail="Either graph or template_id must be provided."
        )

    graph.is_template = is_template
    graph.is_active = not is_template
    graph.reassign_ids(user_id=user_id, reassign_graph_id=True)

    return await graph_db.create_graph(graph, user_id=user_id)


@v1_router.post(
    path="/templates",
    tags=["templates", "graphs"],
@ -481,7 +533,7 @@ async def create_schedule(
    tags=["schedules"],
    dependencies=[Depends(auth_middleware)],
)
async def delete_schedule(
def delete_schedule(
    schedule_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
) -> dict[Any, Any]:
@ -494,7 +546,7 @@ async def delete_schedule(
    tags=["schedules"],
    dependencies=[Depends(auth_middleware)],
)
async def get_execution_schedules(
def get_execution_schedules(
    user_id: Annotated[str, Depends(get_user_id)],
    graph_id: str | None = None,
) -> list[scheduler.JobInfo]:
@ -534,13 +586,13 @@ async def create_api_key(

@v1_router.get(
    "/api-keys",
    response_model=List[APIKeyWithoutHash],
    response_model=list[APIKeyWithoutHash],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
async def get_api_keys(
    user_id: Annotated[str, Depends(get_user_id)]
) -> List[APIKeyWithoutHash]:
) -> list[APIKeyWithoutHash]:
    """List all API keys for the user"""
    try:
        return await list_user_api_keys(user_id)
@ -8,7 +8,7 @@ from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
from starlette.middleware.cors import CORSMiddleware

from backend.data import redis
from backend.data.queue import AsyncRedisExecutionEventBus
from backend.data.execution import AsyncRedisExecutionEventBus
from backend.data.user import DEFAULT_USER_ID
from backend.server.conn_manager import ConnectionManager
from backend.server.model import ExecutionSubscription, Methods, WsMessage
6
autogpt_platform/backend/backend/util/exceptions.py
Normal file
@ -0,0 +1,6 @@
class MissingConfigError(Exception):
    """The attempted operation requires configuration which is not available"""


class NeedConfirmation(Exception):
    """The user must explicitly confirm that they want to proceed"""
@ -1,6 +1,7 @@
import json
from typing import Any, Type, TypeVar, overload

import jsonschema
from fastapi.encoders import jsonable_encoder

from .type import type_match
@ -30,3 +31,17 @@ def loads(data: str, *args, target_type: Type[T] | None = None, **kwargs) -> Any
    if target_type:
        return type_match(parsed, target_type)
    return parsed


def validate_with_jsonschema(
    schema: dict[str, Any], data: dict[str, Any]
) -> str | None:
    """
    Validate the data against the schema.
    Returns the validation error message if the data does not match the schema.
    """
    try:
        jsonschema.validate(data, schema)
        return None
    except jsonschema.ValidationError as e:
        return str(e)
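
A quick usage sketch of the new helper (the schema and data are illustrative; the import path is assumed from the file's location under backend/util):

from backend.util.json import validate_with_jsonschema

schema = {
    "type": "object",
    "properties": {"repo": {"type": "string"}},
    "required": ["repo"],
}

assert validate_with_jsonschema(schema, {"repo": "owner/name"}) is None
error = validate_with_jsonschema(schema, {})
print(error)  # jsonschema's message, starting with "'repo' is a required property"
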
@ -1,5 +1,6 @@
import logging
import os
import threading
from functools import wraps
from uuid import uuid4

@ -16,7 +17,7 @@ def _log_prefix(resource_name: str, conn_id: str):
    This needs to be called on the fly to get the current process ID & service name,
    not the parent process ID & service name.
    """
    return f"[PID-{os.getpid()}|{get_service_name()}|{resource_name}-{conn_id}]"
    return f"[PID-{os.getpid()}|THREAD-{threading.get_native_id()}|{get_service_name()}|{resource_name}-{conn_id}]"


def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
@ -25,7 +26,7 @@ def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
    def on_retry(retry_state):
        prefix = _log_prefix(resource_name, conn_id)
        exception = retry_state.outcome.exception()
        logger.info(f"{prefix} {action_name} failed: {exception}. Retrying now...")
        logger.error(f"{prefix} {action_name} failed: {exception}. Retrying now...")

    def decorator(func):
        @wraps(func)
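
Usage of the decorator follows the pattern visible elsewhere in this diff; a sketch (the resource and action names are illustrative):

from backend.util.retry import conn_retry


@conn_retry("Redis", "Acquiring connection", max_retry=5)
def connect():
    # Each failed attempt is now logged at ERROR level with a
    # [PID-...|THREAD-...|service|Redis-<id>] prefix, which makes retries
    # traceable across processes and threads.
    ...
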
@ -11,6 +11,7 @@ from types import NoneType, UnionType
from typing import (
    Annotated,
    Any,
    Awaitable,
    Callable,
    Coroutine,
    Dict,
@ -64,7 +65,13 @@ def expose(func: C) -> C:
            logger.exception(msg)
            raise

    # Register custom serializers and deserializers for annotated Pydantic models
    register_pydantic_serializers(func)

    return pyro.expose(wrapper)  # type: ignore


def register_pydantic_serializers(func: Callable):
    """Register custom serializers and deserializers for annotated Pydantic models"""
    for name, annotation in func.__annotations__.items():
        try:
            pydantic_types = _pydantic_models_from_type_annotation(annotation)
@ -81,8 +88,6 @@ def expose(func: C) -> C:
                model.__qualname__, _make_custom_deserializer(model)
            )

    return pyro.expose(wrapper)  # type: ignore


def _make_custom_serializer(model: Type[BaseModel]):
    def custom_class_to_dict(obj):
@ -120,7 +125,7 @@ class AppService(AppProcess, ABC):

    @classmethod
    def get_host(cls) -> str:
        return os.environ.get(f"{cls.service_name.upper()}_HOST", Config().pyro_host)
        return os.environ.get(f"{cls.service_name.upper()}_HOST", config.pyro_host)

    def run_service(self) -> None:
        while True:
@ -170,14 +175,13 @@ class AppService(AppProcess, ABC):

    @conn_retry("Pyro", "Starting Pyro Service")
    def __start_pyro(self):
        conf = Config()
        maximum_connection_thread_count = max(
            Pyro5.config.THREADPOOL_SIZE,
            conf.num_node_workers * conf.num_graph_workers,
            config.num_node_workers * config.num_graph_workers,
        )

        Pyro5.config.THREADPOOL_SIZE = maximum_connection_thread_count  # type: ignore
        daemon = Pyro5.api.Daemon(host=conf.pyro_host, port=self.get_port())
        daemon = Pyro5.api.Daemon(host=config.pyro_host, port=self.get_port())
        self.uri = daemon.register(self, objectId=self.service_name)
        logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
        daemon.requestLoop()
@ -209,7 +213,7 @@ def get_service_client(service_type: Type[AS]) -> AS:
    class DynamicClient(PyroClient):
        @conn_retry("Pyro", f"Connecting to [{service_name}]")
        def __init__(self):
            host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
            host = os.environ.get(f"{service_name.upper()}_HOST", pyro_host)
            uri = f"PYRO:{service_type.service_name}@{host}:{service_type.get_port()}"
            logger.debug(f"Connecting to service [{service_name}]. URI = {uri}")
            self.proxy = Pyro5.api.Proxy(uri)
@ -253,6 +257,10 @@ def _pydantic_models_from_type_annotation(annotation) -> Iterator[type[BaseModel
        key_type, value_type = args
        yield from _pydantic_models_from_type_annotation(key_type)
        yield from _pydantic_models_from_type_annotation(value_type)
    elif origin in (Awaitable, Coroutine):
        # For coroutines and awaitables, check the return type
        return_type = args[-1]
        yield from _pydantic_models_from_type_annotation(return_type)
    else:
        annotype = annotation if origin is None else origin

@ -3,7 +3,7 @@ import os
from enum import Enum
from typing import Any, Dict, Generic, List, Set, Tuple, Type, TypeVar

from pydantic import BaseModel, Field, PrivateAttr, field_validator
from pydantic import BaseModel, Field, PrivateAttr, ValidationInfo, field_validator
from pydantic_settings import (
    BaseSettings,
    JsonConfigSettingsSource,
@ -136,12 +136,32 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        description="The port for agent server API to run on",
    )

    platform_base_url: str = Field(
        default="",
        description="Must be set so the application knows where it's hosted at. "
        "This is necessary to make sure webhooks find their way.",
    )

    frontend_base_url: str = Field(
        default="http://localhost:3000",
        default="",
        description="Can be used to explicitly set the base URL for the frontend. "
        "This value is then used to generate redirect URLs for OAuth flows.",
    )

    @field_validator("platform_base_url", "frontend_base_url")
    @classmethod
    def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
        if not v:
            return v
        if not v.startswith(("http://", "https://")):
            raise ValueError(
                f"{info.field_name} must be a full URL "
                "including a http:// or https:// schema"
            )
        if v.endswith("/"):
            return v.rstrip("/")  # Remove trailing slash
        return v

    app_env: AppEnvironment = Field(
        default=AppEnvironment.LOCAL,
        description="The name of the app environment: local or dev or prod",
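
The validator's effect, sketched (values illustrative; Config is the settings class above):

from backend.util.settings import Config

config = Config(platform_base_url="https://platform.example.com/")
print(config.platform_base_url)  # "https://platform.example.com", trailing slash stripped

# Missing scheme -> pydantic raises a ValidationError:
# "platform_base_url must be a full URL including a http:// or https:// schema"
Config(platform_base_url="platform.example.com")
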
@ -1,9 +1,10 @@
import logging
import time
from typing import Sequence

from backend.data import db
from backend.data.block import Block, initialize_blocks
from backend.data.execution import ExecutionStatus
from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.user import create_default_user
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
@ -57,7 +58,7 @@ async def wait_execution(
    graph_id: str,
    graph_exec_id: str,
    timeout: int = 20,
) -> list:
) -> Sequence[ExecutionResult]:
    async def is_execution_completed():
        status = await AgentServer().test_get_graph_run_status(
            graph_id, graph_exec_id, user_id
@ -0,0 +1,26 @@
-- AlterTable
ALTER TABLE "AgentNode" ADD COLUMN "webhookId" TEXT;

-- CreateTable
CREATE TABLE "IntegrationWebhook" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3),
    "userId" TEXT NOT NULL,
    "provider" TEXT NOT NULL,
    "credentialsId" TEXT NOT NULL,
    "webhookType" TEXT NOT NULL,
    "resource" TEXT NOT NULL,
    "events" TEXT[],
    "config" JSONB NOT NULL,
    "secret" TEXT NOT NULL,
    "providerWebhookId" TEXT NOT NULL,

    CONSTRAINT "IntegrationWebhook_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "AgentNode" ADD CONSTRAINT "AgentNode_webhookId_fkey" FOREIGN KEY ("webhookId") REFERENCES "IntegrationWebhook"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "IntegrationWebhook" ADD CONSTRAINT "IntegrationWebhook_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
60
autogpt_platform/backend/poetry.lock
generated
@ -297,9 +297,9 @@ expiringdict = "^1.2.2"
google-cloud-logging = "^3.11.3"
pydantic = "^2.9.2"
pydantic-settings = "^2.6.1"
pyjwt = "^2.8.0"
pyjwt = "^2.10.0"
python-dotenv = "^1.0.1"
supabase = "^2.9.1"
supabase = "^2.10.0"

[package.source]
type = "directory"
@ -2051,13 +2051,13 @@ testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "poethepoet"
version = "0.30.0"
version = "0.31.0"
description = "A task runner that works well with poetry."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
    {file = "poethepoet-0.30.0-py3-none-any.whl", hash = "sha256:bf875741407a98da9e96f2f2d0b2c4c34f56d89939a7f53a4b6b3a64b546ec4e"},
    {file = "poethepoet-0.30.0.tar.gz", hash = "sha256:9f7ccda2d6525616ce989ca8ef973739fd668f50bef0b9d3631421d504d9ae4a"},
    {file = "poethepoet-0.31.0-py3-none-any.whl", hash = "sha256:5067c5adf9f228b8af1f3df7d57dc319ed8b3f153bf21faf99f7b74494174c3d"},
    {file = "poethepoet-0.31.0.tar.gz", hash = "sha256:b1cffb120149101b02ffa0583c6e61dfee53953a741df3dabf179836bdef97f5"},
]

[package.dependencies]
@ -2501,13 +2501,13 @@ files = [

[[package]]
name = "pyjwt"
version = "2.9.0"
version = "2.10.0"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
    {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"},
    {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"},
    {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"},
    {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"},
]

[package.extras]
@ -2954,29 +2954,29 @@ pyasn1 = ">=0.1.3"

[[package]]
name = "ruff"
version = "0.7.4"
version = "0.8.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
    {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
    {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
    {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
    {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
    {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
    {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
    {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
]

[[package]]
@ -3933,4 +3933,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "e33b0da31247495e8704fee5224f7b0cf53859cd0ce8bafb39889548a649f5fb"
content-hash = "ac9354e2f206e1a78e6c99d3eab9d538b97c04348270d4776024102526ea7b0b"
@ -38,6 +38,7 @@ pytest-asyncio = "^0.24.0"
python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.18.0"
strenum = "^0.4.9"
supabase = "^2.10.0"
tenacity = "^9.0.0"
uvicorn = { extras = ["standard"], version = "^0.32.0" }
@ -50,11 +51,11 @@ cryptography = "^43.0.3"
sqlalchemy = "^2.0.36"
psycopg2-binary = "^2.9.10"
[tool.poetry.group.dev.dependencies]
poethepoet = "^0.30.0"
poethepoet = "^0.31.0"
httpx = "^0.27.0"
pytest-watcher = "^0.4.2"
requests = "^2.32.3"
ruff = "^0.7.4"
ruff = "^0.8.0"
pyright = "^1.1.389"
isort = "^5.13.2"
black = "^24.10.0"
@ -23,6 +23,7 @@ model User {
  // Relations
  AgentGraphs          AgentGraph[]
  AgentGraphExecutions AgentGraphExecution[]
  IntegrationWebhooks  IntegrationWebhook[]
  AnalyticsDetails     AnalyticsDetails[]
  AnalyticsMetrics     AnalyticsMetrics[]
  UserBlockCredit      UserBlockCredit[]
@ -74,6 +75,10 @@ model AgentNode {
  // JSON serialized dict[str, str] containing predefined input values.
  constantInput String @default("{}")

  // For webhook-triggered blocks: reference to the webhook that triggers the node
  webhookId String?
  Webhook   IntegrationWebhook? @relation(fields: [webhookId], references: [id])

  // JSON serialized dict[str, str] containing the node metadata.
  metadata String @default("{}")

@ -186,6 +191,28 @@ model AgentNodeExecutionInputOutput {
  @@unique([referencedByInputExecId, referencedByOutputExecId, name])
}

// Webhook that is registered with a provider and propagates to one or more nodes
model IntegrationWebhook {
  id        String    @id @default(uuid())
  createdAt DateTime  @default(now())
  updatedAt DateTime? @updatedAt

  userId String
  user   User   @relation(fields: [userId], references: [id], onDelete: Restrict) // Webhooks must be deregistered before deleting

  provider          String // e.g. 'github'
  credentialsId     String // relation to the credentials that the webhook was created with
  webhookType       String // e.g. 'repo'
  resource          String // e.g. 'Significant-Gravitas/AutoGPT'
  events            String[] // e.g. ['created', 'updated']
  config            Json
  secret            String // crypto string, used to verify payload authenticity

  providerWebhookId String // Webhook ID assigned by the provider

  AgentNodes AgentNode[]
}

model AnalyticsDetails {
  // PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma
  // typical uuid() inserts are handled by prisma
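
A hedged sketch of querying the new model with the Python Prisma client the backend uses (accessor naming follows prisma-client-py conventions; the exact include shape is an assumption):

from prisma import Prisma


async def get_webhooks_for_credentials(credentials_id: str):
    db = Prisma()
    await db.connect()
    try:
        # Webhooks registered with these credentials, with their attached nodes
        return await db.integrationwebhook.find_many(
            where={"credentialsId": credentials_id},
            include={"AgentNodes": True},
        )
    finally:
        await db.disconnect()
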
@ -26,6 +26,7 @@
    "@faker-js/faker": "^9.2.0",
    "@hookform/resolvers": "^3.9.1",
    "@next/third-parties": "^15.0.3",
    "@radix-ui/react-alert-dialog": "^1.1.2",
    "@radix-ui/react-avatar": "^1.1.1",
    "@radix-ui/react-checkbox": "^1.1.2",
    "@radix-ui/react-collapsible": "^1.1.1",
@ -1,16 +1,16 @@
"use client";

import { useSearchParams } from "next/navigation";
import FlowEditor from '@/components/Flow';
import FlowEditor from "@/components/Flow";

export default function Home() {
  const query = useSearchParams();

  return (
    <FlowEditor
      className="flow-container"
      flowID={query.get("flowID") ?? query.get("templateID") ?? undefined}
      template={!!query.get("templateID")}
    />
  );
}
@ -74,7 +74,7 @@
}

.agpt-border-input {
  @apply border-input focus-visible:border-gray-400 focus-visible:outline-none;
  @apply border border-input focus-visible:border-gray-400 focus-visible:outline-none;
}

.agpt-shadow-input {
@@ -4,7 +4,7 @@ import { useSupabase } from "@/components/SupabaseProvider";
import { Button } from "@/components/ui/button";
import useUser from "@/hooks/useUser";
import { useRouter } from "next/navigation";
import { useCallback, useContext, useMemo } from "react";
import { useCallback, useContext, useMemo, useState } from "react";
import { FaSpinner } from "react-icons/fa";
import { Separator } from "@/components/ui/separator";
import { useToast } from "@/components/ui/use-toast";
@@ -21,6 +21,16 @@ import {
TableRow,
} from "@/components/ui/table";
import { CredentialsProviderName } from "@/lib/autogpt-server-api";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
} from "@/components/ui/alert-dialog";

export default function PrivatePage() {
const { user, isLoading, error } = useUser();
@@ -29,15 +39,40 @@ export default function PrivatePage() {
const providers = useContext(CredentialsProvidersContext);
const { toast } = useToast();

const [confirmationDialogState, setConfirmationDialogState] = useState<
| {
open: true;
message: string;
onConfirm: () => void;
onReject: () => void;
}
| { open: false }
>({ open: false });

const removeCredentials = useCallback(
async (provider: CredentialsProviderName, id: string) => {
async (
provider: CredentialsProviderName,
id: string,
force: boolean = false,
) => {
if (!providers || !providers[provider]) {
return;
}

let result;
try {
const { revoked } = await providers[provider].deleteCredentials(id);
if (revoked !== false) {
result = await providers[provider].deleteCredentials(id, force);
} catch (error: any) {
toast({
title: "Something went wrong when deleting credentials: " + error,
variant: "destructive",
duration: 2000,
});
setConfirmationDialogState({ open: false });
return;
}
if (result.deleted) {
if (result.revoked) {
toast({
title: "Credentials deleted",
duration: 2000,
@@ -49,11 +84,13 @@ export default function PrivatePage() {
duration: 3000,
});
}
} catch (error: any) {
toast({
title: "Something went wrong when deleting credentials: " + error,
variant: "destructive",
duration: 2000,
setConfirmationDialogState({ open: false });
} else if (result.need_confirmation) {
setConfirmationDialogState({
open: true,
message: result.message,
onConfirm: () => removeCredentials(provider, id, true),
onReject: () => setConfirmationDialogState({ open: false }),
});
}
},
@@ -106,7 +143,9 @@ export default function PrivatePage() {
return (
<div className="mx-auto max-w-3xl md:py-8">
<div className="flex items-center justify-between">
<p>Hello {user.email}</p>
<p>
Hello <span data-testid="profile-email">{user.email}</span>
</p>
<Button onClick={() => supabase.auth.signOut()}>
<LogOutIcon className="mr-1.5 size-4" />
Log out
@@ -158,6 +197,36 @@ export default function PrivatePage() {
))}
</TableBody>
</Table>

<AlertDialog open={confirmationDialogState.open}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
<AlertDialogDescription>
{confirmationDialogState.open && confirmationDialogState.message}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel
onClick={() =>
confirmationDialogState.open &&
confirmationDialogState.onReject()
}
>
Cancel
</AlertDialogCancel>
<AlertDialogAction
variant="destructive"
onClick={() =>
confirmationDialogState.open &&
confirmationDialogState.onConfirm()
}
>
Continue
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
);
}
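The hunk above introduces a small state machine: `confirmationDialogState` is a discriminated union, and the server can answer a delete with a confirmation request that is satisfied by re-running the same routine with `force: true`. A standalone sketch of that control flow, with `apiDelete` as a hypothetical stand-in for `providers[provider].deleteCredentials`:

```typescript
// Sketch only; `apiDelete` is a hypothetical stand-in, not the real API.
type DeleteResult =
  | { deleted: true; revoked: boolean | null }
  | { deleted: false; need_confirmation: true; message: string };

type ConfirmState =
  | { open: true; message: string; onConfirm: () => void; onReject: () => void }
  | { open: false };

let dialog: ConfirmState = { open: false };

async function removeCredentials(
  apiDelete: (force: boolean) => Promise<DeleteResult>,
  force = false,
): Promise<void> {
  const result = await apiDelete(force);
  if (result.deleted) {
    // Success; the real page also shows a toast here.
    dialog = { open: false };
  } else if (result.need_confirmation) {
    // The server wants explicit confirmation: surface its message and
    // retry the same call with force=true only if the user accepts.
    dialog = {
      open: true,
      message: result.message,
      onConfirm: () => void removeCredentials(apiDelete, true),
      onReject: () => {
        dialog = { open: false };
      },
    };
  }
}
```

Narrowing on `open` (and on `deleted`) is what lets TypeScript guarantee that `message` and the callbacks exist only while the dialog is actually showing.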
@@ -28,6 +28,7 @@ import {
} from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { Switch } from "@/components/ui/switch";
import { TextRenderer } from "@/components/ui/render";
import { history } from "./history";
import NodeHandle from "./NodeHandle";
import {
@@ -38,6 +39,7 @@ import { getPrimaryCategoryColor } from "@/lib/utils";
import { FlowContext } from "./Flow";
import { Badge } from "./ui/badge";
import NodeOutputs from "./NodeOutputs";
import SchemaTooltip from "./SchemaTooltip";
import { IconCoin } from "./ui/icons";
import * as Separator from "@radix-ui/react-separator";
import * as ContextMenu from "@radix-ui/react-context-menu";
@@ -166,7 +168,7 @@ export function CustomNode({
<div key={key}>
<NodeHandle
keyName={key}
isConnected={isHandleConnected(key)}
isConnected={isOutputHandleConnected(key)}
schema={schema.properties[key]}
side="right"
/>
@@ -205,16 +207,18 @@ export function CustomNode({

return keys.map(([propKey, propSchema]) => {
const isRequired = data.inputSchema.required?.includes(propKey);
const isConnected = isHandleConnected(propKey);
const isAdvanced = propSchema.advanced;
const isHidden = propSchema.hidden;
const isConnectable =
// No input connection handles on INPUT and WEBHOOK blocks
![BlockUIType.INPUT, BlockUIType.WEBHOOK].includes(nodeType) &&
// No input connection handles for credentials
propKey !== "credentials" &&
// No input connection handles on INPUT blocks
nodeType !== BlockUIType.INPUT &&
// For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle
!(nodeType == BlockUIType.OUTPUT && propKey == "name");
const isConnected = isInputHandleConnected(propKey);
return (
!isHidden &&
(isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
<div key={propKey} data-id={`input-handle-${propKey}`}>
{isConnectable ? (
@@ -227,15 +231,15 @@ export function CustomNode({
/>
) : (
propKey != "credentials" && (
<span
className="text-m green mb-0 text-gray-900"
title={propSchema.description}
>
{propSchema.title || beautifyString(propKey)}
</span>
<div className="flex gap-1">
<span className="text-m green mb-0 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
<SchemaTooltip description={propSchema.description} />
</div>
)
)}
{!isConnected && (
{isConnected || (
<NodeGenericInputField
nodeId={id}
propKey={getInputPropKey(propKey)}
@@ -298,21 +302,28 @@ export function CustomNode({
setErrors({ ...errors });
};

const isHandleConnected = (key: string) => {
const isInputHandleConnected = (key: string) => {
return (
data.connections &&
data.connections.some((conn: any) => {
if (typeof conn === "string") {
const [source, target] = conn.split(" -> ");
return (
(target.includes(key) && target.includes(data.title)) ||
(source.includes(key) && source.includes(data.title))
);
const [_source, target] = conn.split(" -> ");
return target.includes(key) && target.includes(data.title);
}
return (
(conn.target === id && conn.targetHandle === key) ||
(conn.source === id && conn.sourceHandle === key)
);
return conn.target === id && conn.targetHandle === key;
})
);
};

const isOutputHandleConnected = (key: string) => {
return (
data.connections &&
data.connections.some((conn: any) => {
if (typeof conn === "string") {
const [source, _target] = conn.split(" -> ");
return source.includes(key) && source.includes(data.title);
}
return conn.source === id && conn.sourceHandle === key;
})
);
};
@@ -554,9 +565,13 @@ export function CustomNode({
<div className="flex w-full flex-col">
<div className="flex flex-row items-center justify-between">
<div className="font-roboto flex items-center px-3 text-lg font-semibold">
{beautifyString(
data.blockType?.replace(/Block$/, "") || data.title,
)}
<TextRenderer
value={beautifyString(
data.blockType?.replace(/Block$/, "") || data.title,
)}
truncateLengthLimit={80}
/>

<div className="px-2 text-xs text-gray-500">
#{id.split("-")[0]}
</div>
@@ -7,7 +7,6 @@ import getServerUser from "@/hooks/getServerUser";
import ProfileDropdown from "./ProfileDropdown";
import { IconCircleUser, IconMenu } from "@/components/ui/icons";
import CreditButton from "@/components/nav/CreditButton";

import { NavBarButtons } from "./nav/NavBarButtons";

export async function NavBar() {
@@ -17,7 +16,7 @@ export async function NavBar() {
);
const { user } = await getServerUser();

return (
return user ? (
<header className="sticky top-0 z-50 mx-4 flex h-16 select-none items-center gap-4 border border-gray-300 bg-background p-3 md:rounded-b-2xl md:px-6 md:shadow">
<div className="flex flex-1 items-center gap-4">
<Sheet>
@@ -67,5 +66,19 @@ export async function NavBar() {
{isAvailable && user && <ProfileDropdown />}
</div>
</header>
) : (
<nav className="flex w-full items-center p-2 pt-8">
<div className="flex h-10 w-20 flex-1 flex-row items-center justify-center gap-2">
<a href="https://agpt.co/">
<Image
src="/AUTOgpt_Logo_dark.png"
alt="AutoGPT Logo"
width={100}
height={40}
priority
/>
</a>
</div>
</nav>
);
}
@@ -22,6 +22,7 @@ const NodeHandle: FC<HandleProps> = ({
const typeName: Record<string, string> = {
string: "text",
number: "number",
integer: "integer",
boolean: "true/false",
object: "object",
array: "list",
@@ -3,6 +3,7 @@ import { Card, CardContent, CardHeader } from "@/components/ui/card";
import { Label } from "@/components/ui/label";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { TextRenderer } from "@/components/ui/render";
import { ScrollArea } from "@/components/ui/scroll-area";
import { beautifyString } from "@/lib/utils";
import {
@@ -180,7 +181,7 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
</CardHeader>
<CardContent className="overflow-scroll border-t p-0">
<ScrollArea
className="h-[60vh] w-fit w-full"
className="h-[60vh]"
data-id="blocks-control-scroll-area"
>
{getFilteredBlockList().map((block) => (
@@ -202,13 +203,19 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
className="block truncate pb-1 text-sm font-semibold"
data-id={`block-name-${block.id}`}
>
{beautifyString(block.name).replace(/ Block$/, "")}
<TextRenderer
value={beautifyString(block.name).replace(
/ Block$/,
"",
)}
truncateLengthLimit={45}
/>
</span>
<span className="block break-words text-xs font-normal text-gray-500">
{/* Cap description at 100 characters max */}
{block.description?.length > 100
? block.description.slice(0, 100) + "..."
: block.description}
<span className="block break-all text-xs font-normal text-gray-500">
<TextRenderer
value={block.description}
truncateLengthLimit={165}
/>
</span>
</div>
<div
@@ -115,6 +115,7 @@ export const SaveControl = ({
value={agentName}
onChange={(e) => onNameChange(e.target.value)}
data-id="save-control-name-input"
maxLength={100}
/>
<Label htmlFor="description">Description</Label>
<Input
@@ -124,6 +125,7 @@ export const SaveControl = ({
value={agentDescription}
onChange={(e) => onDescriptionChange(e.target.value)}
data-id="save-control-description-input"
maxLength={500}
/>
{agentMeta?.version && (
<>
@@ -11,22 +11,6 @@ code {
monospace;
}

input,
textarea {
background-color: #ffffff;
color: #000000;
border: 1px solid #555;
padding: 8px;
border-radius: 4px;
width: calc(100% - 18px);
box-sizing: border-box;
}

input::placeholder,
textarea::placeholder {
color: #aaa;
}

.modal {
position: absolute;
top: 50%;
@@ -3,6 +3,7 @@ import { cn } from "@/lib/utils";
import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import SchemaTooltip from "@/components/SchemaTooltip";
import useCredentials from "@/hooks/useCredentials";
import { zodResolver } from "@hookform/resolvers/zod";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
@@ -235,12 +236,10 @@ export const CredentialsInput: FC<{
if (savedApiKeys.length === 0 && savedOAuthCredentials.length === 0) {
return (
<>
<span
className="text-m green mb-0 text-gray-900"
title={schema.description}
>
Credentials
</span>
<div className="mb-2 flex gap-1">
<span className="text-m green text-gray-900">Credentials</span>
<SchemaTooltip description={schema.description} />
</div>
<div className={cn("flex flex-row space-x-2", className)}>
{supportsOAuth2 && (
<Button onClick={handleOAuthLogin}>
@@ -1,5 +1,6 @@
import AutoGPTServerAPI, {
APIKeyCredentials,
CredentialsDeleteNeedConfirmationResponse,
CredentialsDeleteResponse,
CredentialsMetaResponse,
CredentialsProviderName,
@@ -59,7 +60,12 @@ export type CredentialsProviderData = {
createAPIKeyCredentials: (
credentials: APIKeyCredentialsCreatable,
) => Promise<CredentialsMetaResponse>;
deleteCredentials: (id: string) => Promise<CredentialsDeleteResponse>;
deleteCredentials: (
id: string,
force?: boolean,
) => Promise<
CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
>;
};

export type CredentialsProvidersContextType = {
@@ -144,8 +150,14 @@ export default function CredentialsProvider({
async (
provider: CredentialsProviderName,
id: string,
): Promise<CredentialsDeleteResponse> => {
const result = await api.deleteCredentials(provider, id);
force: boolean = false,
): Promise<
CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
> => {
const result = await api.deleteCredentials(provider, id, force);
if (!result.deleted) {
return result;
}
setProviders((prev) => {
if (!prev || !prev[provider]) return prev;

@@ -172,43 +184,64 @@ export default function CredentialsProvider({
api.isAuthenticated().then((isAuthenticated) => {
if (!isAuthenticated) return;

CREDENTIALS_PROVIDER_NAMES.forEach(
(provider: CredentialsProviderName) => {
api.listCredentials(provider).then((response) => {
const { oauthCreds, apiKeys } = response.reduce<{
api.listCredentials().then((response) => {
const credentialsByProvider = response.reduce(
(acc, cred) => {
if (!acc[cred.provider]) {
acc[cred.provider] = { oauthCreds: [], apiKeys: [] };
}
if (cred.type === "oauth2") {
acc[cred.provider].oauthCreds.push(cred);
} else if (cred.type === "api_key") {
acc[cred.provider].apiKeys.push(cred);
}
return acc;
},
{} as Record<
CredentialsProviderName,
{
oauthCreds: CredentialsMetaResponse[];
apiKeys: CredentialsMetaResponse[];
}>(
(acc, cred) => {
if (cred.type === "oauth2") {
acc.oauthCreds.push(cred);
} else if (cred.type === "api_key") {
acc.apiKeys.push(cred);
}
return acc;
},
{ oauthCreds: [], apiKeys: [] },
);
}
>,
);

setProviders((prev) => ({
...prev,
setProviders((prev) => ({
...prev,
...Object.entries(credentialsByProvider).reduce(
(acc, [provider, { apiKeys, oauthCreds }]) => ({
...acc,
[provider]: {
provider,
providerName: providerDisplayNames[provider],
providerName:
providerDisplayNames[provider as CredentialsProviderName],
savedApiKeys: apiKeys,
savedOAuthCredentials: oauthCreds,
oAuthCallback: (code: string, state_token: string) =>
oAuthCallback(provider, code, state_token),
oAuthCallback(
provider as CredentialsProviderName,
code,
state_token,
),
createAPIKeyCredentials: (
credentials: APIKeyCredentialsCreatable,
) => createAPIKeyCredentials(provider, credentials),
deleteCredentials: (id: string) =>
deleteCredentials(provider, id),
) =>
createAPIKeyCredentials(
provider as CredentialsProviderName,
credentials,
),
deleteCredentials: (id: string, force: boolean = false) =>
deleteCredentials(
provider as CredentialsProviderName,
id,
force,
),
},
}));
});
},
);
}),
{},
),
}));
});
});
}, [api, createAPIKeyCredentials, deleteCredentials, oAuthCallback]);
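The refactor above replaces one `listCredentials(provider)` request per provider with a single unfiltered request whose results are bucketed client-side. The bucketing step in isolation, with a simplified `Cred` shape standing in for the real `CredentialsMetaResponse`:

```typescript
// Hedged sketch of the grouping reduce; `Cred` is illustrative only.
type Cred = { provider: string; type: "oauth2" | "api_key"; id: string };

function groupByProvider(creds: Cred[]) {
  return creds.reduce(
    (acc, cred) => {
      // Lazily create the bucket, then sort by credential type.
      acc[cred.provider] ??= { oauthCreds: [], apiKeys: [] };
      (cred.type === "oauth2"
        ? acc[cred.provider].oauthCreds
        : acc[cred.provider].apiKeys
      ).push(cred);
      return acc;
    },
    {} as Record<string, { oauthCreds: Cred[]; apiKeys: Cred[] }>,
  );
}

// groupByProvider([{ provider: "github", type: "oauth2", id: "1" }])
// → { github: { oauthCreds: [ ... ], apiKeys: [] } }
```

One round trip instead of N also means a single `setProviders` update, which avoids the cascade of re-renders the per-provider version caused.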
@@ -2,6 +2,7 @@ import AutoGPTServerAPI, { GraphMeta } from "@/lib/autogpt-server-api";
import React, { useEffect, useMemo, useState } from "react";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { TextRenderer } from "@/components/ui/render";
import Link from "next/link";
import {
Dialog,
@@ -94,7 +95,10 @@ export const AgentFlowList = ({
});
}}
>
{template.name}
<TextRenderer
value={template.name}
truncateLengthLimit={30}
/>
</DropdownMenuItem>
))}
</>
@@ -162,7 +166,9 @@ export const AgentFlowList = ({
onClick={() => onSelectFlow(flow)}
data-state={selectedFlow?.id == flow.id ? "selected" : null}
>
<TableCell>{flow.name}</TableCell>
<TableCell>
<TextRenderer value={flow.name} truncateLengthLimit={30} />
</TableCell>
{/* <TableCell><FlowStatusBadge status={flow.status ?? "active"} /></TableCell> */}
{/* <TableCell>
{flow.updatedAt ?? "???"}

@@ -71,7 +71,7 @@ export const FlowRunInfo: React.FC<
result: result.output_data?.output || undefined,
})),
);
}, [api, flow.id, flow.version, flowRun.id]);
}, [api, flow.id, flowRun.id]);

// Fetch graph and execution data
useEffect(() => {
@@ -80,7 +80,7 @@ export const FlowRunInfo: React.FC<
}

fetchBlockResults();
}, [isOutputOpen, blockOutputs]);
}, [isOutputOpen, blockOutputs, fetchBlockResults]);

if (flowRun.graphID != flow.id) {
throw new Error(
@@ -90,7 +90,7 @@ export const FlowRunInfo: React.FC<

const handleStopRun = useCallback(() => {
api.stopGraphExecution(flow.id, flowRun.id);
}, [flow.id, flowRun.id]);
}, [api, flow.id, flowRun.id]);

return (
<>

@@ -12,6 +12,7 @@ import {
} from "@/components/ui/table";
import moment from "moment/moment";
import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge";
import { TextRenderer } from "../ui/render";

export const FlowRunsList: React.FC<{
flows: GraphMeta[];
@@ -43,7 +44,10 @@ export const FlowRunsList: React.FC<{
data-state={selectedRun?.id == run.id ? "selected" : null}
>
<TableCell>
{flows.find((f) => f.id == run.graphID)!.name}
<TextRenderer
value={flows.find((f) => f.id == run.graphID)!.name}
truncateLengthLimit={30}
/>
</TableCell>
<TableCell>{moment(run.startTime).format("HH:mm")}</TableCell>
<TableCell>

@@ -30,6 +30,7 @@ import {
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { TextRenderer } from "../ui/render";

interface SchedulesTableProps {
schedules: Schedule[];
@@ -111,7 +112,7 @@ export const SchedulesTable = ({
<SelectContent>
{agents.map((agent, i) => (
<SelectItem key={agent.id + i} value={agent.id}>
{agent.name}
<TextRenderer value={agent.name} truncateLengthLimit={30} />
</SelectItem>
))}
</SelectContent>
@@ -20,6 +20,14 @@ import {
SelectTrigger,
SelectValue,
} from "./ui/select";
import {
MultiSelector,
MultiSelectorContent,
MultiSelectorInput,
MultiSelectorItem,
MultiSelectorList,
MultiSelectorTrigger,
} from "./ui/multiselect";
import { LocalValuedInput } from "./ui/input";
import NodeHandle from "./NodeHandle";
import { ConnectionData } from "./CustomNode";
@@ -133,6 +141,37 @@ export const NodeGenericInputField: FC<{
}

if ("properties" in propSchema) {
// Render a multi-select for all-boolean sub-schemas with more than 3 properties
if (
Object.values(propSchema.properties).every(
(subSchema) => "type" in subSchema && subSchema.type == "boolean",
) &&
Object.keys(propSchema.properties).length >= 3
) {
const options = Object.keys(propSchema.properties);
const selectedKeys = Object.entries(currentValue || {})
.filter(([_, v]) => v)
.map(([k, _]) => k);
return (
<NodeMultiSelectInput
selfKey={propKey}
schema={propSchema}
selection={selectedKeys}
error={errors[propKey]}
className={className}
displayName={displayName}
handleInputChange={(key, selection) => {
handleInputChange(
key,
Object.fromEntries(
options.map((option) => [option, selection.includes(option)]),
),
);
}}
/>
);
}

return (
<NodeObjectInputTree
nodeId={nodeId}
@@ -595,6 +634,56 @@ const NodeArrayInput: FC<{
);
};

const NodeMultiSelectInput: FC<{
selfKey: string;
schema: BlockIOObjectSubSchema; // TODO: Support BlockIOArraySubSchema
selection?: string[];
error?: string;
className?: string;
displayName?: string;
handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
}> = ({
selfKey,
schema,
selection = [],
error,
className,
displayName,
handleInputChange,
}) => {
const options = Object.keys(schema.properties);

return (
<div className={cn("flex flex-col", className)}>
<MultiSelector
className="nodrag"
values={selection}
onValuesChange={(v) => handleInputChange(selfKey, v)}
>
<MultiSelectorTrigger>
<MultiSelectorInput
placeholder={
schema.placeholder ?? `Select ${displayName || schema.title}...`
}
/>
</MultiSelectorTrigger>
<MultiSelectorContent className="nowheel">
<MultiSelectorList>
{options
.map((key) => ({ ...schema.properties[key], key }))
.map(({ key, title, description }) => (
<MultiSelectorItem key={key} value={key} title={description}>
{title ?? key}
</MultiSelectorItem>
))}
</MultiSelectorList>
</MultiSelectorContent>
</MultiSelector>
{error && <span className="error-message">{error}</span>}
</div>
);
};

const NodeStringInput: FC<{
selfKey: string;
schema: BlockIOStringSubSchema;
@@ -783,7 +872,7 @@ const NodeBooleanInput: FC<{
defaultChecked={value}
onCheckedChange={(v) => handleInputChange(selfKey, v)}
/>
<span className="ml-3">{displayName}</span>
{displayName && <span className="ml-3">{displayName}</span>}
</div>
{error && <span className="error-message">{error}</span>}
</div>
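The two hunks above wire a string-array multi-select to inputs whose schema is an object of booleans: the selected keys are derived from the truthy entries of the current value, and changes are mapped back to a full boolean record. That round-trip as a standalone sketch (function names are ours, for illustration):

```typescript
// Boolean record → selected keys, mirroring the selectedKeys derivation.
function toSelection(value: Record<string, boolean>): string[] {
  return Object.entries(value)
    .filter(([, v]) => v)
    .map(([k]) => k);
}

// Selected keys → boolean record over all options, mirroring the
// handleInputChange wiring; unselected options are written as false.
function toBooleanRecord(
  options: string[],
  selection: string[],
): Record<string, boolean> {
  return Object.fromEntries(options.map((o) => [o, selection.includes(o)]));
}

// toBooleanRecord(["a", "b", "c"], toSelection({ a: true, b: false, c: true }))
// → { a: true, b: false, c: true }
```

Writing `false` for unselected options (rather than omitting them) keeps the stored value's shape identical to the sub-schema, so downstream blocks never see missing keys.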
143  autogpt_platform/frontend/src/components/ui/alert-dialog.tsx  Normal file
@@ -0,0 +1,143 @@
"use client";

import * as React from "react";
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";

import { cn } from "@/lib/utils";
import { buttonVariants } from "@/components/ui/button";
import { VariantProps } from "class-variance-authority";

const AlertDialog = AlertDialogPrimitive.Root;

const AlertDialogTrigger = AlertDialogPrimitive.Trigger;

const AlertDialogPortal = AlertDialogPrimitive.Portal;

const AlertDialogOverlay = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Overlay>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Overlay>
>(({ className, ...props }, ref) => (
<AlertDialogPrimitive.Overlay
className={cn(
"fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
className,
)}
{...props}
ref={ref}
/>
));
AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName;

const AlertDialogContent = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Content>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Content>
>(({ className, ...props }, ref) => (
<AlertDialogPortal>
<AlertDialogOverlay />
<AlertDialogPrimitive.Content
ref={ref}
className={cn(
"fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border border-neutral-200 bg-white p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] dark:border-neutral-800 dark:bg-neutral-950 sm:rounded-lg",
className,
)}
{...props}
/>
</AlertDialogPortal>
));
AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName;

const AlertDialogHeader = ({
className,
...props
}: React.HTMLAttributes<HTMLDivElement>) => (
<div
className={cn(
"flex flex-col space-y-2 text-center sm:text-left",
className,
)}
{...props}
/>
);
AlertDialogHeader.displayName = "AlertDialogHeader";

const AlertDialogFooter = ({
className,
...props
}: React.HTMLAttributes<HTMLDivElement>) => (
<div
className={cn(
"flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2",
className,
)}
{...props}
/>
);
AlertDialogFooter.displayName = "AlertDialogFooter";

const AlertDialogTitle = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Title>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Title>
>(({ className, ...props }, ref) => (
<AlertDialogPrimitive.Title
ref={ref}
className={cn("text-lg font-semibold", className)}
{...props}
/>
));
AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName;

const AlertDialogDescription = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Description>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Description>
>(({ className, ...props }, ref) => (
<AlertDialogPrimitive.Description
ref={ref}
className={cn("text-sm text-neutral-500 dark:text-neutral-400", className)}
{...props}
/>
));
AlertDialogDescription.displayName =
AlertDialogPrimitive.Description.displayName;

const AlertDialogAction = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Action>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Action> &
VariantProps<typeof buttonVariants>
>(({ className, variant, ...props }, ref) => (
<AlertDialogPrimitive.Action
ref={ref}
className={cn(buttonVariants({ variant: variant }), className)}
{...props}
/>
));
AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName;

const AlertDialogCancel = React.forwardRef<
React.ElementRef<typeof AlertDialogPrimitive.Cancel>,
React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Cancel>
>(({ className, ...props }, ref) => (
<AlertDialogPrimitive.Cancel
ref={ref}
className={cn(
buttonVariants({ variant: "outline" }),
"mt-2 sm:mt-0",
className,
)}
{...props}
/>
));
AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName;

export {
AlertDialog,
AlertDialogPortal,
AlertDialogOverlay,
AlertDialogTrigger,
AlertDialogContent,
AlertDialogHeader,
AlertDialogFooter,
AlertDialogTitle,
AlertDialogDescription,
AlertDialogAction,
AlertDialogCancel,
};
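For orientation, this is how the new primitives compose in practice, essentially the shape already used on the private page earlier in this commit; the component name and copy here are illustrative:

```typescript
import {
  AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent,
  AlertDialogDescription, AlertDialogFooter, AlertDialogHeader,
  AlertDialogTitle,
} from "@/components/ui/alert-dialog";

// Hypothetical controlled confirmation dialog built on the primitives above.
export function DeleteConfirmation(props: {
  open: boolean;
  onConfirm: () => void;
  onCancel: () => void;
}) {
  return (
    <AlertDialog open={props.open}>
      <AlertDialogContent>
        <AlertDialogHeader>
          <AlertDialogTitle>Are you sure?</AlertDialogTitle>
          <AlertDialogDescription>
            This action cannot be undone.
          </AlertDialogDescription>
        </AlertDialogHeader>
        <AlertDialogFooter>
          <AlertDialogCancel onClick={props.onCancel}>Cancel</AlertDialogCancel>
          {/* variant comes from the buttonVariants extension in the new file */}
          <AlertDialogAction variant="destructive" onClick={props.onConfirm}>
            Continue
          </AlertDialogAction>
        </AlertDialogFooter>
      </AlertDialogContent>
    </AlertDialog>
  );
}
```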
@@ -144,7 +144,7 @@ const MultiSelector = forwardRef<HTMLDivElement, MultiSelectorProps>(
ref={ref}
onKeyDown={handleKeyDown}
className={cn(
"flex flex-col space-y-2 overflow-visible bg-transparent",
"flex flex-col overflow-visible bg-transparent",
className,
)}
dir={dir}
@@ -174,7 +174,7 @@ const MultiSelectorTrigger = forwardRef<
<div
ref={ref}
className={cn(
"flex flex-wrap gap-1 rounded-lg border border-muted bg-background p-1 py-2",
"agpt-border-input agpt-shadow-input flex flex-wrap gap-1 rounded-lg bg-background px-3 py-2 pl-1 text-sm",
className,
)}
{...props}
@@ -183,7 +183,7 @@ const MultiSelectorTrigger = forwardRef<
<Badge
key={item}
className={cn(
"flex items-center gap-1 rounded-xl px-1",
"flex items-center gap-1 rounded-xl px-1 pl-2",
activeIndex === index && "ring-2 ring-muted-foreground",
)}
variant={"secondary"}
@@ -237,10 +237,10 @@ MultiSelectorInput.displayName = "MultiSelectorInput";
const MultiSelectorContent = forwardRef<
HTMLDivElement,
React.HTMLAttributes<HTMLDivElement>
>(({ children }, ref) => {
>(({ children, className }, ref) => {
const { open } = useMultiSelect();
return (
<div ref={ref} className="relative">
<div ref={ref} className={cn("relative mt-2", className)}>
{open && children}
</div>
);
@@ -76,15 +76,14 @@ const AudioRenderer: React.FC<{ audioUrl: string }> = ({ audioUrl }) => (
</div>
);

const TextRenderer: React.FC<{ value: any; truncateLongData?: boolean }> = ({
value,
truncateLongData,
}) => {
const maxChars = 100;
export const TextRenderer: React.FC<{
value: any;
truncateLengthLimit?: number;
}> = ({ value, truncateLengthLimit }) => {
const text =
typeof value === "object" ? JSON.stringify(value, null, 2) : String(value);
return truncateLongData && text.length > maxChars
? text.slice(0, maxChars) + "..."
return truncateLengthLimit && text.length > truncateLengthLimit
? text.slice(0, truncateLengthLimit) + "..."
: text;
};

@@ -101,5 +100,10 @@ export const ContentRenderer: React.FC<{
return <AudioRenderer audioUrl={value} />;
}
}
return <TextRenderer value={value} truncateLongData={truncateLongData} />;
return (
<TextRenderer
value={value}
truncateLengthLimit={truncateLongData ? 100 : undefined}
/>
);
};
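This refactor turns `TextRenderer`'s fixed 100-character cap into a caller-chosen `truncateLengthLimit`, which is what the `truncateLengthLimit={30}`/`{45}`/`{80}` call sites elsewhere in this commit rely on. A minimal illustrative use (the wrapper component is ours, not from the app):

```typescript
import { TextRenderer } from "@/components/ui/render";

// Truncates to 30 characters with a "..." suffix; omit the prop entirely
// to render the full text with no cap.
export const FlowName = ({ name }: { name: string }) => (
  <TextRenderer value={name} truncateLengthLimit={30} />
);
```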
@@ -14,7 +14,10 @@ const ScrollArea = React.forwardRef<
className={cn("relative overflow-hidden", className)}
{...props}
>
<ScrollAreaPrimitive.Viewport className="h-full w-full rounded-[inherit]">
<ScrollAreaPrimitive.Viewport
className="h-full w-full rounded-[inherit]"
style={{ overflow: "scroll" }}
>
{children}
</ScrollAreaPrimitive.Viewport>
<ScrollBar />
@@ -1,24 +1,24 @@
import { SupabaseClient } from "@supabase/supabase-js";
import {
AnalyticsMetrics,
AnalyticsDetails,
AnalyticsMetrics,
APIKeyCredentials,
Block,
CredentialsDeleteNeedConfirmationResponse,
CredentialsDeleteResponse,
CredentialsMetaResponse,
ExecutionMeta,
Graph,
GraphCreatable,
GraphUpdateable,
GraphExecuteResponse,
GraphMeta,
GraphMetaWithRuns,
GraphExecuteResponse,
ExecutionMeta,
GraphUpdateable,
NodeExecutionResult,
OAuth2Credentials,
User,
ScheduleCreatable,
ScheduleUpdateable,
Schedule,
ScheduleCreatable,
User,
} from "./types";

export default class BaseAutoGPTServerAPI {
@@ -212,8 +212,12 @@ export default class BaseAutoGPTServerAPI {
);
}

listCredentials(provider: string): Promise<CredentialsMetaResponse[]> {
return this._get(`/integrations/${provider}/credentials`);
listCredentials(provider?: string): Promise<CredentialsMetaResponse[]> {
return this._get(
provider
? `/integrations/${provider}/credentials`
: "/integrations/credentials",
);
}

getCredentials(
@@ -226,10 +230,14 @@ export default class BaseAutoGPTServerAPI {
deleteCredentials(
provider: string,
id: string,
): Promise<CredentialsDeleteResponse> {
force: boolean = true,
): Promise<
CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
> {
return this._request(
"DELETE",
`/integrations/${provider}/credentials/${id}`,
force ? { force: true } : undefined,
);
}

@@ -271,13 +279,14 @@ export default class BaseAutoGPTServerAPI {
?.access_token || "";

let url = this.baseUrl + path;
if (method === "GET" && payload) {
const payloadAsQuery = ["GET", "DELETE"].includes(method);
if (payloadAsQuery && payload) {
// For GET requests, use payload as query
const queryParams = new URLSearchParams(payload);
url += `?${queryParams.toString()}`;
}

const hasRequestBody = method !== "GET" && payload !== undefined;
const hasRequestBody = !payloadAsQuery && payload !== undefined;
const response = await fetch(url, {
method,
headers: {
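Since DELETE now carries its payload in the query string (previously only GET did), the `{ force: true }` argument above surfaces as `?force=true` on the credentials endpoint. A small sketch of that URL-building rule, extracted from the hunk (the helper name is ours, not the client's):

```typescript
// Mirrors the payloadAsQuery logic in _request above, as a pure function.
function buildUrl(
  base: string,
  path: string,
  method: string,
  payload?: Record<string, any>,
) {
  let url = base + path;
  const payloadAsQuery = ["GET", "DELETE"].includes(method);
  if (payloadAsQuery && payload) {
    // GET/DELETE payloads become query parameters, not a request body.
    url += `?${new URLSearchParams(payload).toString()}`;
  }
  return { url, hasRequestBody: !payloadAsQuery && payload !== undefined };
}

// buildUrl("https://api.example.com", "/integrations/github/credentials/123",
//          "DELETE", { force: true })
// → { url: "...credentials/123?force=true", hasRequestBody: false }
```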
@@ -56,6 +56,7 @@ export type BlockIOSubSchemaMeta = {
description?: string;
placeholder?: string;
advanced?: boolean;
hidden?: boolean;
};

export type BlockIOObjectSubSchema = BlockIOSubSchemaMeta & {
@@ -259,6 +260,7 @@ export type NodeExecutionResult = {
/* Mirror of backend/server/integrations/router.py:CredentialsMetaResponse */
export type CredentialsMetaResponse = {
id: string;
provider: CredentialsProviderName;
type: CredentialsType;
title?: string;
scopes?: Array<string>;
@@ -271,6 +273,13 @@ export type CredentialsDeleteResponse = {
revoked: boolean | null;
};

/* Mirror of backend/server/integrations/router.py:CredentialsDeletionNeedsConfirmationResponse */
export type CredentialsDeleteNeedConfirmationResponse = {
deleted: false;
need_confirmation: true;
message: string;
};

/* Mirror of backend/data/model.py:CredentialsMetaInput */
export type CredentialsMetaInput = {
id: string;
@@ -284,7 +293,7 @@ type BaseCredentials = {
id: string;
type: CredentialsType;
title?: string;
provider: string;
provider: CredentialsProviderName;
};

/* Mirror of autogpt_libs/supabase_integration_credentials_store/types.py:OAuth2Credentials */
@@ -317,6 +326,7 @@ export enum BlockUIType {
INPUT = "Input",
OUTPUT = "Output",
NOTE = "Note",
WEBHOOK = "Webhook",
AGENT = "Agent",
}
@@ -41,6 +41,7 @@ export function getTypeTextColor(type: string | null): string {
{
string: "text-green-500",
number: "text-blue-500",
integer: "text-blue-500",
boolean: "text-yellow-500",
object: "text-purple-500",
array: "text-indigo-500",
@@ -58,6 +59,7 @@ export function getTypeBgColor(type: string | null): string {
{
string: "border-green-500",
number: "border-blue-500",
integer: "border-blue-500",
boolean: "border-yellow-500",
object: "border-purple-500",
array: "border-indigo-500",
@@ -74,6 +76,7 @@ export function getTypeColor(type: string | null): string {
{
string: "#22c55e",
number: "#3b82f6",
integer: "#3b82f6",
boolean: "#eab308",
object: "#a855f7",
array: "#6366f1",
@@ -1,13 +1,12 @@
import { test, expect } from "./fixtures";
// auth.spec.ts
import { test } from "./fixtures";

test.describe("Authentication", () => {
test("user can login successfully", async ({ page, loginPage, testUser }) => {
await page.goto("/login"); // Make sure we're on the login page
await page.goto("/login");
await loginPage.login(testUser.email, testUser.password);
// expect to be redirected to the home page
await expect(page).toHaveURL("/");
// expect to see the Monitor text
await expect(page.getByText("Monitor")).toBeVisible();
await test.expect(page).toHaveURL("/");
await test.expect(page.getByText("Monitor")).toBeVisible();
});

test("user can logout successfully", async ({
@@ -15,17 +14,17 @@ test.describe("Authentication", () => {
loginPage,
testUser,
}) => {
await page.goto("/login"); // Make sure we're on the login page
await page.goto("/login");
await loginPage.login(testUser.email, testUser.password);

// Expect to be on the home page
await expect(page).toHaveURL("/");
await test.expect(page).toHaveURL("/");

// Click on the user menu
await page.getByRole("button", { name: "CN" }).click();
// Click on the logout menu item
await page.getByRole("menuitem", { name: "Log out" }).click();
// Expect to be redirected to the login page
await expect(page).toHaveURL("/login");

await test.expect(page).toHaveURL("/login");
});

test("login in, then out, then in again", async ({
@@ -33,14 +32,14 @@ test.describe("Authentication", () => {
loginPage,
testUser,
}) => {
await page.goto("/login"); // Make sure we're on the login page
await page.goto("/login");
await loginPage.login(testUser.email, testUser.password);
await page.goto("/");
await page.getByRole("button", { name: "CN" }).click();
await page.getByRole("menuitem", { name: "Log out" }).click();
await expect(page).toHaveURL("/login");
await test.expect(page).toHaveURL("/login");
await loginPage.login(testUser.email, testUser.password);
await expect(page).toHaveURL("/");
await expect(page.getByText("Monitor")).toBeVisible();
await test.expect(page).toHaveURL("/");
await test.expect(page.getByText("Monitor")).toBeVisible();
});
});
@@ -1,18 +1,109 @@
/* eslint-disable react-hooks/rules-of-hooks */
import { test as base } from "@playwright/test";
import { createTestUserFixture } from "./test-user.fixture";
import { createLoginPageFixture } from "./login-page.fixture";
import type { TestUser } from "./test-user.fixture";
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import { faker } from "@faker-js/faker";
import fs from "fs";
import path from "path";
import { TestUser } from "./test-user.fixture";
import { LoginPage } from "../pages/login.page";

type Fixtures = {
// Extend both worker state and test-specific fixtures
type WorkerFixtures = {
workerAuth: TestUser;
};

type TestFixtures = {
testUser: TestUser;
loginPage: LoginPage;
};

// Combine fixtures
export const test = base.extend<Fixtures>({
testUser: createTestUserFixture,
loginPage: createLoginPageFixture,
let supabase: SupabaseClient;

function getSupabaseAdmin() {
if (!supabase) {
supabase = createClient(
process.env.SUPABASE_URL!,
process.env.SUPABASE_SERVICE_ROLE_KEY!,
{
auth: {
autoRefreshToken: false,
persistSession: false,
},
},
);
}
return supabase;
}

export const test = base.extend<TestFixtures, WorkerFixtures>({
// Define the worker-level fixture that creates and manages worker-specific auth
workerAuth: [
async ({}, use, workerInfo) => {
const workerId = workerInfo.workerIndex;
const fileName = path.resolve(
process.cwd(),
`.auth/worker-${workerId}.json`,
);

// Create directory if it doesn't exist
const dirPath = path.dirname(fileName);
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, { recursive: true });
}

let auth: TestUser;
if (fs.existsSync(fileName)) {
auth = JSON.parse(fs.readFileSync(fileName, "utf-8"));
} else {
// Generate new worker-specific test user
auth = {
email: `test.worker.${workerId}.${Date.now()}@example.com`,
password: faker.internet.password({ length: 12 }),
};

const supabase = getSupabaseAdmin();
const {
data: { user },
error: signUpError,
} = await supabase.auth.signUp({
email: auth.email,
password: auth.password,
});

if (signUpError) {
throw signUpError;
}

auth.id = user?.id;
fs.writeFileSync(fileName, JSON.stringify(auth));
}

await use(auth);

// Cleanup code is commented out to preserve test users during development
/*
if (workerInfo.project.metadata.teardown) {
if (auth.id) {
await deleteTestUser(auth.id);
}
if (fs.existsSync(fileName)) {
fs.unlinkSync(fileName);
}
}
*/
},
{ scope: "worker" },
],

// Define the test-level fixture that provides access to the worker auth
testUser: async ({ workerAuth }, use) => {
await use(workerAuth);
},

// Update login page fixture to use worker auth by default
loginPage: async ({ page }, use) => {
await use(new LoginPage(page));
},
});

export { expect } from "@playwright/test";
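Specs import this extended `test` instead of Playwright's stock one and receive the worker-scoped account transparently. A minimal spec, assuming it lives next to `fixtures.ts` as `auth.spec.ts` and `profile.spec.ts` below do:

```typescript
import { test, expect } from "./fixtures";

// testUser resolves to the worker-scoped account that workerAuth created
// (or re-read from .auth/worker-<N>.json) for this Playwright worker.
test("worker auth is available to specs", async ({ page, loginPage, testUser }) => {
  await page.goto("/login");
  await loginPage.login(testUser.email, testUser.password);
  await expect(page).toHaveURL("/");
});
```

Scoping the account to the worker (rather than to each test) means parallel workers never race to create or delete the same user, and the on-disk `.auth/worker-<N>.json` cache keeps reruns cheap.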
15  autogpt_platform/frontend/src/tests/pages/base.page.ts  Normal file
@@ -0,0 +1,15 @@
import { Page } from "@playwright/test";
import { NavBar } from "./navbar.page";

export class BasePage {
readonly navbar: NavBar;

constructor(protected page: Page) {
this.navbar = new NavBar(page);
}

async waitForPageLoad() {
// Common page load waiting logic
await this.page.waitForLoadState("networkidle", { timeout: 10000 });
}
}

51  autogpt_platform/frontend/src/tests/pages/navbar.page.ts  Normal file
@@ -0,0 +1,51 @@
import { Page } from "@playwright/test";

export class NavBar {
constructor(private page: Page) {}

async clickProfileLink() {
// await this.page.getByTestId("profile-link").click();

await this.page.getByRole("button", { name: "CN" }).click();
await this.page.getByRole("menuitem", { name: "Profile" }).click();
}

async clickMonitorLink() {
await this.page.getByTestId("monitor-link").click();
}

async clickBuildLink() {
await this.page.getByTestId("build-link").click();
}

async clickMarketplaceLink() {
await this.page.getByTestId("marketplace-link").click();
}

async getUserMenuButton() {
return this.page.getByRole("button", { name: "CN" });
}

async clickUserMenu() {
await (await this.getUserMenuButton()).click();
}

async logout() {
await this.clickUserMenu();
await this.page.getByRole("menuitem", { name: "Log out" }).click();
}

async isLoggedIn(): Promise<boolean> {
try {
await (
await this.getUserMenuButton()
).waitFor({
state: "visible",
timeout: 5000,
});
return true;
} catch {
return false;
}
}
}

38  autogpt_platform/frontend/src/tests/pages/profile.page.ts  Normal file
@@ -0,0 +1,38 @@
import { Page } from "@playwright/test";
import { BasePage } from "./base.page";

export class ProfilePage extends BasePage {
constructor(page: Page) {
super(page);
}

async getDisplayedEmail(): Promise<string> {
await this.waitForPageToLoad();
const email = await this.page.getByTestId("profile-email").textContent();
if (!email) {
throw new Error("Email not found");
}
return email;
}

async isLoaded(): Promise<boolean> {
try {
await this.waitForPageToLoad();
return true;
} catch (error) {
console.error("Error loading profile page", error);
return false;
}
}

private async waitForPageToLoad(): Promise<void> {
await this.page.waitForLoadState("networkidle", { timeout: 60_000 });

await this.page.getByTestId("profile-email").waitFor({
state: "visible",
timeout: 60_000,
});

await this.page.waitForLoadState("networkidle", { timeout: 60_000 });
}
}

57  autogpt_platform/frontend/src/tests/profile.spec.ts  Normal file
@@ -0,0 +1,57 @@
// profile.spec.ts
import { test } from "./fixtures";
import { ProfilePage } from "./pages/profile.page";

test.describe("Profile", () => {
let profilePage: ProfilePage;

test.beforeEach(async ({ page, loginPage, testUser }) => {
profilePage = new ProfilePage(page);

// Start each test with login using worker auth
await page.goto("/login");
await loginPage.login(testUser.email, testUser.password);
await test.expect(page).toHaveURL("/");
});

test("user can view their profile information", async ({
page,
testUser,
}) => {
await profilePage.navbar.clickProfileLink();
// workaround for #8788
// sleep for 10 seconds to allow page to load due to bug in our system
await page.waitForTimeout(10000);
await page.reload();
await page.reload();
await test.expect(profilePage.isLoaded()).resolves.toBeTruthy();
await test.expect(page).toHaveURL(new RegExp("/profile"));

// Verify email matches test worker's email
const displayedEmail = await profilePage.getDisplayedEmail();
test.expect(displayedEmail).toBe(testUser.email);
});

test("profile navigation is accessible from navbar", async ({ page }) => {
await profilePage.navbar.clickProfileLink();
await test.expect(page).toHaveURL(new RegExp("/profile"));
// workaround for #8788
await page.reload();
await page.reload();
await test.expect(profilePage.isLoaded()).resolves.toBeTruthy();
});

test("profile displays user Credential providers", async ({ page }) => {
await profilePage.navbar.clickProfileLink();

// await test
//   .expect(page.getByTestId("profile-section-personal"))
//   .toBeVisible();
// await test
//   .expect(page.getByTestId("profile-section-settings"))
//   .toBeVisible();
// await test
//   .expect(page.getByTestId("profile-section-security"))
//   .toBeVisible();
});
});
@@ -2100,6 +2100,18 @@
resolved "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz"
integrity sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==

"@radix-ui/react-alert-dialog@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.2.tgz#ac3bb7f71f5cbb595d3d0949bb12b598c2a99981"
integrity sha512-eGSlLzPhKO+TErxkiGcCZGuvbVMnLA1MTnyBksGOeGRGkxHiiJUujsjmNTdWTm4iHVSRaUao9/4Ur671auMghQ==
dependencies:
"@radix-ui/primitive" "1.1.0"
"@radix-ui/react-compose-refs" "1.1.0"
"@radix-ui/react-context" "1.1.1"
"@radix-ui/react-dialog" "1.1.2"
"@radix-ui/react-primitive" "2.0.0"
"@radix-ui/react-slot" "1.1.0"

"@radix-ui/react-arrow@1.1.0":
version "1.1.0"
resolved "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz"
@@ -2182,7 +2194,7 @@
resolved "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz"
integrity sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==

"@radix-ui/react-dialog@^1.1.2":
"@radix-ui/react-dialog@1.1.2", "@radix-ui/react-dialog@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.1.2.tgz#d9345575211d6f2d13e209e84aec9a8584b54d6c"
integrity sha512-Yj4dZtqa2o+kG61fzB0H2qUvmwBA2oyQroGLyNtBj1beo1khoQ3q1a2AO8rrQYjd8256CO9+N8L9tvsS+bnIyA==
@@ -2428,13 +2440,20 @@
dependencies:
"@radix-ui/react-primitive" "2.0.0"

"@radix-ui/react-slot@1.1.0", "@radix-ui/react-slot@^1.1.0":
"@radix-ui/react-slot@1.1.0":
version "1.1.0"
resolved "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz"
integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==
dependencies:
"@radix-ui/react-compose-refs" "1.1.0"

"@radix-ui/react-slot@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.1.0.tgz#7c5e48c36ef5496d97b08f1357bb26ed7c714b84"
integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==
dependencies:
"@radix-ui/react-compose-refs" "1.1.0"

"@radix-ui/react-switch@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@radix-ui/react-switch/-/react-switch-1.1.1.tgz#1401658c24d66a18610f18793afbaa7fedf5429a"
40  autogpt_platform/market/poetry.lock  generated
@@ -1058,29 +1058,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "ruff"
version = "0.7.4"
version = "0.8.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
{file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
{file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
{file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
{file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
{file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
{file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
{file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
{file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
{file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
{file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
{file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
{file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
{file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
]

[[package]]
@@ -1298,4 +1298,4 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "985f87e9d6e2b7232f880a476c69c626bc4227156d8a57d8f1867236b215f82f"
content-hash = "89a2655b6c666f40a0319881580bc447aea78febee65a04eebf73fd092e2147e"
@ -28,7 +28,7 @@ pytest-asyncio = "^0.24.0"

pytest-watcher = "^0.4.3"
requests = "^2.32.3"
ruff = "^0.7.4"
ruff = "^0.8.0"
pyright = "^1.1.389"
isort = "^5.13.2"
black = "^24.10.0"
@ -180,7 +180,6 @@ class AnthropicProvider(BaseChatModelProvider[AnthropicModelName, AnthropicSettings]):

        """Create a completion using the Anthropic API."""
        anthropic_messages, completion_kwargs = self._get_chat_completion_args(
            prompt_messages=model_prompt,
            model=model_name,
            functions=functions,
            max_output_tokens=max_output_tokens,
            **kwargs,
@ -55,7 +55,7 @@ This will generate the Prisma client for PostgreSQL. You will also need to run the migrations.

```bash
cd autogpt_platform/
docker compose up -d
docker compose up -d --build
```

You can then run the migrations from the `backend` directory.
@ -90,7 +90,7 @@ To run the backend services, follow these steps:

* Run the backend services:
   ```
   docker compose up -d
   docker compose up -d --build
   ```
   This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode.
@ -83,7 +83,7 @@ Follow these steps to create and test a new block:

In this case, we're mocking the `get_request` method to always return a dictionary with an 'extract' key, simulating a successful API response. This allows us to test the block's logic without making actual network requests, which could be slow, unreliable, or rate-limited.
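A minimal sketch of such a mock, assuming the guide's convention of a `test_mock` dict (passed to the block's `__init__` alongside the test inputs) that maps method names to stand-ins; the lambda signature is illustrative:

```python
# Replaces get_request during testing so the block never touches the network.
test_mock = {
    "get_request": lambda url, json: {"extract": "Lorem ipsum dolor sit amet"}
}
```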
5. **Implement the `run` method with error handling:**, this should contain the main logic of the block:
5. **Implement the `run` method with error handling.** This should contain the main logic of the block:

```python
def run(self, input_data: Input, **kwargs) -> BlockOutput:
@ -234,7 +234,7 @@ All our existing handlers and the base class can be found [here][OAuth2 handlers]

Every handler must implement the following parts of the [`BaseOAuthHandler`] interface:

```python title="autogpt_platform/backend/backend/integrations/oauth/base.py"
```python title="backend/integrations/oauth/base.py"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler1"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler2"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler3"
@ -249,13 +249,13 @@ Aside from implementing the `OAuthHandler` itself, adding a handler into the system

- Adding the handler class to `HANDLERS_BY_NAME` under [`integrations/oauth/__init__.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/__init__.py)

  ```python title="autogpt_platform/backend/backend/integrations/oauth/__init__.py"
  ```python title="backend/integrations/oauth/__init__.py"
  --8<-- "autogpt_platform/backend/backend/integrations/oauth/__init__.py:HANDLERS_BY_NAMEExample"
  ```

- Adding `{provider}_client_id` and `{provider}_client_secret` to the application's `Secrets` under [`util/settings.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/util/settings.py)

  ```python title="autogpt_platform/backend/backend/util/settings.py"
  ```python title="backend/util/settings.py"
  --8<-- "autogpt_platform/backend/backend/util/settings.py:OAuthServerCredentialsExample"
  ```
@ -286,13 +286,13 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro

- GitHub blocks with API key + OAuth2 support: [`blocks/github`](https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt_platform/backend/backend/blocks/github/)

  ```python title="blocks/github/issues.py"
  ```python title="backend/blocks/github/issues.py"
  --8<-- "autogpt_platform/backend/backend/blocks/github/issues.py:GithubCommentBlockExample"
  ```

- GitHub OAuth2 handler: [`integrations/oauth/github.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/github.py)

  ```python title="blocks/github/github.py"
  ```python title="backend/integrations/oauth/github.py"
  --8<-- "autogpt_platform/backend/backend/integrations/oauth/github.py:GithubOAuthHandlerExample"
  ```
@ -300,18 +300,148 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro

- Google OAuth2 handler: [`integrations/oauth/google.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/google.py)

  ```python title="integrations/oauth/google.py"
  ```python title="backend/integrations/oauth/google.py"
  --8<-- "autogpt_platform/backend/backend/integrations/oauth/google.py:GoogleOAuthHandlerExample"
  ```
You can see that Google has defined a `DEFAULT_SCOPES` variable; this is used to set the scopes that are always requested, no matter what the user asks for.

```python title="blocks/google/_auth.py"
```python title="backend/blocks/google/_auth.py"
--8<-- "autogpt_platform/backend/backend/blocks/google/_auth.py:GoogleOAuthIsConfigured"
```

You can also see that `GOOGLE_OAUTH_IS_CONFIGURED` is used to disable blocks that require OAuth when OAuth is not configured. This check lives in each block's `__init__` method: because there is no API key fallback for Google blocks, OAuth must be configured before users can run them.
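As a rough illustration of those two mechanisms (the scope URLs and the configuration check below are assumptions; the real definitions live in `backend/blocks/google/_auth.py`):

```python
import os

# Scopes requested on every Google OAuth flow, regardless of what a block asks for:
DEFAULT_SCOPES = [
    "https://www.googleapis.com/auth/userinfo.email",
    "https://www.googleapis.com/auth/userinfo.profile",
]

# True only when both OAuth secrets are present (assumed check):
GOOGLE_OAUTH_IS_CONFIGURED = bool(
    os.getenv("GOOGLE_CLIENT_ID") and os.getenv("GOOGLE_CLIENT_SECRET")
)

class GoogleBlockStub:
    """Illustrative stand-in for a Google block's __init__ gating logic."""

    def __init__(self):
        # No API key fallback exists, so the block is disabled unless OAuth is configured.
        self.disabled = not GOOGLE_OAUTH_IS_CONFIGURED
```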
### Webhook-triggered Blocks

Webhook-triggered blocks allow your agent to respond to external events in real time.
These blocks are triggered by incoming webhooks from third-party services
rather than being executed manually.
Creating and running a webhook-triggered block involves three main components:

- The block itself, which specifies:
    - Inputs for the user to select a resource and events to subscribe to
    - A `credentials` input with the scopes needed to manage webhooks
    - Logic to turn the webhook payload into outputs for the webhook block
- The `WebhooksManager` for the corresponding webhook service provider, which handles:
    - (De)registering webhooks with the provider
    - Parsing and validating incoming webhook payloads
- The credentials system for the corresponding service provider, which may include an `OAuthHandler`
There is more going on under the hood, e.g. to store and retrieve webhooks and their
links to nodes, but to add a webhook-triggered block you shouldn't need to make changes
to those parts of the system.
#### Creating a Webhook-triggered Block

To create a webhook-triggered block, follow these additional steps on top of the basic block creation process:

1. **Define `webhook_config`** in your block's `__init__` method. A rough sketch of such a config follows the collapsed examples below.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-webhook_config"
    ```
    </details>

    <details>
    <summary><code>BlockWebhookConfig</code> definition</summary>

    ```python title="backend/data/block.py"
    --8<-- "autogpt_platform/backend/backend/data/block.py:BlockWebhookConfig"
    ```
    </details>
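    For orientation, here is a rough sketch of what a `webhook_config` can look like, loosely modeled on the GitHub trigger example. The field values (and several of the field names) are assumptions for illustration; `backend/data/block.py` holds the authoritative `BlockWebhookConfig` definition.

    ```python
    from backend.data.block import BlockWebhookConfig  # location per the docs above

    # Illustrative only: values and provider-specific fields are assumed.
    webhook_config = BlockWebhookConfig(
        provider="github",            # selects the matching WebhooksManager
        webhook_type="repo",          # provider-specific webhook category (assumed)
        resource_format="{repo}",     # ties block inputs to the watched resource (assumed)
        event_filter_input="events",  # must equal the event-filter field name in the Input schema
        event_format="pull_request.{event}",  # maps filter fields to provider event names (assumed)
    )
    ```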
2. **Define event filter input** in your block's Input schema.
    This allows the user to select which specific types of events will trigger the block in their agent.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-event-filter"
    ```
    </details>

    - The name of the input field (`events` in this case) must match `webhook_config.event_filter_input`.
    - The event filter itself must be a Pydantic model with only boolean fields; a minimal sketch follows.
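    A minimal sketch of such an event filter, with event names assumed for illustration:

    ```python
    from pydantic import BaseModel

    class PullRequestEventsFilter(BaseModel):
        # One boolean per subscribable event; only boolean fields are allowed.
        opened: bool = False
        closed: bool = False
        synchronize: bool = False

    # The Input schema field carrying this model must be named to match
    # webhook_config.event_filter_input ("events" in the example above).
    ```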
3. **Include payload field** in your block's Input schema. A sketch of this field follows the example.

    <details>
    <summary>Example: <code>GitHubTriggerBase</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-payload-field"
    ```
    </details>
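    The payload field is typically a hidden `dict` input that the platform fills with the incoming webhook body. A sketch, assuming the guide's `SchemaField` helper and `BlockSchema` base:

    ```python
    from backend.data.model import SchemaField  # assumed import path

    class Input(BlockSchema):  # BlockSchema as used throughout the blocks guide
        # Hidden from the user; populated by the incoming webhook at runtime.
        payload: dict = SchemaField(hidden=True, description="The raw webhook payload")
    ```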
4. **Define `credentials` input** in your block's Input schema.
    - Its scopes must be sufficient to manage a user's webhooks through the provider's API
    - See [Blocks with authentication](#blocks-with-authentication) for further details
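    A sketch of what this can look like; the helper names and the exact `CredentialsField` signature here are assumptions, and the right scope set depends on the provider:

    ```python
    from backend.data.model import CredentialsField, CredentialsMetaInput  # assumed path

    class Input(BlockSchema):
        credentials: CredentialsMetaInput = CredentialsField(
            provider="github",
            supported_credential_types={"api_key", "oauth2"},
            required_scopes={"admin:repo_hook"},  # enough to manage repo webhooks (assumed)
            description="Credentials able to create and delete webhooks on the resource",
        )
    ```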
5. **Process webhook payload** and output relevant parts of it in your block's `run` method.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "payload", input_data.payload
        yield "sender", input_data.payload["sender"]
        yield "event", input_data.payload["action"]
        yield "number", input_data.payload["number"]
        yield "pull_request", input_data.payload["pull_request"]
    ```

    Note that the `credentials` parameter can be omitted if the credentials
    aren't used at block runtime, like in the example.
    </details>
#### Adding a Webhooks Manager

To add support for a new webhook provider, you'll need to create a WebhooksManager that implements the `BaseWebhooksManager` interface:

```python title="backend/integrations/webhooks/base.py"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager1"

--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager2"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager3"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager4"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager5"
```

And add a reference to your `WebhooksManager` class in `WEBHOOK_MANAGERS_BY_NAME` (a skeleton sketch follows the snippet):

```python title="backend/integrations/webhooks/__init__.py"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/__init__.py:WEBHOOK_MANAGERS_BY_NAME"
```
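As a rough starting point, a new manager might look like the skeleton below. The method names follow the abstract interface as described, but the exact signatures live in `backend/integrations/webhooks/base.py`, so the `*args, **kwargs` stubs are placeholders:

```python
from backend.integrations.webhooks.base import BaseWebhooksManager  # path per the docs

class MyProviderWebhooksManager(BaseWebhooksManager):
    """Skeleton for a hypothetical provider; implement against the real base class."""

    PROVIDER_NAME = "myprovider"  # assumed registry key

    async def _register_webhook(self, *args, **kwargs):
        # Call the provider's API to create the webhook and return its details.
        raise NotImplementedError

    async def _deregister_webhook(self, *args, **kwargs):
        # Call the provider's API to delete the webhook.
        raise NotImplementedError

    @classmethod
    async def validate_payload(cls, *args, **kwargs):
        # Verify the signature header and return the parsed payload and event type.
        raise NotImplementedError

# Registration, mirroring WEBHOOK_MANAGERS_BY_NAME as shown above:
# WEBHOOK_MANAGERS_BY_NAME["myprovider"] = MyProviderWebhooksManager
```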
#### Example: GitHub Webhook Integration

<details>
<summary>
GitHub Webhook triggers: <a href="https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/blocks/github/triggers.py"><code>blocks/github/triggers.py</code></a>
</summary>

```python title="backend/blocks/github/triggers.py"
--8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:GithubTriggerExample"
```
</details>

<details>
<summary>
GitHub Webhooks Manager: <a href="https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/webhooks/github.py"><code>integrations/webhooks/github.py</code></a>
</summary>

```python title="backend/integrations/webhooks/github.py"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/github.py:GithubWebhooksManager"
```
</details>
## Key Points to Remember

- **Unique ID**: Give your block a unique ID in the `__init__` method.
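  A fresh ID is typically generated once and then hard-coded; for example:

  ```python
  import uuid

  print(uuid.uuid4())  # paste the printed value as the block's id in __init__
  ```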