Mirror of https://github.com/Significant-Gravitas/Auto-GPT.git
Synced 2025-01-08 11:57:32 +08:00

Commit f8b00e55d0: Merge branch 'dev' of github.com:Significant-Gravitas/AutoGPT into dev
@@ -23,7 +23,7 @@ jobs:
   - id: build
     name: Build image
-    uses: docker/build-push-action@v5
+    uses: docker/build-push-action@v6
     with:
       context: classic/
       file: classic/Dockerfile.autogpt
@@ -47,7 +47,7 @@ jobs:
   - id: build
     name: Build image
-    uses: docker/build-push-action@v5
+    uses: docker/build-push-action@v6
     with:
       context: classic/
       file: classic/Dockerfile.autogpt
@@ -117,7 +117,7 @@ jobs:
   - id: build
     name: Build image
-    uses: docker/build-push-action@v5
+    uses: docker/build-push-action@v6
    with:
       context: classic/
       file: classic/Dockerfile.autogpt
@@ -42,7 +42,7 @@ jobs:
   - id: build
     name: Build image
-    uses: docker/build-push-action@v5
+    uses: docker/build-push-action@v6
     with:
       context: classic/
       file: Dockerfile.autogpt
.github/workflows/codeql.yml (1 change, vendored)
@@ -16,6 +16,7 @@ on:
     branches: [ "master", "release-*", "dev" ]
   pull_request:
     branches: [ "master", "release-*", "dev" ]
+  merge_group:
   schedule:
     - cron: '15 4 * * 0'
.github/workflows/platform-backend-ci.yml (1 change, vendored)
@@ -11,6 +11,7 @@ on:
     paths:
       - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
+  merge_group:

 concurrency:
   group: ${{ format('backend-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
.github/workflows/platform-frontend-ci.yml (1 change, vendored)
@@ -10,6 +10,7 @@ on:
     paths:
       - ".github/workflows/platform-frontend-ci.yml"
      - "autogpt_platform/frontend/**"
+  merge_group:

 defaults:
   run:
.github/workflows/platform-market-ci.yml (1 change, vendored)
@@ -11,6 +11,7 @@ on:
     paths:
       - ".github/workflows/platform-market-ci.yml"
      - "autogpt_platform/market/**"
+  merge_group:

 concurrency:
   group: ${{ format('backend-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
.github/workflows/repo-workflow-checker.yml (1 change, vendored)
@@ -2,6 +2,7 @@ name: Repo - PR Status Checker
 on:
   pull_request:
     types: [opened, synchronize, reopened]
+  merge_group:

 jobs:
   status-check:
@@ -7,13 +7,18 @@ from typing import Dict, List, Tuple

 CHECK_INTERVAL = 30


 def get_environment_variables() -> Tuple[str, str, str, str, str]:
     """Retrieve and return necessary environment variables."""
     try:
         with open(os.environ["GITHUB_EVENT_PATH"]) as f:
             event = json.load(f)

-        sha = event["pull_request"]["head"]["sha"]
+        # Handle both PR and merge group events
+        if "pull_request" in event:
+            sha = event["pull_request"]["head"]["sha"]
+        else:
+            sha = os.environ["GITHUB_SHA"]

         return (
             os.environ["GITHUB_API_URL"],
.gitignore (2 changes, vendored)
@@ -171,3 +171,5 @@ ig*
 .github_access_token
 LICENSE.rtf
 autogpt_platform/backend/settings.py
+/.auth
+/autogpt_platform/frontend/.auth
@@ -35,7 +35,7 @@ The AutoGPT frontend is where users interact with our powerful AI automation platform

 **Monitoring and Analytics:** Keep track of your agents' performance and gain insights to continually improve your automation processes.

-[Read this guide](https://docs.agpt.co/server/new_blocks/) to learn how to build your own custom blocks.
+[Read this guide](https://docs.agpt.co/platform/new_blocks/) to learn how to build your own custom blocks.

 ### 💽 AutoGPT Server
@@ -1,14 +1,17 @@
-from typing import NamedTuple
-import secrets
 import hashlib
+import secrets
+from typing import NamedTuple


 class APIKeyContainer(NamedTuple):
     """Container for API key parts."""

     raw: str
     prefix: str
     postfix: str
     hash: str


 class APIKeyManager:
     PREFIX: str = "agpt_"
     PREFIX_LENGTH: int = 8
@@ -19,9 +22,9 @@ class APIKeyManager:
         raw_key = f"{self.PREFIX}{secrets.token_urlsafe(32)}"
         return APIKeyContainer(
             raw=raw_key,
-            prefix=raw_key[:self.PREFIX_LENGTH],
-            postfix=raw_key[-self.POSTFIX_LENGTH:],
-            hash=hashlib.sha256(raw_key.encode()).hexdigest()
+            prefix=raw_key[: self.PREFIX_LENGTH],
+            postfix=raw_key[-self.POSTFIX_LENGTH :],
+            hash=hashlib.sha256(raw_key.encode()).hexdigest(),
         )

     def verify_api_key(self, provided_key: str, stored_hash: str) -> bool:
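The body of verify_api_key lies outside this hunk. A plausible sketch, offered as an assumption rather than the commit's actual code, would recompute the hash and compare it in constant time:

    import hashlib
    import secrets

    def verify_api_key(self, provided_key: str, stored_hash: str) -> bool:
        # Assumed implementation: reject keys without the expected prefix,
        # then compare SHA-256 hashes in constant time to avoid timing leaks.
        if not provided_key.startswith(self.PREFIX):
            return False
        computed = hashlib.sha256(provided_key.encode()).hexdigest()
        return secrets.compare_digest(computed, stored_hash)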
@@ -1,8 +1,8 @@
 import fastapi

-from .middleware import auth_middleware
-from .models import User, DEFAULT_USER_ID, DEFAULT_EMAIL
 from .config import Settings
+from .middleware import auth_middleware
+from .models import DEFAULT_USER_ID, User


 def requires_user(payload: dict = fastapi.Depends(auth_middleware)) -> User:
@@ -0,0 +1,167 @@ (new file)
import asyncio
import contextlib
import logging
from functools import wraps
from typing import Any, Awaitable, Callable, Dict, Optional, TypeVar, Union, cast

import ldclient
from fastapi import HTTPException
from ldclient import Context, LDClient
from ldclient.config import Config
from typing_extensions import ParamSpec

from .config import SETTINGS

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)

P = ParamSpec("P")
T = TypeVar("T")


def get_client() -> LDClient:
    """Get the LaunchDarkly client singleton."""
    return ldclient.get()


def initialize_launchdarkly() -> None:
    sdk_key = SETTINGS.launch_darkly_sdk_key
    logger.debug(
        f"Initializing LaunchDarkly with SDK key: {'present' if sdk_key else 'missing'}"
    )

    if not sdk_key:
        logger.warning("LaunchDarkly SDK key not configured")
        return

    config = Config(sdk_key)
    ldclient.set_config(config)

    if ldclient.get().is_initialized():
        logger.info("LaunchDarkly client initialized successfully")
    else:
        logger.error("LaunchDarkly client failed to initialize")


def shutdown_launchdarkly() -> None:
    """Shutdown the LaunchDarkly client."""
    if ldclient.get().is_initialized():
        ldclient.get().close()
        logger.info("LaunchDarkly client closed successfully")


def create_context(
    user_id: str, additional_attributes: Optional[Dict[str, Any]] = None
) -> Context:
    """Create LaunchDarkly context with optional additional attributes."""
    builder = Context.builder(str(user_id)).kind("user")
    if additional_attributes:
        for key, value in additional_attributes.items():
            builder.set(key, value)
    return builder.build()


def feature_flag(
    flag_key: str,
    default: bool = False,
) -> Callable[
    [Callable[P, Union[T, Awaitable[T]]]], Callable[P, Union[T, Awaitable[T]]]
]:
    """
    Decorator for feature flag protected endpoints.
    """

    def decorator(
        func: Callable[P, Union[T, Awaitable[T]]]
    ) -> Callable[P, Union[T, Awaitable[T]]]:
        @wraps(func)
        async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                user_id = kwargs.get("user_id")
                if not user_id:
                    raise ValueError("user_id is required")

                if not get_client().is_initialized():
                    logger.warning(
                        f"LaunchDarkly not initialized, using default={default}"
                    )
                    is_enabled = default
                else:
                    context = create_context(str(user_id))
                    is_enabled = get_client().variation(flag_key, context, default)

                if not is_enabled:
                    raise HTTPException(status_code=404, detail="Feature not available")

                result = func(*args, **kwargs)
                if asyncio.iscoroutine(result):
                    return await result
                return cast(T, result)
            except Exception as e:
                logger.error(f"Error evaluating feature flag {flag_key}: {e}")
                raise

        @wraps(func)
        def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                user_id = kwargs.get("user_id")
                if not user_id:
                    raise ValueError("user_id is required")

                if not get_client().is_initialized():
                    logger.warning(
                        f"LaunchDarkly not initialized, using default={default}"
                    )
                    is_enabled = default
                else:
                    context = create_context(str(user_id))
                    is_enabled = get_client().variation(flag_key, context, default)

                if not is_enabled:
                    raise HTTPException(status_code=404, detail="Feature not available")

                return cast(T, func(*args, **kwargs))
            except Exception as e:
                logger.error(f"Error evaluating feature flag {flag_key}: {e}")
                raise

        return cast(
            Callable[P, Union[T, Awaitable[T]]],
            async_wrapper if asyncio.iscoroutinefunction(func) else sync_wrapper,
        )

    return decorator


def percentage_rollout(
    flag_key: str,
    default: bool = False,
) -> Callable[
    [Callable[P, Union[T, Awaitable[T]]]], Callable[P, Union[T, Awaitable[T]]]
]:
    """Decorator for percentage-based rollouts."""
    return feature_flag(flag_key, default)


def beta_feature(
    flag_key: Optional[str] = None,
    unauthorized_response: Any = {"message": "Not available in beta"},
) -> Callable[
    [Callable[P, Union[T, Awaitable[T]]]], Callable[P, Union[T, Awaitable[T]]]
]:
    """Decorator for beta features."""
    actual_key = f"beta-{flag_key}" if flag_key else "beta"
    return feature_flag(actual_key, False)


@contextlib.contextmanager
def mock_flag_variation(flag_key: str, return_value: Any):
    """Context manager for testing feature flags."""
    original_variation = get_client().variation
    get_client().variation = lambda key, context, default: (
        return_value if key == flag_key else original_variation(key, context, default)
    )
    try:
        yield
    finally:
        get_client().variation = original_variation
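For orientation, a minimal usage sketch of the decorator on a FastAPI route. The route path, flag key, and app wiring are made up for the example; the commit itself does not include this:

    # Hypothetical usage; route and flag key are examples only.
    import fastapi

    app = fastapi.FastAPI()

    @app.get("/v1/experimental-search")
    @feature_flag("experimental-search", default=False)
    async def experimental_search(user_id: str):
        # Reached only when the flag evaluates truthy for this user's context;
        # otherwise the decorator raises HTTPException(404).
        return {"results": []}

Note the decorator pulls user_id from keyword arguments, so the wrapped endpoint must declare a user_id parameter.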
@@ -0,0 +1,44 @@ (new file)
import pytest
from autogpt_libs.feature_flag.client import feature_flag, mock_flag_variation
from ldclient import LDClient


@pytest.fixture
def ld_client(mocker):
    client = mocker.Mock(spec=LDClient)
    mocker.patch("ldclient.get", return_value=client)
    client.is_initialized.return_value = True
    return client


@pytest.mark.asyncio
async def test_feature_flag_enabled(ld_client):
    ld_client.variation.return_value = True

    @feature_flag("test-flag")
    async def test_function(user_id: str):
        return "success"

    result = test_function(user_id="test-user")
    assert result == "success"
    ld_client.variation.assert_called_once()


@pytest.mark.asyncio
async def test_feature_flag_unauthorized_response(ld_client):
    ld_client.variation.return_value = False

    @feature_flag("test-flag")
    async def test_function(user_id: str):
        return "success"

    result = test_function(user_id="test-user")
    assert result == {"error": "disabled"}


def test_mock_flag_variation(ld_client):
    with mock_flag_variation("test-flag", True):
        assert ld_client.variation("test-flag", None, False)

    with mock_flag_variation("test-flag", False):
        assert ld_client.variation("test-flag", None, False)
@@ -0,0 +1,15 @@ (new file)
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    launch_darkly_sdk_key: str = Field(
        default="",
        description="The Launch Darkly SDK key",
        validation_alias="LAUNCH_DARKLY_SDK_KEY"
    )

    model_config = SettingsConfigDict(case_sensitive=True, extra="ignore")


SETTINGS = Settings()
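A quick sketch of how this settings object picks up its value, following standard pydantic-settings behavior (the SDK key below is a placeholder):

    # Example only; assumes the Settings class defined above is importable.
    import os

    os.environ["LAUNCH_DARKLY_SDK_KEY"] = "sdk-00000000-example"
    settings = Settings()  # reads LAUNCH_DARKLY_SDK_KEY via validation_alias
    assert settings.launch_darkly_sdk_key == "sdk-00000000-example"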
@@ -6,6 +6,7 @@ from pathlib import Path

 from pydantic import Field, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict

 from .filters import BelowLevelFilter
 from .formatters import AGPTFormatter, StructuredLoggingFormatter
@@ -1,5 +1,5 @@
 from .store import SupabaseIntegrationCredentialsStore
-from .types import Credentials, APIKeyCredentials, OAuth2Credentials
+from .types import APIKeyCredentials, Credentials, OAuth2Credentials

 __all__ = [
     "SupabaseIntegrationCredentialsStore",
@@ -5,12 +5,14 @@ from typing import TYPE_CHECKING
 from pydantic import SecretStr

 if TYPE_CHECKING:
-    from redis import Redis
     from backend.executor.database import DatabaseManager
+    from redis import Redis

 from autogpt_libs.utils.cache import thread_cached
 from autogpt_libs.utils.synchronize import RedisKeyedMutex

+from backend.util.settings import Settings
+
 from .types import (
     APIKeyCredentials,
     Credentials,
@@ -19,8 +21,6 @@ from .types import (
     UserIntegrations,
 )

-from backend.util.settings import Settings
-
 settings = Settings()

 revid_credentials = APIKeyCredentials(
@@ -305,5 +305,5 @@ class SupabaseIntegrationCredentialsStore:
         return integrations

     def locked_user_integrations(self, user_id: str):
-        key = (self.db_manager, f"user:{user_id}", "integrations")
+        key = (f"user:{user_id}", "integrations")
         return self.locks.locked(key)
@@ -1,5 +1,5 @@
-from typing import Callable, TypeVar, ParamSpec
 import threading
+from typing import Callable, ParamSpec, TypeVar

 P = ParamSpec("P")
 R = TypeVar("R")
autogpt_platform/autogpt_libs/poetry.lock (134 changes, generated)
@@ -854,6 +854,17 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke
 perf = ["ipython"]
 test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]

+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
 [[package]]
 name = "multidict"
 version = "6.1.0"
@@ -984,6 +995,21 @@ files = [
     {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
 ]

+[[package]]
+name = "pluggy"
+version = "1.5.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
 [[package]]
 name = "postgrest"
 version = "0.18.0"
@@ -1224,6 +1250,63 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
 docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
 tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

+[[package]]
+name = "pytest"
+version = "8.3.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
+    {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.5,<2"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.24.0"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
+    {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
+]
+
+[package.dependencies]
+pytest = ">=8.2,<9"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
+
+[[package]]
+name = "pytest-mock"
+version = "3.14.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+    {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
+]
+
+[package.dependencies]
+pytest = ">=6.2.5"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
@@ -1324,29 +1407,29 @@ pyasn1 = ">=0.1.3"

 [[package]]
 name = "ruff"
-version = "0.7.4"
+version = "0.8.0"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
-    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
-    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
-    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
-    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
-    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
-    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
+    {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
+    {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
+    {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
+    {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
+    {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
+    {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
+    {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
+    {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
+    {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
 ]

 [[package]]
@@ -1435,6 +1518,17 @@ files = [
 [package.dependencies]
 httpx = {version = ">=0.26,<0.28", extras = ["http2"]}

+[[package]]
+name = "tomli"
+version = "2.1.0"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
+    {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.12.2"
@@ -1750,4 +1844,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<4.0"
-content-hash = "48184ad1281689c7743b8ca23135a647dc52257d54702d88b043fe31fe27ff27"
+content-hash = "856e38a5c7e1af2144942a78afaf00c7fc947ba763e90472f594b394d81cf73e"
@@ -13,13 +13,15 @@ google-cloud-logging = "^3.11.3"
 pydantic = "^2.9.2"
 pydantic-settings = "^2.6.1"
 pyjwt = "^2.10.0"
+pytest-asyncio = "^0.24.0"
+pytest-mock = "^3.14.0"
 python = ">=3.10,<4.0"
 python-dotenv = "^1.0.1"
 supabase = "^2.10.0"

 [tool.poetry.group.dev.dependencies]
 redis = "^5.2.0"
-ruff = "^0.7.4"
+ruff = "^0.8.0"

 [build-system]
 requires = ["poetry-core"]
@@ -28,8 +28,15 @@ SUPABASE_URL=http://localhost:8000
 SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
 SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long

-# For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow for integrations to work.
-FRONTEND_BASE_URL=http://localhost:3000
+## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
+## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
+# FRONTEND_BASE_URL=http://localhost:3000
+
+## PLATFORM_BASE_URL must be set to a *publicly accessible* URL pointing to your backend
+## to use the platform's webhook-related functionality.
+## If you are developing locally, you can use something like ngrok to get a public URL
+## and tunnel it to your locally running backend.
+PLATFORM_BASE_URL=https://your-public-url-here

 ## == INTEGRATION CREDENTIALS == ##
 # Each set of server side credentials is required for the corresponding 3rd party
@@ -60,13 +60,6 @@ for block_cls in all_subclasses(Block):
     input_schema = block.input_schema.model_fields
     output_schema = block.output_schema.model_fields

-    # Prevent duplicate field name in input_schema and output_schema
-    duplicate_field_names = set(input_schema.keys()) & set(output_schema.keys())
-    if duplicate_field_names:
-        raise ValueError(
-            f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
-        )
-
     # Make sure `error` field is a string in the output schema
     if "error" in output_schema and output_schema["error"].annotation is not str:
         raise ValueError(
@@ -27,7 +27,7 @@ def get_executor_manager_client():

 @thread_cached
 def get_event_bus():
-    from backend.data.queue import RedisExecutionEventBus
+    from backend.data.execution import RedisExecutionEventBus

     return RedisExecutionEventBus()
@@ -75,11 +75,17 @@ class ConditionBlock(Block):

         value1 = input_data.value1
         if isinstance(value1, str):
-            value1 = float(value1.strip())
+            try:
+                value1 = float(value1.strip())
+            except ValueError:
+                value1 = value1.strip()

         value2 = input_data.value2
         if isinstance(value2, str):
-            value2 = float(value2.strip())
+            try:
+                value2 = float(value2.strip())
+            except ValueError:
+                value2 = value2.strip()

         yes_value = input_data.yes_value if input_data.yes_value is not None else value1
         no_value = input_data.no_value if input_data.no_value is not None else value2
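A small standalone sketch of the behavior this change enables (illustration, not the commit's code): numeric strings are still compared as floats, while non-numeric strings now fall back to plain string comparison instead of raising ValueError.

    def coerce(value):
        # Mirrors the fallback above: float when possible, stripped string otherwise.
        if isinstance(value, str):
            try:
                return float(value.strip())
            except ValueError:
                return value.strip()
        return value

    assert coerce(" 3.5 ") == 3.5       # numeric string -> float
    assert coerce("apple") == "apple"   # non-numeric string -> stripped string
    assert coerce(7) == 7               # non-strings pass through unchanged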
@@ -0,0 +1,43 @@ (new file)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class WordCharacterCountBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(
            description="Input text to count words and characters",
            placeholder="Enter your text here",
            advanced=False,
        )

    class Output(BlockSchema):
        word_count: int = SchemaField(description="Number of words in the input text")
        character_count: int = SchemaField(
            description="Number of characters in the input text"
        )
        error: str = SchemaField(
            description="Error message if the counting operation failed"
        )

    def __init__(self):
        super().__init__(
            id="ab2a782d-22cf-4587-8a70-55b59b3f9f90",
            description="Counts the number of words and characters in a given text.",
            categories={BlockCategory.TEXT},
            input_schema=WordCharacterCountBlock.Input,
            output_schema=WordCharacterCountBlock.Output,
            test_input={"text": "Hello, how are you?"},
            test_output=[("word_count", 4), ("character_count", 19)],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            text = input_data.text
            word_count = len(text.split())
            character_count = len(text)

            yield "word_count", word_count
            yield "character_count", character_count

        except Exception as e:
            yield "error", str(e)
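For orientation, a hedged sketch of invoking the block directly, assuming Input behaves like a pydantic model as BlockSchema suggests; real execution goes through the platform's graph executor:

    # Illustration only; constructing Input directly is an assumption.
    block = WordCharacterCountBlock()
    outputs = dict(block.run(WordCharacterCountBlock.Input(text="Hello, how are you?")))
    assert outputs == {"word_count": 4, "character_count": 19}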
@@ -35,9 +35,9 @@ def _get_headers(credentials: GithubCredentials) -> dict[str, str]:
     }


-def get_api(credentials: GithubCredentials) -> Requests:
+def get_api(credentials: GithubCredentials, convert_urls: bool = True) -> Requests:
     return Requests(
         trusted_origins=["https://api.github.com", "https://github.com"],
-        extra_url_validator=_convert_to_api_url,
+        extra_url_validator=_convert_to_api_url if convert_urls else None,
         extra_headers=_get_headers(credentials),
     )
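A hedged usage sketch of the new flag; the request-method name on the Requests wrapper is an assumption, not shown in this hunk:

    # Illustration only; `get` on the Requests wrapper is assumed to exist.
    api = get_api(credentials, convert_urls=False)
    # With convert_urls=False the URL is still validated against trusted_origins
    # but is not rewritten through _convert_to_api_url, so it is fetched as-is.
    response = api.get("https://api.github.com/repos/Significant-Gravitas/AutoGPT")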
@@ -0,0 +1,700 @@ (new file)
{
  "action": "synchronize",
  "number": 8358,
  "pull_request": {
    "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358",
    "id": 2128918491,
    "node_id": "PR_kwDOJKSTjM5-5Lfb",
    "html_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358",
    "diff_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.diff",
    "patch_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.patch",
    "issue_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358",
    "number": 8358,
    "state": "open",
    "locked": false,
    "title": "feat(platform, blocks): Webhook-triggered blocks",
    "user": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "body": "- Resolves #8352\r\n\r\n## Changes 🏗️\r\n\r\n- feat(blocks): Add GitHub Pull Request Trigger block\r\n\r\n### feat(platform): Add support for Webhook-triggered blocks\r\n- ⚠️ Add `PLATFORM_BASE_URL` setting\r\n\r\n- Add webhook config option and `BlockType.WEBHOOK` to `Block`\r\n  - Add check to `Block.__init__` to enforce type and shape of webhook event filter\r\n  - Add check to `Block.__init__` to enforce `payload` input on webhook blocks\r\n\r\n- Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system\r\n  - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id`\r\n  - Add `set_node_webhook(..)` in `backend.data.graph`\r\n\r\n- Add webhook-related endpoints:\r\n  - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads, and for all associated nodes create graph executions\r\n    - Add `Node.is_triggered_by_event_type(..)` helper method\r\n  - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook\r\n    - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations`\r\n\r\n- Add `backend.integrations.webhooks` module, including:\r\n  - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc.\r\n    - Add calls to these hooks in the graph create/update endpoints\r\n  - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads\r\n\r\n### Other improvements\r\n- fix(blocks): Allow having an input and output pin with the same name\r\n- feat(blocks): Allow hiding inputs (e.g. `payload`) with `SchemaField(hidden=True)`\r\n- feat(backend/data): Add `graph_id`, `graph_version` to `Node`; `user_id` to `GraphMeta`\r\n  - Add `Creatable` versions of `Node`, `GraphMeta` and `Graph` without these properties\r\n  - Add `graph_from_creatable(..)` helper function in `backend.data.graph`\r\n- refactor(backend/data): Make `RedisEventQueue` generic\r\n- refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode`\r\n- refactor(backend): Remove unused subgraph functionality\r\n\r\n## How it works\r\n- When a graph is created, the `on_graph_activate` and `on_node_activate` hooks are called on the graph and its nodes\r\n- If a webhook-triggered node has presets for all the relevant inputs, `on_node_activate` will get/create a suitable webhook and link it by setting `AgentGraphNode.webhook_id`\r\n  - `on_node_activate` uses `webhook_manager.get_suitable_webhook(..)`, which tries to find a suitable webhook (with matching requirements) or creates it if none exists yet\r\n- When a graph is deactivated (in favor of a newer/other version) or deleted, `on_graph_deactivate` and `on_node_deactivate` are called on the graph and its nodes to clean up webhooks that are no longer in use\r\n- When a valid webhook payload is received, two things happen:\r\n  1. It is broadcast on the Redis channel `webhooks/{webhook_id}/{event_type}`\r\n  2. Graph executions are initiated for all nodes triggered by this webhook\r\n\r\n## TODO\r\n- [ ] #8537\r\n- [x] #8538\r\n- [ ] #8357\r\n- [ ] ~~#8554~~ can be done in a follow-up PR\r\n- [ ] Test test test!\r\n- [ ] Add note on `repo` input of webhook blocks that the credentials used must have the right permissions for the given organization/repo\r\n- [x] Implement proper detection and graceful handling of webhook creation failing due to insufficient permissions. This should give a clear message to the user to e.g. \"give the app access to this organization in your settings\".\r\n- [ ] Nice-to-have: make a button on webhook blocks to trigger a ping and check its result. The API endpoints for this is already implemented.",
    "created_at": "2024-10-16T22:13:47Z",
    "updated_at": "2024-11-11T18:34:54Z",
    "closed_at": null,
    "merged_at": null,
    "merge_commit_sha": "cbfd0cdd8db52cdd5a3b7ce088fc0ab4617a652e",
    "assignee": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "assignees": [
      {
        "login": "Pwuts",
        "id": 12185583,
        "node_id": "MDQ6VXNlcjEyMTg1NTgz",
        "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Pwuts",
        "html_url": "https://github.com/Pwuts",
        "followers_url": "https://api.github.com/users/Pwuts/followers",
        "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
        "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
        "organizations_url": "https://api.github.com/users/Pwuts/orgs",
        "repos_url": "https://api.github.com/users/Pwuts/repos",
        "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Pwuts/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_reviewers": [
      {
        "login": "kcze",
        "id": 34861343,
        "node_id": "MDQ6VXNlcjM0ODYxMzQz",
        "avatar_url": "https://avatars.githubusercontent.com/u/34861343?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/kcze",
        "html_url": "https://github.com/kcze",
        "followers_url": "https://api.github.com/users/kcze/followers",
        "following_url": "https://api.github.com/users/kcze/following{/other_user}",
        "gists_url": "https://api.github.com/users/kcze/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/kcze/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/kcze/subscriptions",
        "organizations_url": "https://api.github.com/users/kcze/orgs",
        "repos_url": "https://api.github.com/users/kcze/repos",
        "events_url": "https://api.github.com/users/kcze/events{/privacy}",
        "received_events_url": "https://api.github.com/users/kcze/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_teams": [
      {
        "name": "DevOps",
        "id": 9547361,
        "node_id": "T_kwDOB8roIc4Aka5h",
        "slug": "devops",
        "description": "",
        "privacy": "closed",
        "notification_setting": "notifications_enabled",
        "url": "https://api.github.com/organizations/130738209/team/9547361",
        "html_url": "https://github.com/orgs/Significant-Gravitas/teams/devops",
        "members_url": "https://api.github.com/organizations/130738209/team/9547361/members{/member}",
        "repositories_url": "https://api.github.com/organizations/130738209/team/9547361/repos",
        "permission": "pull",
        "parent": null
      }
    ],
    "labels": [
      {
        "id": 5272676214,
        "node_id": "LA_kwDOJKSTjM8AAAABOkandg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/documentation",
        "name": "documentation",
        "color": "0075ca",
        "default": true,
        "description": "Improvements or additions to documentation"
      },
      {
        "id": 5410633769,
        "node_id": "LA_kwDOJKSTjM8AAAABQn-4KQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/size/xl",
        "name": "size/xl",
        "color": "E751DD",
        "default": false,
        "description": ""
      },
      {
        "id": 6892322271,
        "node_id": "LA_kwDOJKSTjM8AAAABmtB93w",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/Review%20effort%20[1-5]:%204",
        "name": "Review effort [1-5]: 4",
        "color": "d1bcf9",
        "default": false,
        "description": null
      },
      {
        "id": 7218433025,
        "node_id": "LA_kwDOJKSTjM8AAAABrkCMAQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/frontend",
        "name": "platform/frontend",
        "color": "033C07",
        "default": false,
        "description": "AutoGPT Platform - Front end"
      },
      {
        "id": 7219356193,
        "node_id": "LA_kwDOJKSTjM8AAAABrk6iIQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/backend",
        "name": "platform/backend",
        "color": "ededed",
        "default": false,
        "description": "AutoGPT Platform - Back end"
      },
      {
        "id": 7515330106,
        "node_id": "LA_kwDOJKSTjM8AAAABv_LWOg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/blocks",
        "name": "platform/blocks",
        "color": "eb5757",
        "default": false,
        "description": null
      }
    ],
    "milestone": null,
    "draft": false,
    "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits",
    "review_comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments",
    "review_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}",
    "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments",
    "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0",
    "head": {
      "label": "Significant-Gravitas:reinier/open-1961-implement-github-on-pull-request-block",
      "ref": "reinier/open-1961-implement-github-on-pull-request-block",
      "sha": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "homepage": "https://agpt.co",
        "size": 181894,
        "stargazers_count": 168203,
        "watchers_count": 168203,
        "language": "Python",
        "has_issues": true,
        "has_projects": true,
        "has_downloads": true,
        "has_wiki": true,
        "has_pages": false,
        "has_discussions": true,
        "forks_count": 44376,
        "mirror_url": null,
        "archived": false,
        "disabled": false,
        "open_issues_count": 189,
        "license": {
          "key": "other",
          "name": "Other",
          "spdx_id": "NOASSERTION",
          "url": null,
          "node_id": "MDc6TGljZW5zZTA="
        },
        "allow_forking": true,
        "is_template": false,
        "web_commit_signoff_required": false,
        "topics": [
          "ai",
          "artificial-intelligence",
          "autonomous-agents",
          "gpt-4",
          "openai",
          "python"
        ],
        "visibility": "public",
        "forks": 44376,
        "open_issues": 189,
        "watchers": 168203,
        "default_branch": "master",
        "allow_squash_merge": true,
        "allow_merge_commit": false,
        "allow_rebase_merge": false,
        "allow_auto_merge": true,
        "delete_branch_on_merge": true,
        "allow_update_branch": true,
        "use_squash_pr_title_as_default": true,
        "squash_merge_commit_message": "COMMIT_MESSAGES",
        "squash_merge_commit_title": "PR_TITLE",
        "merge_commit_message": "BLANK",
        "merge_commit_title": "PR_TITLE"
      }
    },
    "base": {
      "label": "Significant-Gravitas:dev",
      "ref": "dev",
      "sha": "0b5b95eff5e18c1e162d2b30b66a7be2bed1cbc2",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"homepage": "https://agpt.co",
|
||||
"size": 181894,
|
||||
"stargazers_count": 168203,
|
||||
"watchers_count": 168203,
|
||||
"language": "Python",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"has_discussions": true,
|
||||
"forks_count": 44376,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 189,
|
||||
"license": {
|
||||
"key": "other",
|
||||
"name": "Other",
|
||||
"spdx_id": "NOASSERTION",
|
||||
"url": null,
|
||||
"node_id": "MDc6TGljZW5zZTA="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"web_commit_signoff_required": false,
|
||||
"topics": [
|
||||
"ai",
|
||||
"artificial-intelligence",
|
||||
"autonomous-agents",
|
||||
"gpt-4",
|
||||
"openai",
|
||||
"python"
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 44376,
|
||||
"open_issues": 189,
|
||||
"watchers": 168203,
|
||||
"default_branch": "master",
|
||||
"allow_squash_merge": true,
|
||||
"allow_merge_commit": false,
|
||||
"allow_rebase_merge": false,
|
||||
"allow_auto_merge": true,
|
||||
"delete_branch_on_merge": true,
|
||||
"allow_update_branch": true,
|
||||
"use_squash_pr_title_as_default": true,
|
||||
"squash_merge_commit_message": "COMMIT_MESSAGES",
|
||||
"squash_merge_commit_title": "PR_TITLE",
|
||||
"merge_commit_message": "BLANK",
|
||||
"merge_commit_title": "PR_TITLE"
|
||||
}
|
||||
},
|
||||
"_links": {
|
||||
"self": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358"
|
||||
},
|
||||
"html": {
|
||||
"href": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358"
|
||||
},
|
||||
"issue": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358"
|
||||
},
|
||||
"comments": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments"
|
||||
},
|
||||
"review_comments": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments"
|
||||
},
|
||||
"review_comment": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}"
|
||||
},
|
||||
"commits": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits"
|
||||
},
|
||||
"statuses": {
|
||||
"href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0"
|
||||
}
|
||||
},
|
||||
"author_association": "MEMBER",
|
||||
"auto_merge": null,
|
||||
"active_lock_reason": null,
|
||||
"merged": false,
|
||||
"mergeable": null,
|
||||
"rebaseable": null,
|
||||
"mergeable_state": "unknown",
|
||||
"merged_by": null,
|
||||
"comments": 12,
|
||||
"review_comments": 29,
|
||||
"maintainer_can_modify": false,
|
||||
"commits": 62,
|
||||
"additions": 1674,
|
||||
"deletions": 331,
|
||||
"changed_files": 36
|
||||
},
|
||||
"before": "f40aef87672203f47bbbd53f83fae0964c5624da",
|
||||
"after": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
|
||||
"repository": {
|
||||
"id": 614765452,
|
||||
"node_id": "R_kgDOJKSTjA",
|
||||
"name": "AutoGPT",
|
||||
"full_name": "Significant-Gravitas/AutoGPT",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/Significant-Gravitas",
|
||||
"html_url": "https://github.com/Significant-Gravitas",
|
||||
"followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
|
||||
"following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
|
||||
"repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
|
||||
"events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
|
||||
"type": "Organization",
|
||||
"user_view_type": "public",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
|
||||
"keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
|
||||
"collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
|
||||
"hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
|
||||
"issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
|
||||
"events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
|
||||
"assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
|
||||
"branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
|
||||
"tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
|
||||
"blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
|
||||
"trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
|
||||
"statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
|
||||
"languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
|
||||
"stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
|
||||
"contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
|
||||
"subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
|
||||
"subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
|
||||
"commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
|
||||
"git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
|
||||
"comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
|
||||
"issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
|
||||
"contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
|
||||
"compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
|
||||
"merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
|
||||
"archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
|
||||
"issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
|
||||
"pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
|
||||
"milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
|
||||
"notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
|
||||
"releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
|
||||
"deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2024-11-11T18:16:29Z",
|
||||
"pushed_at": "2024-11-11T18:34:52Z",
|
||||
"git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
|
||||
"ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
|
||||
"clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
|
||||
"svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"homepage": "https://agpt.co",
|
||||
"size": 181894,
|
||||
"stargazers_count": 168203,
|
||||
"watchers_count": 168203,
|
||||
"language": "Python",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"has_discussions": true,
|
||||
"forks_count": 44376,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 189,
|
||||
"license": {
|
||||
"key": "other",
|
||||
"name": "Other",
|
||||
"spdx_id": "NOASSERTION",
|
||||
"url": null,
|
||||
"node_id": "MDc6TGljZW5zZTA="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"web_commit_signoff_required": false,
|
||||
"topics": [
|
||||
"ai",
|
||||
"artificial-intelligence",
|
||||
"autonomous-agents",
|
||||
"gpt-4",
|
||||
"openai",
|
||||
"python"
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 44376,
|
||||
"open_issues": 189,
|
||||
"watchers": 168203,
|
||||
"default_branch": "master",
|
||||
"custom_properties": {
|
||||
|
||||
}
|
||||
},
|
||||
"organization": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"url": "https://api.github.com/orgs/Significant-Gravitas",
|
||||
"repos_url": "https://api.github.com/orgs/Significant-Gravitas/repos",
|
||||
"events_url": "https://api.github.com/orgs/Significant-Gravitas/events",
|
||||
"hooks_url": "https://api.github.com/orgs/Significant-Gravitas/hooks",
|
||||
"issues_url": "https://api.github.com/orgs/Significant-Gravitas/issues",
|
||||
"members_url": "https://api.github.com/orgs/Significant-Gravitas/members{/member}",
|
||||
"public_members_url": "https://api.github.com/orgs/Significant-Gravitas/public_members{/member}",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"description": ""
|
||||
},
|
||||
"enterprise": {
|
||||
"id": 149607,
|
||||
"slug": "significant-gravitas",
|
||||
"name": "Significant Gravitas",
|
||||
"node_id": "E_kgDOAAJIZw",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/b/149607?v=4",
|
||||
"description": "The creators of AutoGPT",
|
||||
"website_url": "discord.gg/autogpt",
|
||||
"html_url": "https://github.com/enterprises/significant-gravitas",
|
||||
"created_at": "2024-04-18T17:43:53Z",
|
||||
"updated_at": "2024-10-23T16:59:55Z"
|
||||
},
|
||||
"sender": {
|
||||
"login": "Pwuts",
|
||||
"id": 12185583,
|
||||
"node_id": "MDQ6VXNlcjEyMTg1NTgz",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/Pwuts",
|
||||
"html_url": "https://github.com/Pwuts",
|
||||
"followers_url": "https://api.github.com/users/Pwuts/followers",
|
||||
"following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/Pwuts/orgs",
|
||||
"repos_url": "https://api.github.com/users/Pwuts/repos",
|
||||
"events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/Pwuts/received_events",
|
||||
"type": "User",
|
||||
"user_view_type": "public",
|
||||
"site_admin": false
|
||||
}
|
||||
}
|
@ -46,15 +46,27 @@ class GithubCommentBlock(Block):
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubCommentBlock.Input,
            output_schema=GithubCommentBlock.Output,
            test_input={
                "issue_url": "https://github.com/owner/repo/issues/1",
                "comment": "This is a test comment.",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_input=[
                {
                    "issue_url": "https://github.com/owner/repo/issues/1",
                    "comment": "This is a test comment.",
                    "credentials": TEST_CREDENTIALS_INPUT,
                },
                {
                    "issue_url": "https://github.com/owner/repo/pull/1",
                    "comment": "This is a test comment.",
                    "credentials": TEST_CREDENTIALS_INPUT,
                },
            ],
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", 1337),
                ("url", "https://github.com/owner/repo/issues/1#issuecomment-1337"),
                ("id", 1337),
                (
                    "url",
                    "https://github.com/owner/repo/issues/1#issuecomment-1337",
                ),
            ],
            test_mock={
                "post_comment": lambda *args, **kwargs: (
@ -70,6 +82,8 @@ class GithubCommentBlock(Block):
    ) -> tuple[int, str]:
        api = get_api(credentials)
        data = {"body": body_text}
        if "pull" in issue_url:
            issue_url = issue_url.replace("pull", "issues")
        comments_url = issue_url + "/comments"
        response = api.post(comments_url, json=data)
        comment = response.json()
@ -234,9 +248,12 @@ class GithubReadIssueBlock(Block):
            credentials,
            input_data.issue_url,
        )
        yield "title", title
        yield "body", body
        yield "user", user
        if title:
            yield "title", title
        if body:
            yield "body", body
        if user:
            yield "user", user


class GithubListIssuesBlock(Block):
156
autogpt_platform/backend/backend/blocks/github/triggers.py
Normal file
@ -0,0 +1,156 @@
import json
import logging
from pathlib import Path

from pydantic import BaseModel

from backend.data.block import (
    Block,
    BlockCategory,
    BlockOutput,
    BlockSchema,
    BlockWebhookConfig,
)
from backend.data.model import SchemaField

from ._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GithubCredentialsField,
    GithubCredentialsInput,
)

logger = logging.getLogger(__name__)


# --8<-- [start:GithubTriggerExample]
class GitHubTriggerBase:
    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        repo: str = SchemaField(
            description=(
                "Repository to subscribe to.\n\n"
                "**Note:** Make sure your GitHub credentials have permissions "
                "to create webhooks on this repo."
            ),
            placeholder="{owner}/{repo}",
        )
        # --8<-- [start:example-payload-field]
        payload: dict = SchemaField(hidden=True, default={})
        # --8<-- [end:example-payload-field]

    class Output(BlockSchema):
        payload: dict = SchemaField(
            description="The complete webhook payload that was received from GitHub. "
            "Includes information about the affected resource (e.g. pull request), "
            "the event, and the user who triggered the event."
        )
        triggered_by_user: dict = SchemaField(
            description="Object representing the GitHub user who triggered the event"
        )
        error: str = SchemaField(
            description="Error message if the payload could not be processed"
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "payload", input_data.payload
        yield "triggered_by_user", input_data.payload["sender"]


class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
    EXAMPLE_PAYLOAD_FILE = (
        Path(__file__).parent / "example_payloads" / "pull_request.synchronize.json"
    )

    # --8<-- [start:example-event-filter]
    class Input(GitHubTriggerBase.Input):
        class EventsFilter(BaseModel):
            """
            https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request
            """

            opened: bool = False
            edited: bool = False
            closed: bool = False
            reopened: bool = False
            synchronize: bool = False
            assigned: bool = False
            unassigned: bool = False
            labeled: bool = False
            unlabeled: bool = False
            converted_to_draft: bool = False
            locked: bool = False
            unlocked: bool = False
            enqueued: bool = False
            dequeued: bool = False
            milestoned: bool = False
            demilestoned: bool = False
            ready_for_review: bool = False
            review_requested: bool = False
            review_request_removed: bool = False
            auto_merge_enabled: bool = False
            auto_merge_disabled: bool = False

        events: EventsFilter = SchemaField(
            title="Events", description="The events to subscribe to"
        )
    # --8<-- [end:example-event-filter]

    class Output(GitHubTriggerBase.Output):
        event: str = SchemaField(
            description="The PR event that triggered the webhook (e.g. 'opened')"
        )
        number: int = SchemaField(description="The number of the affected pull request")
        pull_request: dict = SchemaField(
            description="Object representing the affected pull request"
        )
        pull_request_url: str = SchemaField(
            description="The URL of the affected pull request"
        )

    def __init__(self):
        from backend.integrations.webhooks.github import GithubWebhookType

        example_payload = json.loads(self.EXAMPLE_PAYLOAD_FILE.read_text())

        super().__init__(
            id="6c60ec01-8128-419e-988f-96a063ee2fea",
            description="This block triggers on pull request events and outputs the event type and payload.",
            categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
            input_schema=GithubPullRequestTriggerBlock.Input,
            output_schema=GithubPullRequestTriggerBlock.Output,
            # --8<-- [start:example-webhook_config]
            webhook_config=BlockWebhookConfig(
                provider="github",
                webhook_type=GithubWebhookType.REPO,
                resource_format="{repo}",
                event_filter_input="events",
                event_format="pull_request.{event}",
            ),
            # --8<-- [end:example-webhook_config]
            test_input={
                "repo": "Significant-Gravitas/AutoGPT",
                "events": {"opened": True, "synchronize": True},
                "credentials": TEST_CREDENTIALS_INPUT,
                "payload": example_payload,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("payload", example_payload),
                ("triggered_by_user", example_payload["sender"]),
                ("event", example_payload["action"]),
                ("number", example_payload["number"]),
                ("pull_request", example_payload["pull_request"]),
                ("pull_request_url", example_payload["pull_request"]["html_url"]),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:  # type: ignore
        yield from super().run(input_data, **kwargs)
        yield "event", input_data.payload["action"]
        yield "number", input_data.payload["number"]
        yield "pull_request", input_data.payload["pull_request"]
        yield "pull_request_url", input_data.payload["pull_request"]["html_url"]


# --8<-- [end:GithubTriggerExample]
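As a side note on the trigger above: a minimal sketch (not part of this commit) of how an EventsFilter selection maps to subscribed event names via the `event_format` template from the webhook config. `format_events` is a hypothetical helper; only events explicitly enabled in the filter are included.

def format_events(
    event_filter: dict[str, bool], event_format: str = "pull_request.{event}"
) -> list[str]:
    # Apply the template to each enabled event, mirroring how the event
    # filter input is consumed.
    return [
        event_format.format(event=name)
        for name, enabled in event_filter.items()
        if enabled
    ]


assert format_events({"opened": True, "synchronize": True, "closed": False}) == [
    "pull_request.opened",
    "pull_request.synchronize",
]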
@ -526,7 +526,7 @@ class AIStructuredResponseGeneratorBlock(Block):
class AITextGeneratorBlock(Block):
    class Input(BlockSchema):
        prompt: str = SchemaField(
            description="The prompt to send to the language model.",
            description="The prompt to send to the language model. You can use any of the {keys} from Prompt Values to fill in the prompt with values from the prompt values dictionary by putting them in curly braces.",
            placeholder="Enter your prompt here...",
        )
        model: LlmModel = SchemaField(

@ -20,9 +20,12 @@ from prisma.models import AgentBlock
from pydantic import BaseModel

from backend.util import json
from backend.util.settings import Config

from .model import CREDENTIALS_FIELD_NAME, ContributorDetails, CredentialsMetaInput

app_config = Config()

BlockData = tuple[str, Any]  # Input & Output data should be a tuple of (name, data).
BlockInput = dict[str, Any]  # Input: 1 input pin consumes 1 data.
BlockOutput = Generator[BlockData, None, None]  # Output: 1 output pin produces n data.
@ -34,6 +37,7 @@ class BlockType(Enum):
    INPUT = "Input"
    OUTPUT = "Output"
    NOTE = "Note"
    WEBHOOK = "Webhook"
    AGENT = "Agent"


@ -94,15 +98,7 @@ class BlockSchema(BaseModel):

    @classmethod
    def validate_data(cls, data: BlockInput) -> str | None:
        """
        Validate the data against the schema.
        Returns the validation error message if the data does not match the schema.
        """
        try:
            jsonschema.validate(data, cls.jsonschema())
            return None
        except jsonschema.ValidationError as e:
            return str(e)
        return json.validate_with_jsonschema(schema=cls.jsonschema(), data=data)

    @classmethod
    def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
@ -185,6 +181,41 @@ class EmptySchema(BlockSchema):
    pass


# --8<-- [start:BlockWebhookConfig]
class BlockWebhookConfig(BaseModel):
    provider: str
    """The service provider that the webhook connects to"""

    webhook_type: str
    """
    Identifier for the webhook type. E.g. GitHub has repo and organization level hooks.

    Only for use in the corresponding `WebhooksManager`.
    """

    resource_format: str
    """
    Template string for the resource that a block instance subscribes to.
    Fields will be filled from the block's inputs (except `payload`).

    Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented)

    Only for use in the corresponding `WebhooksManager`.
    """

    event_filter_input: str
    """Name of the block's event filter input."""

    event_format: str = "{event}"
    """
    Template string for the event(s) that a block instance subscribes to.
    Applied individually to each event selected in the event filter input.

    Example: `"pull_request.{event}"` -> `"pull_request.opened"`
    """
# --8<-- [end:BlockWebhookConfig]


class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
    def __init__(
        self,
@ -201,6 +232,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
        disabled: bool = False,
        static_output: bool = False,
        block_type: BlockType = BlockType.STANDARD,
        webhook_config: Optional[BlockWebhookConfig] = None,
    ):
        """
        Initialize the block with the given schema.
@ -231,9 +263,38 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
        self.contributors = contributors or set()
        self.disabled = disabled
        self.static_output = static_output
        self.block_type = block_type
        self.block_type = block_type if not webhook_config else BlockType.WEBHOOK
        self.webhook_config = webhook_config
        self.execution_stats = {}

        if self.webhook_config:
            # Enforce shape of webhook event filter
            event_filter_field = self.input_schema.model_fields[
                self.webhook_config.event_filter_input
            ]
            if not (
                isinstance(event_filter_field.annotation, type)
                and issubclass(event_filter_field.annotation, BaseModel)
                and all(
                    field.annotation is bool
                    for field in event_filter_field.annotation.model_fields.values()
                )
            ):
                raise NotImplementedError(
                    f"{self.name} has an invalid webhook event selector: "
                    "field must be a BaseModel and all its fields must be boolean"
                )

            # Enforce presence of 'payload' input
            if "payload" not in self.input_schema.model_fields:
                raise TypeError(
                    f"{self.name} is webhook-triggered but has no 'payload' input"
                )

            # Disable webhook-triggered block if webhook functionality not available
            if not app_config.platform_base_url:
                self.disabled = True

    @classmethod
    def create(cls: Type["Block"]) -> "Block":
        return cls()
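For context on the webhook checks in `Block.__init__` above: a standalone sketch (assuming pydantic v2; `is_valid_event_filter`, `GoodFilter`, and `BadFilter` are names invented here) of the event-filter shape rule — the filter must be a BaseModel whose fields are all booleans.

from pydantic import BaseModel


def is_valid_event_filter(annotation: type) -> bool:
    # Mirrors the shape check above: a BaseModel with only boolean fields.
    return (
        isinstance(annotation, type)
        and issubclass(annotation, BaseModel)
        and all(f.annotation is bool for f in annotation.model_fields.values())
    )


class GoodFilter(BaseModel):
    created: bool = False
    deleted: bool = False


class BadFilter(BaseModel):
    created: bool = False
    label: str = ""  # non-boolean field -> rejected


assert is_valid_event_filter(GoodFilter)
assert not is_valid_event_filter(BadFilter)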
@ -11,6 +11,8 @@ from backend.data.block_cost_config import BLOCK_COSTS
from backend.data.cost import BlockCost, BlockCostType
from backend.util.settings import Config

config = Config()


class UserCreditBase(ABC):
    def __init__(self, num_user_credits_refill: int):
@ -202,7 +204,6 @@ class DisabledUserCredit(UserCreditBase):


def get_user_credit_model() -> UserCreditBase:
    config = Config()
    if config.enable_credit.lower() == "true":
        return UserCredit(config.num_user_credits_refill)
    else:

@ -1,7 +1,7 @@
from collections import defaultdict
from datetime import datetime, timezone
from multiprocessing import Manager
from typing import Any, Generic, TypeVar
from typing import Any, AsyncGenerator, Generator, Generic, TypeVar

from prisma.enums import AgentExecutionStatus
from prisma.models import (
@ -14,7 +14,9 @@ from pydantic import BaseModel

from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
from backend.data.queue import AsyncRedisEventBus, RedisEventBus
from backend.util import json, mock
from backend.util.settings import Config


class GraphExecution(BaseModel):
@ -271,7 +273,6 @@ async def update_graph_execution_stats(
    graph_exec_id: str,
    stats: dict[str, Any],
) -> ExecutionResult:

    status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
    res = await AgentGraphExecution.prisma().update(
        where={"id": graph_exec_id},
@ -471,3 +472,42 @@ async def get_incomplete_executions(
        include=EXECUTION_RESULT_INCLUDE,
    )
    return [ExecutionResult.from_db(execution) for execution in executions]


# --------------------- Event Bus --------------------- #

config = Config()


class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    def publish(self, res: ExecutionResult):
        self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")

    def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> Generator[ExecutionResult, None, None]:
        for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
            yield execution_result


class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    async def publish(self, res: ExecutionResult):
        await self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")

    async def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> AsyncGenerator[ExecutionResult, None]:
        async for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
            yield execution_result
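A brief usage sketch (not from this commit) of the execution event bus above: channel keys are `{graph_id}/{graph_exec_id}`, and `*` acts as a Redis pattern wildcard. `watch_graph` is a hypothetical consumer.

import asyncio


async def watch_graph(graph_id: str) -> None:
    # One listener streams results from every execution of a single graph.
    bus = AsyncRedisExecutionEventBus()
    async for result in bus.listen(graph_id=graph_id, graph_exec_id="*"):
        print(result)


# asyncio.run(watch_graph("my-graph-id"))  # "my-graph-id" is a placeholder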
@ -3,7 +3,7 @@ import logging
import uuid
from collections import defaultdict
from datetime import datetime, timezone
from typing import Any, Literal, Type
from typing import Any, Literal, Optional, Type

import prisma
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
@ -12,12 +12,14 @@ from pydantic.fields import computed_field

from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
from backend.data.block import BlockInput, BlockType, get_block, get_blocks
from backend.data.db import BaseDbModel, transaction
from backend.data.execution import ExecutionStatus
from backend.data.includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from backend.util import json

from .block import BlockInput, BlockType, get_block, get_blocks
from .db import BaseDbModel, transaction
from .execution import ExecutionStatus
from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from .integrations import Webhook

logger = logging.getLogger(__name__)


@ -50,20 +52,51 @@ class Node(BaseDbModel):
    input_links: list[Link] = []
    output_links: list[Link] = []

    webhook_id: Optional[str] = None


class NodeModel(Node):
    graph_id: str
    graph_version: int

    webhook: Optional[Webhook] = None

    @staticmethod
    def from_db(node: AgentNode):
        if not node.AgentBlock:
            raise ValueError(f"Invalid node {node.id}, invalid AgentBlock.")
        obj = Node(
        obj = NodeModel(
            id=node.id,
            block_id=node.AgentBlock.id,
            input_default=json.loads(node.constantInput, target_type=dict[str, Any]),
            metadata=json.loads(node.metadata, target_type=dict[str, Any]),
            graph_id=node.agentGraphId,
            graph_version=node.agentGraphVersion,
            webhook_id=node.webhookId,
            webhook=Webhook.from_db(node.Webhook) if node.Webhook else None,
        )
        obj.input_links = [Link.from_db(link) for link in node.Input or []]
        obj.output_links = [Link.from_db(link) for link in node.Output or []]
        return obj

    def is_triggered_by_event_type(self, event_type: str) -> bool:
        if not (block := get_block(self.block_id)):
            raise ValueError(f"Block #{self.block_id} not found for node #{self.id}")
        if not block.webhook_config:
            raise TypeError("This method can't be used on non-webhook blocks")
        event_filter = self.input_default.get(block.webhook_config.event_filter_input)
        if not event_filter:
            raise ValueError(f"Event filter is not configured on node #{self.id}")
        return event_type in [
            block.webhook_config.event_format.format(event=k)
            for k in event_filter
            if event_filter[k] is True
        ]


# Fix 2-way reference Node <-> Webhook
Webhook.model_rebuild()


class GraphExecution(BaseDbModel):
    execution_id: str
@ -110,33 +143,6 @@ class Graph(BaseDbModel):
    nodes: list[Node] = []
    links: list[Link] = []

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }

    @computed_field
    @property
    def input_schema(self) -> dict[str, Any]:
@ -165,6 +171,38 @@ class Graph(BaseDbModel):
        ],
    )

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }


class GraphModel(Graph):
    user_id: str
    nodes: list[NodeModel] = []  # type: ignore

    @property
    def starting_nodes(self) -> list[Node]:
        outbound_nodes = {link.sink_id for link in self.links}
@ -291,36 +329,39 @@ class Graph(BaseDbModel):
            GraphExecution.from_db(execution)
            for execution in graph.AgentGraphExecution or []
        ]
        nodes = graph.AgentNodes or []

        return Graph(
        return GraphModel(
            id=graph.id,
            user_id=graph.userId,
            version=graph.version,
            is_active=graph.isActive,
            is_template=graph.isTemplate,
            name=graph.name or "",
            description=graph.description or "",
            executions=executions,
            nodes=[Graph._process_node(node, hide_credentials) for node in nodes],
            nodes=[
                GraphModel._process_node(node, hide_credentials)
                for node in graph.AgentNodes or []
            ],
            links=list(
                {
                    Link.from_db(link)
                    for node in nodes
                    for node in graph.AgentNodes or []
                    for link in (node.Input or []) + (node.Output or [])
                }
            ),
        )

    @staticmethod
    def _process_node(node: AgentNode, hide_credentials: bool) -> Node:
        node_dict = node.model_dump()
    def _process_node(node: AgentNode, hide_credentials: bool) -> NodeModel:
        node_dict = {field: getattr(node, field) for field in node.model_fields}
        if hide_credentials and "constantInput" in node_dict:
            constant_input = json.loads(
                node_dict["constantInput"], target_type=dict[str, Any]
            )
            constant_input = Graph._hide_credentials_in_input(constant_input)
            constant_input = GraphModel._hide_credentials_in_input(constant_input)
            node_dict["constantInput"] = json.dumps(constant_input)
        return Node.from_db(AgentNode(**node_dict))
        return NodeModel.from_db(AgentNode(**node_dict))

    @staticmethod
    def _hide_credentials_in_input(input_data: dict[str, Any]) -> dict[str, Any]:
@ -328,7 +369,7 @@ class Graph(BaseDbModel):
        result = {}
        for key, value in input_data.items():
            if isinstance(value, dict):
                result[key] = Graph._hide_credentials_in_input(value)
                result[key] = GraphModel._hide_credentials_in_input(value)
            elif isinstance(value, str) and any(
                sensitive_key in key.lower() for sensitive_key in sensitive_keys
            ):
@ -339,22 +380,37 @@ class Graph(BaseDbModel):
        return result


# --------------------- Model functions --------------------- #
# --------------------- CRUD functions --------------------- #


async def get_node(node_id: str) -> Node:
async def get_node(node_id: str) -> NodeModel:
    node = await AgentNode.prisma().find_unique_or_raise(
        where={"id": node_id},
        include=AGENT_NODE_INCLUDE,
    )
    return Node.from_db(node)
    return NodeModel.from_db(node)


async def set_node_webhook(node_id: str, webhook_id: str | None) -> NodeModel:
    node = await AgentNode.prisma().update(
        where={"id": node_id},
        data=(
            {"Webhook": {"connect": {"id": webhook_id}}}
            if webhook_id
            else {"Webhook": {"disconnect": True}}
        ),
        include=AGENT_NODE_INCLUDE,
    )
    if not node:
        raise ValueError(f"Node #{node_id} not found")
    return NodeModel.from_db(node)


async def get_graphs(
    user_id: str,
    include_executions: bool = False,
    filter_by: Literal["active", "template"] | None = "active",
) -> list[Graph]:
) -> list[GraphModel]:
    """
    Retrieves graph metadata objects.
    Default behaviour is to get all currently active graphs.
@ -365,7 +421,7 @@ async def get_graphs(
        user_id: The ID of the user that owns the graph.

    Returns:
        list[Graph]: A list of objects representing the retrieved graph metadata.
        list[GraphModel]: A list of objects representing the retrieved graphs.
    """
    where_clause: AgentGraphWhereInput = {}

@ -386,7 +442,7 @@ async def get_graphs(
        include=graph_include,
    )

    return [Graph.from_db(graph) for graph in graphs]
    return [GraphModel.from_db(graph) for graph in graphs]


async def get_graph(
@ -395,7 +451,7 @@ async def get_graph(
    template: bool = False,
    user_id: str | None = None,
    hide_credentials: bool = False,
) -> Graph | None:
) -> GraphModel | None:
    """
    Retrieves a graph from the DB.
    Defaults to the version with `is_active` if `version` is not passed,
@ -420,38 +476,35 @@ async def get_graph(
        include=AGENT_GRAPH_INCLUDE,
        order={"version": "desc"},
    )
    return Graph.from_db(graph, hide_credentials) if graph else None
    return GraphModel.from_db(graph, hide_credentials) if graph else None


async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None:
    # Check if the graph belongs to the user
    graph = await AgentGraph.prisma().find_first(
    # Activate the requested version if it exists and is owned by the user.
    updated_count = await AgentGraph.prisma().update_many(
        data={"isActive": True},
        where={
            "id": graph_id,
            "version": version,
            "userId": user_id,
        }
    )
    if not graph:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    updated_graph = await AgentGraph.prisma().update(
        data={"isActive": True},
        where={
            "graphVersionId": {"id": graph_id, "version": version},
        },
    )
    if not updated_graph:
        raise Exception(f"Graph #{graph_id} v{version} not found")
    if updated_count == 0:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    # Deactivate all other versions
    # Deactivate all other versions.
    await AgentGraph.prisma().update_many(
        data={"isActive": False},
        where={"id": graph_id, "version": {"not": version}, "userId": user_id},
        where={
            "id": graph_id,
            "version": {"not": version},
            "userId": user_id,
            "isActive": True,
        },
    )


async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]:
async def get_graph_all_versions(graph_id: str, user_id: str) -> list[GraphModel]:
    graph_versions = await AgentGraph.prisma().find_many(
        where={"id": graph_id, "userId": user_id},
        order={"version": "desc"},
@ -461,7 +514,7 @@ async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]:
    if not graph_versions:
        return []

    return [Graph.from_db(graph) for graph in graph_versions]
    return [GraphModel.from_db(graph) for graph in graph_versions]


async def delete_graph(graph_id: str, user_id: str) -> int:
@ -473,7 +526,7 @@ async def delete_graph(graph_id: str, user_id: str) -> int:
    return entries_count


async def create_graph(graph: Graph, user_id: str) -> Graph:
async def create_graph(graph: Graph, user_id: str) -> GraphModel:
    async with transaction() as tx:
        await __create_graph(tx, graph, user_id)

@ -534,6 +587,32 @@ async def __create_graph(tx, graph: Graph, user_id: str):
# ------------------------ UTILITIES ------------------------ #


def make_graph_model(creatable_graph: Graph, user_id: str) -> GraphModel:
    """
    Convert a Graph to a GraphModel, setting graph_id and graph_version on all nodes.

    Args:
        creatable_graph (Graph): The creatable graph to convert.
        user_id (str): The ID of the user creating the graph.

    Returns:
        GraphModel: The converted Graph object.
    """
    # Create a new Graph object, inheriting properties from CreatableGraph
    return GraphModel(
        **creatable_graph.model_dump(exclude={"nodes"}),
        user_id=user_id,
        nodes=[
            NodeModel(
                **creatable_node.model_dump(),
                graph_id=creatable_graph.id,
                graph_version=creatable_graph.version,
            )
            for creatable_node in creatable_graph.nodes
        ],
    )


async def fix_llm_provider_credentials():
    """Fix node credentials with provider `llm`"""
    from autogpt_libs.supabase_integration_credentials_store import (
@ -547,16 +626,14 @@ async def fix_llm_provider_credentials():

    broken_nodes = await prisma.get_client().query_raw(
        """
        SELECT "User".id user_id,
        SELECT graph."userId" user_id,
               node.id node_id,
               node."constantInput" node_preset_input
        FROM platform."AgentNode" node
        LEFT JOIN platform."AgentGraph" graph
        ON node."agentGraphId" = graph.id
        LEFT JOIN platform."User" "User"
        ON graph."userId" = "User".id
        WHERE node."constantInput"::jsonb->'credentials'->>'provider' = 'llm'
        ORDER BY user_id;
        ORDER BY graph."userId";
        """
    )
    logger.info(f"Fixing LLM credential inputs on {len(broken_nodes)} nodes")
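On the `Webhook.model_rebuild()` call above: a minimal, self-contained sketch of the same pydantic forward-reference pattern for a 2-way relation. `Hook` and `Item` are stand-ins invented here for `Webhook` and `NodeModel`.

from typing import Optional

from pydantic import BaseModel


class Hook(BaseModel):  # stands in for Webhook
    attached: Optional[list["Item"]] = None  # forward reference by name


class Item(BaseModel):  # stands in for NodeModel
    hook: Optional[Hook] = None


Hook.model_rebuild()  # resolve "Item" now that both classes exist

print(Hook(attached=[Item()]))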
@ -3,6 +3,7 @@ import prisma
AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = {
    "Input": True,
    "Output": True,
    "Webhook": True,
    "AgentBlock": True,
}

@ -27,3 +28,7 @@ GRAPH_EXECUTION_INCLUDE: prisma.types.AgentGraphExecutionInclude = {
        }
    }
}

INTEGRATION_WEBHOOK_INCLUDE: prisma.types.IntegrationWebhookInclude = {
    "AgentNodes": {"include": AGENT_NODE_INCLUDE}  # type: ignore
}
168
autogpt_platform/backend/backend/data/integrations.py
Normal file
@ -0,0 +1,168 @@
import logging
from typing import TYPE_CHECKING, AsyncGenerator, Optional

from prisma import Json
from prisma.models import IntegrationWebhook
from pydantic import Field

from backend.data.includes import INTEGRATION_WEBHOOK_INCLUDE
from backend.data.queue import AsyncRedisEventBus

from .db import BaseDbModel

if TYPE_CHECKING:
    from .graph import NodeModel

logger = logging.getLogger(__name__)


class Webhook(BaseDbModel):
    user_id: str
    provider: str
    credentials_id: str
    webhook_type: str
    resource: str
    events: list[str]
    config: dict = Field(default_factory=dict)
    secret: str

    provider_webhook_id: str

    attached_nodes: Optional[list["NodeModel"]] = None

    @staticmethod
    def from_db(webhook: IntegrationWebhook):
        from .graph import NodeModel

        return Webhook(
            id=webhook.id,
            user_id=webhook.userId,
            provider=webhook.provider,
            credentials_id=webhook.credentialsId,
            webhook_type=webhook.webhookType,
            resource=webhook.resource,
            events=webhook.events,
            config=dict(webhook.config),
            secret=webhook.secret,
            provider_webhook_id=webhook.providerWebhookId,
            attached_nodes=(
                [NodeModel.from_db(node) for node in webhook.AgentNodes]
                if webhook.AgentNodes is not None
                else None
            ),
        )


# --------------------- CRUD functions --------------------- #


async def create_webhook(webhook: Webhook) -> Webhook:
    created_webhook = await IntegrationWebhook.prisma().create(
        data={
            "id": webhook.id,
            "userId": webhook.user_id,
            "provider": webhook.provider,
            "credentialsId": webhook.credentials_id,
            "webhookType": webhook.webhook_type,
            "resource": webhook.resource,
            "events": webhook.events,
            "config": Json(webhook.config),
            "secret": webhook.secret,
            "providerWebhookId": webhook.provider_webhook_id,
        }
    )
    return Webhook.from_db(created_webhook)


async def get_webhook(webhook_id: str) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_unique_or_raise(
        where={"id": webhook_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook)


async def get_all_webhooks(credentials_id: str) -> list[Webhook]:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhooks = await IntegrationWebhook.prisma().find_many(
        where={"credentialsId": credentials_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return [Webhook.from_db(webhook) for webhook in webhooks]


async def find_webhook(
    credentials_id: str, webhook_type: str, resource: str, events: list[str]
) -> Webhook | None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_first(
        where={
            "credentialsId": credentials_id,
            "webhookType": webhook_type,
            "resource": resource,
            "events": {"has_every": events},
        },
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook) if webhook else None


async def update_webhook_config(webhook_id: str, updated_config: dict) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    _updated_webhook = await IntegrationWebhook.prisma().update(
        where={"id": webhook_id},
        data={"config": Json(updated_config)},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    if _updated_webhook is None:
        raise ValueError(f"Webhook #{webhook_id} not found")
    return Webhook.from_db(_updated_webhook)


async def delete_webhook(webhook_id: str) -> None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    deleted = await IntegrationWebhook.prisma().delete(where={"id": webhook_id})
    if not deleted:
        raise ValueError(f"Webhook #{webhook_id} not found")


# --------------------- WEBHOOK EVENTS --------------------- #


class WebhookEvent(BaseDbModel):
    provider: str
    webhook_id: str
    event_type: str
    payload: dict


class WebhookEventBus(AsyncRedisEventBus[WebhookEvent]):
    Model = WebhookEvent

    @property
    def event_bus_name(self) -> str:
        return "webhooks"

    async def publish(self, event: WebhookEvent):
        await self.publish_event(event, f"{event.webhook_id}/{event.event_type}")

    async def listen(
        self, webhook_id: str, event_type: Optional[str] = None
    ) -> AsyncGenerator[WebhookEvent, None]:
        async for event in self.listen_events(f"{webhook_id}/{event_type or '*'}"):
            yield event


event_bus = WebhookEventBus()


async def publish_webhook_event(event: WebhookEvent):
    await event_bus.publish(event)


async def listen_for_webhook_event(
    webhook_id: str, event_type: Optional[str] = None
) -> WebhookEvent | None:
    async for event in event_bus.listen(webhook_id, event_type):
        return event  # Only one event is expected
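A usage sketch (not from this commit) for `listen_for_webhook_event` above: since it returns after the first matching event, a caller will typically bound the wait with a timeout. The "ping" event type and the `wait_for_event` name are assumed examples.

import asyncio


async def wait_for_event(webhook_id: str, timeout: float = 30.0) -> WebhookEvent | None:
    # listen_for_webhook_event returns after the first matching event;
    # wrap it in a timeout so a silent webhook doesn't block forever.
    try:
        return await asyncio.wait_for(
            listen_for_webhook_event(webhook_id, event_type="ping"), timeout
        )
    except asyncio.TimeoutError:
        return None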
@ -113,6 +113,7 @@ def SchemaField(
    advanced: Optional[bool] = None,
    secret: bool = False,
    exclude: bool = False,
    hidden: Optional[bool] = None,
    **kwargs,
) -> T:
    json_extra = {
@ -121,6 +122,7 @@ def SchemaField(
            "placeholder": placeholder,
            "secret": secret,
            "advanced": advanced,
            "hidden": hidden,
        }.items()
        if v is not None
    }

@ -9,11 +9,8 @@ from redis.asyncio.client import PubSub as AsyncPubSub
from redis.client import PubSub

from backend.data import redis
from backend.data.execution import ExecutionResult
from backend.util.settings import Config

logger = logging.getLogger(__name__)
config = Config()


class DateTimeEncoder(json.JSONEncoder):
@ -36,7 +33,7 @@ class BaseRedisEventBus(Generic[M], ABC):

    def _serialize_message(self, item: M, channel_key: str) -> tuple[str, str]:
        message = json.dumps(item.model_dump(), cls=DateTimeEncoder)
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        logger.info(f"[{channel_name}] Publishing an event to Redis {message}")
        return message, channel_name

@ -54,7 +51,7 @@ class BaseRedisEventBus(Generic[M], ABC):
    def _subscribe(
        self, connection: redis.Redis | redis.AsyncRedis, channel_key: str
    ) -> tuple[PubSub | AsyncPubSub, str]:
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        pubsub = connection.pubsub()
        return pubsub, channel_name

@ -108,37 +105,3 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC):
        async for message in pubsub.listen():
            if event := self._deserialize_message(message, channel_key):
                yield event


class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    def publish(self, res: ExecutionResult):
        self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> Generator[ExecutionResult, None, None]:
        for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result


class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    async def publish(self, res: ExecutionResult):
        await self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    async def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> AsyncGenerator[ExecutionResult, None]:
        async for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result
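On the channel-separator change above ("-" to "/"): a small sketch of the resulting naming scheme, assuming the bus name "execution_event" purely for illustration; `channel_name` is a hypothetical helper. Using "/" keeps wildcard subscriptions unambiguous even when the key parts themselves contain dashes.

def channel_name(event_bus_name: str, *key_parts: str) -> str:
    # Join the bus name and key parts into one pub/sub channel name.
    return "/".join((event_bus_name, *key_parts))


assert (
    channel_name("execution_event", "graph-1", "exec-2")
    == "execution_event/graph-1/exec-2"
)
# Pattern-subscribe to one graph across all of its executions:
pattern = channel_name("execution_event", "graph-1", "*")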
@ -4,6 +4,7 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, cast

from backend.data.credit import get_user_credit_model
from backend.data.execution import (
    ExecutionResult,
    RedisExecutionEventBus,
    create_graph_execution,
    get_execution_results,
    get_incomplete_executions,
@ -15,18 +16,18 @@ from backend.data.execution import (
    upsert_execution_output,
)
from backend.data.graph import get_graph, get_node
from backend.data.queue import RedisExecutionEventBus
from backend.data.user import (
    get_user_integrations,
    get_user_metadata,
    update_user_integrations,
    update_user_metadata,
)
from backend.util.service import AppService, expose
from backend.util.service import AppService, expose, register_pydantic_serializers
from backend.util.settings import Config

P = ParamSpec("P")
R = TypeVar("R")
config = Config()


class DatabaseManager(AppService):
@ -38,7 +39,7 @@ class DatabaseManager(AppService):

    @classmethod
    def get_port(cls) -> int:
        return Config().database_api_port
        return config.database_api_port

    @expose
    def send_execution_update(self, execution_result: ExecutionResult):
@ -55,6 +56,9 @@ class DatabaseManager(AppService):
            res = self.run_and_wait(coroutine)
            return res

        # Register serializers for annotations on the bare function
        register_pydantic_serializers(f)

        return wrapper

    # Executions
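The `wrapper` above turns each exposed async database function into a synchronous service method. A reduced sketch of that bridge, assuming a service that owns a long-lived event loop on a background thread (the names here are illustrative, not the actual AppService internals):

import asyncio
import threading
from typing import Any, Callable, Coroutine

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

def make_sync(f: Callable[..., Coroutine[Any, Any, Any]]) -> Callable[..., Any]:
    def wrapper(*args, **kwargs):
        # Submit the coroutine to the service's event loop and block until done.
        return asyncio.run_coroutine_threadsafe(f(*args, **kwargs), loop).result()
    return wrapper

async def fetch_value() -> int:
    return 42

print(make_sync(fetch_value)())  # -> 42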
@ -18,6 +18,7 @@ if TYPE_CHECKING:

from autogpt_libs.utils.cache import thread_cached

from backend.blocks.agent import AgentExecutorBlock
from backend.data import redis
from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
from backend.data.execution import (
@ -29,7 +30,7 @@ from backend.data.execution import (
    merge_execution_input,
    parse_execution_output,
)
from backend.data.graph import Graph, Link, Node
from backend.data.graph import GraphModel, Link, Node
from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
@ -135,7 +136,6 @@ def execute_node(
        logger.error(f"Block {node.block_id} not found.")
        return

    # Sanity check: validate the execution input.
    log_metadata = LogMetadata(
        user_id=user_id,
        graph_eid=graph_exec_id,
@ -144,11 +144,20 @@ def execute_node(
        node_id=node_id,
        block_name=node_block.name,
    )

    # Sanity check: validate the execution input.
    input_data, error = validate_exec(node, data.data, resolve_input=False)
    if input_data is None:
        log_metadata.error(f"Skip execution, input validation error: {error}")
        db_client.upsert_execution_output(node_exec_id, "error", error)
        update_execution(ExecutionStatus.FAILED)
        return

    # Re-shape the input data for the agent block.
    # AgentExecutorBlock keeps the node input_data separate from its input_default.
    if isinstance(node_block, AgentExecutorBlock):
        input_data = {**node.input_default, "data": input_data}

    # Execute the node
    input_data_str = json.dumps(input_data)
    input_size = len(input_data_str)
@ -177,7 +186,7 @@ def execute_node(
        input_data, **extra_exec_kwargs
    ):
        output_size += len(json.dumps(output_data))
        log_metadata.info("Node produced output", output_name=output_data)
        log_metadata.info("Node produced output", **{output_name: output_data})
        db_client.upsert_execution_output(node_exec_id, output_name, output_data)

    for execution in _enqueue_next_nodes(
@ -244,7 +253,6 @@ def _enqueue_next_nodes(
    graph_id: str,
    log_metadata: LogMetadata,
) -> list[NodeExecution]:

    def add_enqueued_execution(
        node_exec_id: str, node_id: str, data: BlockInput
    ) -> NodeExecution:
@ -376,31 +384,46 @@ def validate_exec(
    if not node_block:
        return None, f"Block for {node.block_id} not found."

    error_prefix = f"Input data missing for {node_block.name}:"
    if isinstance(node_block, AgentExecutorBlock):
        # Validate the execution metadata for the agent executor block.
        try:
            exec_data = AgentExecutorBlock.Input(**node.input_default)
        except Exception as e:
            return None, f"Input data doesn't match {node_block.name}: {str(e)}"

        # Gather validation inputs
        input_schema = exec_data.input_schema
        required_fields = set(input_schema["required"])
        input_default = exec_data.data
    else:
        # Convert non-matching data types to the expected input schema.
        for name, data_type in node_block.input_schema.__annotations__.items():
            if (value := data.get(name)) and (type(value) is not data_type):
                data[name] = convert(value, data_type)

        # Gather validation inputs
        input_schema = node_block.input_schema.jsonschema()
        required_fields = node_block.input_schema.get_required_fields()
        input_default = node.input_default

    # Input data (without default values) should contain all required fields.
    error_prefix = f"Input data missing or mismatch for `{node_block.name}`:"
    input_fields_from_nodes = {link.sink_name for link in node.input_links}
    if not input_fields_from_nodes.issubset(data):
        return None, f"{error_prefix} {input_fields_from_nodes - set(data)}"

    # Merge input data with default values and resolve dynamic dict/list/object pins.
    data = {**node.input_default, **data}
    data = {**input_default, **data}
    if resolve_input:
        data = merge_execution_input(data)

    # Input data post-merge should contain all required fields from the schema.
    input_fields_from_schema = node_block.input_schema.get_required_fields()
    if not input_fields_from_schema.issubset(data):
        return None, f"{error_prefix} {input_fields_from_schema - set(data)}"

    # Convert non-matching data types to the expected input schema.
    for name, data_type in node_block.input_schema.__annotations__.items():
        if (value := data.get(name)) and (type(value) is not data_type):
            data[name] = convert(value, data_type)
    if not required_fields.issubset(data):
        return None, f"{error_prefix} {required_fields - set(data)}"

    # Last validation: Validate the input values against the schema.
    if error := node_block.input_schema.validate_data(data):
        error_message = f"Input data doesn't match {node_block.name}: {error}"
    if error := json.validate_with_jsonschema(schema=input_schema, data=data):
        error_message = f"{error_prefix} {error}"
        logger.error(error_message)
        return None, error_message

@ -689,7 +712,6 @@ class Executor:


class ExecutionManager(AppService):

    def __init__(self):
        super().__init__()
        self.use_redis = True
@ -751,7 +773,7 @@ class ExecutionManager(AppService):
        user_id: str,
        graph_version: int | None = None,
    ) -> GraphExecution:
        graph: Graph | None = self.db_client.get_graph(
        graph: GraphModel | None = self.db_client.get_graph(
            graph_id=graph_id, user_id=user_id, version=graph_version
        )
        if not graph:
@ -775,6 +797,15 @@ class ExecutionManager(AppService):
            if name and name in data:
                input_data = {"value": data[name]}

            # Extract the webhook payload and assign it to the input pin
            webhook_payload_key = f"webhook_{node.webhook_id}_payload"
            if (
                block.block_type == BlockType.WEBHOOK
                and node.webhook_id
                and webhook_payload_key in data
            ):
                input_data = {"payload": data[webhook_payload_key]}

            input_data, error = validate_exec(node, input_data)
            if input_data is None:
                raise ValueError(error)
@ -852,7 +883,7 @@ class ExecutionManager(AppService):
        )
        self.db_client.send_execution_update(exec_update)

    def _validate_node_input_credentials(self, graph: Graph, user_id: str):
    def _validate_node_input_credentials(self, graph: GraphModel, user_id: str):
        """Checks all credentials for all nodes of the graph"""

        for node in graph.nodes:
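To make the reworked validation flow concrete, here is a compressed, standalone version of the same three checks — linked inputs delivered, required fields present post-merge, values matching the JSON schema. The function and schema are made-up examples, not the repo's API:

import jsonschema

def validate_input(data: dict, defaults: dict, schema: dict, linked: set[str]) -> str | None:
    if not linked.issubset(data):  # every wired-up pin must have delivered a value
        return f"missing linked inputs: {linked - set(data)}"
    merged = {**defaults, **data}  # fall back to node defaults
    required = set(schema.get("required", []))
    if not required.issubset(merged):
        return f"missing required fields: {required - set(merged)}"
    try:
        jsonschema.validate(merged, schema)  # final value-level check
    except jsonschema.ValidationError as e:
        return str(e)
    return None

schema = {"type": "object", "required": ["prompt"], "properties": {"prompt": {"type": "string"}}}
print(validate_input({}, {"prompt": "hi"}, schema, linked=set()))  # -> None (valid)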
@ -38,6 +38,7 @@ def _extract_schema_from_url(database_url) -> tuple[str, str]:


logger = logging.getLogger(__name__)
config = Config()


def log(msg, **kwargs):
@ -96,7 +97,7 @@ class ExecutionScheduler(AppService):

    @classmethod
    def get_port(cls) -> int:
        return Config().execution_scheduler_port
        return config.execution_scheduler_port

    @property
    @thread_cached
@ -11,6 +11,7 @@ from redis.lock import Lock as RedisLock

from backend.data import redis
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Settings

logger = logging.getLogger(__name__)
@ -129,7 +130,6 @@ class IntegrationCredentialsManager:

    def _acquire_lock(self, user_id: str, credentials_id: str, *args: str) -> RedisLock:
        key = (
            self.store.db_manager,
            f"user:{user_id}",
            f"credentials:{credentials_id}",
            *args,
@ -157,12 +157,14 @@ def _get_provider_oauth_handler(provider_name: str) -> BaseOAuthHandler:
    client_id = getattr(settings.secrets, f"{provider_name}_client_id")
    client_secret = getattr(settings.secrets, f"{provider_name}_client_secret")
    if not (client_id and client_secret):
        raise Exception(  # TODO: ConfigError
        raise MissingConfigError(
            f"Integration with provider '{provider_name}' is not configured",
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url
    frontend_base_url = (
        settings.config.frontend_base_url or settings.config.platform_base_url
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
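The lock-key change above drops the store's DB handle from the key tuple, leaving a purely name-based namespace. A standalone sketch of that locking pattern with redis-py (the key layout mirrors the tuple above; the TTL is an assumed safety bound, not a value from this commit):

import redis

def acquire_credentials_lock(r: redis.Redis, user_id: str, credentials_id: str, *args: str):
    # Namespace the lock by user and credentials so unrelated work never contends.
    key = ":".join(("lock", f"user:{user_id}", f"credentials:{credentials_id}", *args))
    # A timeout guards against a crashed holder keeping the lock forever.
    return r.lock(key, timeout=60)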
@ -0,0 +1,7 @@
from enum import Enum


class ProviderName(str, Enum):
    GITHUB = "github"
    GOOGLE = "google"
    NOTION = "notion"
@ -0,0 +1,17 @@
from typing import TYPE_CHECKING

from .github import GithubWebhooksManager

if TYPE_CHECKING:
    from .base import BaseWebhooksManager

# --8<-- [start:WEBHOOK_MANAGERS_BY_NAME]
WEBHOOK_MANAGERS_BY_NAME: dict[str, type["BaseWebhooksManager"]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GithubWebhooksManager,
    ]
}
# --8<-- [end:WEBHOOK_MANAGERS_BY_NAME]

__all__ = ["WEBHOOK_MANAGERS_BY_NAME"]
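Registering another provider is then a one-line addition to this mapping; a hypothetical sketch (ExampleWebhooksManager does not exist in this commit):

# Assuming a hypothetical example.py next to github.py:
# from .example import ExampleWebhooksManager
WEBHOOK_MANAGERS_BY_NAME: dict[str, type["BaseWebhooksManager"]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GithubWebhooksManager,
        # ExampleWebhooksManager,  # hypothetical second provider
    ]
}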
163
autogpt_platform/backend/backend/integrations/webhooks/base.py
Normal file
@ -0,0 +1,163 @@
import logging
import secrets
from abc import ABC, abstractmethod
from typing import ClassVar, Generic, TypeVar
from uuid import uuid4

from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import Request
from strenum import StrEnum

from backend.data import integrations
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Config

logger = logging.getLogger(__name__)
app_config = Config()

WT = TypeVar("WT", bound=StrEnum)


class BaseWebhooksManager(ABC, Generic[WT]):
    # --8<-- [start:BaseWebhooksManager1]
    PROVIDER_NAME: ClassVar[str]
    # --8<-- [end:BaseWebhooksManager1]

    WebhookType: WT

    async def get_suitable_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        if not app_config.platform_base_url:
            raise MissingConfigError(
                "PLATFORM_BASE_URL must be set to use Webhook functionality"
            )

        if webhook := await integrations.find_webhook(
            credentials.id, webhook_type, resource, events
        ):
            return webhook
        return await self._create_webhook(
            user_id, credentials, webhook_type, resource, events
        )

    async def prune_webhook_if_dangling(
        self, webhook_id: str, credentials: Credentials
    ) -> bool:
        webhook = await integrations.get_webhook(webhook_id)
        if webhook.attached_nodes is None:
            raise ValueError("Error retrieving webhook including attached nodes")
        if webhook.attached_nodes:
            # Don't prune webhook if in use
            return False

        await self._deregister_webhook(webhook, credentials)
        await integrations.delete_webhook(webhook.id)
        return True

    # --8<-- [start:BaseWebhooksManager3]
    @classmethod
    @abstractmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        """
        Validates an incoming webhook request and returns its payload and type.

        Params:
            webhook: Object representing the configured webhook and its properties in our system.
            request: Incoming FastAPI `Request`

        Returns:
            dict: The validated payload
            str: The event type associated with the payload
        """

    # --8<-- [end:BaseWebhooksManager3]

    # --8<-- [start:BaseWebhooksManager5]
    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        """
        Triggers a ping to the given webhook.

        Raises:
            NotImplementedError: if the provider doesn't support pinging
        """
        # --8<-- [end:BaseWebhooksManager5]
        raise NotImplementedError(f"{self.__class__.__name__} doesn't support pinging")

    # --8<-- [start:BaseWebhooksManager2]
    @abstractmethod
    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        """
        Registers a new webhook with the provider.

        Params:
            credentials: The credentials with which to create the webhook
            webhook_type: The provider-specific webhook type to create
            resource: The resource to receive events for
            events: The events to subscribe to
            ingress_url: The ingress URL for webhook payloads
            secret: Secret used to verify webhook payloads

        Returns:
            str: Webhook ID assigned by the provider
            config: Provider-specific configuration for the webhook
        """
        ...

    # --8<-- [end:BaseWebhooksManager2]

    # --8<-- [start:BaseWebhooksManager4]
    @abstractmethod
    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None: ...

    # --8<-- [end:BaseWebhooksManager4]

    async def _create_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        id = str(uuid4())
        secret = secrets.token_hex(32)
        provider_name = self.PROVIDER_NAME
        ingress_url = (
            f"{app_config.platform_base_url}/api/integrations/{provider_name}"
            f"/webhooks/{id}/ingress"
        )
        provider_webhook_id, config = await self._register_webhook(
            credentials, webhook_type, resource, events, ingress_url, secret
        )
        return await integrations.create_webhook(
            integrations.Webhook(
                id=id,
                user_id=user_id,
                provider=provider_name,
                credentials_id=credentials.id,
                webhook_type=webhook_type,
                resource=resource,
                events=events,
                provider_webhook_id=provider_webhook_id,
                config=config,
                secret=secret,
            )
        )
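A provider integration only has to fill in the class attributes and the three abstract methods; the skeleton below is a non-functional stub to show the minimum surface (it would only run inside this codebase, and "example" is not a real provider):

from typing import ClassVar

from fastapi import Request
from strenum import StrEnum

from backend.data import integrations
from backend.integrations.webhooks.base import BaseWebhooksManager


class ExampleWebhookType(StrEnum):
    DEFAULT = "default"


class ExampleWebhooksManager(BaseWebhooksManager):
    PROVIDER_NAME: ClassVar[str] = "example"
    WebhookType = ExampleWebhookType

    @classmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        # A real manager must verify the request signature before trusting the body.
        return await request.json(), "example.event"

    async def _register_webhook(
        self, credentials, webhook_type, resource, events, ingress_url, secret
    ) -> tuple[str, dict]:
        # A real manager would call the provider's API here.
        return "provider-webhook-id", {"url": ingress_url}

    async def _deregister_webhook(self, webhook, credentials) -> None:
        pass  # a real manager would delete the webhook on the provider's side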
175
autogpt_platform/backend/backend/integrations/webhooks/github.py
Normal file
@ -0,0 +1,175 @@
import hashlib
import hmac
import logging

import requests
from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import HTTPException, Request
from strenum import StrEnum

from backend.data import integrations

from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


# --8<-- [start:GithubWebhooksManager]
class GithubWebhookType(StrEnum):
    REPO = "repo"


class GithubWebhooksManager(BaseWebhooksManager):
    PROVIDER_NAME = "github"

    WebhookType = GithubWebhookType

    GITHUB_API_URL = "https://api.github.com"
    GITHUB_API_DEFAULT_HEADERS = {"Accept": "application/vnd.github.v3+json"}

    @classmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        if not (event_type := request.headers.get("X-GitHub-Event")):
            raise HTTPException(
                status_code=400, detail="X-GitHub-Event header is missing!"
            )

        if not (signature_header := request.headers.get("X-Hub-Signature-256")):
            raise HTTPException(
                status_code=403, detail="X-Hub-Signature-256 header is missing!"
            )

        payload_body = await request.body()
        hash_object = hmac.new(
            webhook.secret.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256
        )
        expected_signature = "sha256=" + hash_object.hexdigest()

        if not hmac.compare_digest(expected_signature, signature_header):
            raise HTTPException(
                status_code=403, detail="Request signatures didn't match!"
            )

        payload = await request.json()
        if action := payload.get("action"):
            event_type += f".{action}"

        return payload, event_type

    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": f"Bearer {webhook.config.get('access_token')}",
        }

        repo, github_hook_id = webhook.resource, webhook.provider_webhook_id
        ping_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{github_hook_id}/pings"

        response = requests.post(ping_url, headers=headers)

        if response.status_code != 204:
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to ping GitHub webhook: {error_msg}")

    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: GithubWebhookType,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        if webhook_type == self.WebhookType.REPO and resource.count("/") > 1:
            raise ValueError("Invalid repo format: expected 'owner/repo'")

        # Extract main event, e.g. `pull_request.opened` -> `pull_request`
        github_events = list({event.split(".")[0] for event in events})

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }
        webhook_data = {
            "name": "web",
            "active": True,
            "events": github_events,
            "config": {
                "url": ingress_url,
                "content_type": "json",
                "insecure_ssl": "0",
                "secret": secret,
            },
        }

        response = requests.post(
            f"{self.GITHUB_API_URL}/repos/{resource}/hooks",
            headers=headers,
            json=webhook_data,
        )

        if response.status_code != 201:
            error_msg = extract_github_error_msg(response)
            if "not found" in error_msg.lower():
                error_msg = (
                    f"{error_msg} "
                    "(Make sure the GitHub account or API key has 'repo' or "
                    f"webhook create permissions to '{resource}')"
                )
            raise ValueError(f"Failed to create GitHub webhook: {error_msg}")

        webhook_id = response.json()["id"]
        config = response.json()["config"]

        return str(webhook_id), config

    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None:
        webhook_type = self.WebhookType(webhook.webhook_type)
        if webhook.credentials_id != credentials.id:
            raise ValueError(
                f"Webhook #{webhook.id} does not belong to credentials {credentials.id}"
            )

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }

        if webhook_type == self.WebhookType.REPO:
            repo = webhook.resource
            delete_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{webhook.provider_webhook_id}"  # noqa
        else:
            raise NotImplementedError(
                f"Unsupported webhook type '{webhook.webhook_type}'"
            )

        response = requests.delete(delete_url, headers=headers)

        if response.status_code not in [204, 404]:
            # 204 means successful deletion, 404 means the webhook was already deleted
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to delete GitHub webhook: {error_msg}")

        # If we reach here, the webhook was successfully deleted or didn't exist


# --8<-- [end:GithubWebhooksManager]


def extract_github_error_msg(response: requests.Response) -> str:
    error_msgs = []
    resp = response.json()
    if resp.get("message"):
        error_msgs.append(resp["message"])
    if resp.get("errors"):
        error_msgs.extend(f"* {err.get('message', err)}" for err in resp["errors"])
    if resp.get("error"):
        if isinstance(resp["error"], dict):
            error_msgs.append(resp["error"].get("message", resp["error"]))
        else:
            error_msgs.append(resp["error"])
    return "\n".join(error_msgs)
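The signature check above is the standard GitHub webhook HMAC-SHA256 scheme; here it is in isolation, runnable without FastAPI (the secret and body are sample values):

import hashlib
import hmac

def verify_github_signature(secret: str, body: bytes, signature_header: str) -> bool:
    digest = hmac.new(secret.encode("utf-8"), msg=body, digestmod=hashlib.sha256)
    expected = "sha256=" + digest.hexdigest()
    # compare_digest avoids leaking the match position through timing.
    return hmac.compare_digest(expected, signature_header)

body = b'{"action": "opened"}'
sig = "sha256=" + hmac.new(b"s3cret", body, hashlib.sha256).hexdigest()
assert verify_github_signature("s3cret", body, sig)
assert not verify_github_signature("wrong", body, sig)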
@ -0,0 +1,198 @@
import logging
from typing import TYPE_CHECKING, Callable, Optional, cast

from backend.data.block import get_block
from backend.data.graph import set_node_webhook
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME

if TYPE_CHECKING:
    from autogpt_libs.supabase_integration_credentials_store.types import Credentials

    from backend.data.graph import GraphModel, NodeModel

    from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


async def on_graph_activate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is activated/created.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_activate` on all nodes in this graph.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    # Compare nodes in new_graph_version with previous_graph_version
    updated_nodes = []
    for new_node in graph.nodes:
        node_credentials = None
        if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                raise ValueError(
                    f"Node #{new_node.id} updated with non-existent "
                    f"credentials #{creds_meta['id']}"
                )

        updated_node = await on_node_activate(
            graph.user_id, new_node, credentials=node_credentials
        )
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_graph_deactivate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is deactivated/deleted.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_deactivate` on all nodes in `graph`.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    updated_nodes = []
    for node in graph.nodes:
        node_credentials = None
        if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                logger.error(
                    f"Node #{node.id} referenced non-existent "
                    f"credentials #{creds_meta['id']}"
                )

        updated_node = await on_node_deactivate(node, credentials=node_credentials)
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_node_activate(
    user_id: str,
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
) -> "NodeModel":
    """Hook to be called when the node is activated/created"""

    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    logger.debug(
        f"Activating webhook node #{node.id} with config {block.webhook_config}"
    )

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    try:
        resource = block.webhook_config.resource_format.format(**node.input_default)
    except KeyError:
        resource = None
    logger.debug(
        f"Constructed resource string {resource} from input {node.input_default}"
    )

    event_filter_input_name = block.webhook_config.event_filter_input
    has_everything_for_webhook = (
        resource is not None
        and CREDENTIALS_FIELD_NAME in node.input_default
        and event_filter_input_name in node.input_default
        and any(is_on for is_on in node.input_default[event_filter_input_name].values())
    )

    if has_everything_for_webhook and resource:
        logger.debug(f"Node #{node.id} has everything for a webhook!")
        if not credentials:
            credentials_meta = node.input_default[CREDENTIALS_FIELD_NAME]
            raise ValueError(
                f"Cannot set up webhook for node #{node.id}: "
                f"credentials #{credentials_meta['id']} not available"
            )

        # Shape of the event filter is enforced in Block.__init__
        event_filter = cast(dict, node.input_default[event_filter_input_name])
        events = [
            block.webhook_config.event_format.format(event=event)
            for event, enabled in event_filter.items()
            if enabled is True
        ]
        logger.debug(f"Webhook events to subscribe to: {', '.join(events)}")

        # Find/make and attach a suitable webhook to the node
        new_webhook = await webhooks_manager.get_suitable_webhook(
            user_id,
            credentials,
            block.webhook_config.webhook_type,
            resource,
            events,
        )
        logger.debug(f"Acquired webhook: {new_webhook}")
        return await set_node_webhook(node.id, new_webhook.id)

    return node


async def on_node_deactivate(
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
    webhooks_manager: Optional["BaseWebhooksManager"] = None,
) -> "NodeModel":
    """Hook to be called when node is deactivated/deleted"""

    logger.debug(f"Deactivating node #{node.id}")
    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    if node.webhook_id:
        logger.debug(f"Node #{node.id} has webhook_id {node.webhook_id}")
        if not node.webhook:
            logger.error(f"Node #{node.id} has webhook_id but no webhook object")
            raise ValueError("node.webhook not included")

        # Detach webhook from node
        logger.debug(f"Detaching webhook from node #{node.id}")
        updated_node = await set_node_webhook(node.id, None)

        # Prune and deregister the webhook if it is no longer used anywhere
        logger.debug("Pruning and deregistering webhook if dangling")
        webhook = node.webhook
        if credentials:
            logger.debug(f"Pruning webhook #{webhook.id} with credentials")
            await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials)
        else:
            logger.warning(
                f"Cannot deregister webhook #{webhook.id}: credentials "
                f"#{webhook.credentials_id} not available "
                f"({webhook.provider} webhook ID: {webhook.provider_webhook_id})"
            )
        return updated_node

    logger.debug(f"Node #{node.id} has no webhook_id, returning")
    return node
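In practice these hooks are driven with a closure that resolves credential IDs for the owning user. A sketch of the call site, where `creds_manager` stands in for the IntegrationCredentialsManager used by the API layer:

from backend.integrations.webhooks.graph_lifecycle_hooks import on_graph_activate

async def activate_graph(graph, creds_manager, user_id: str):
    # on_graph_activate walks graph.nodes and registers webhooks where needed
    return await on_graph_activate(
        graph,
        get_credentials=lambda credentials_id: creds_manager.get(user_id, credentials_id),
    )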
@ -1,5 +1,5 @@
from backend.app import run_processes
from backend.executor import ExecutionScheduler
from backend.executor import DatabaseManager, ExecutionScheduler
from backend.server.rest_api import AgentServer


@ -8,6 +8,7 @@ def main():
    Run all the processes required for the AutoGPT-server REST API.
    """
    run_processes(
        DatabaseManager(),
        ExecutionScheduler(),
        AgentServer(),
    )
@ -10,8 +10,20 @@ from autogpt_libs.supabase_integration_credentials_store.types import (
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel, Field, SecretStr

from backend.data.graph import set_node_webhook
from backend.data.integrations import (
    WebhookEvent,
    get_all_webhooks,
    get_webhook,
    listen_for_webhook_event,
    publish_webhook_event,
)
from backend.executor.manager import ExecutionManager
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
from backend.util.exceptions import NeedConfirmation
from backend.util.service import get_service_client
from backend.util.settings import Settings

from ..utils import get_user_id
@ -53,6 +65,7 @@ def login(

class CredentialsMetaResponse(BaseModel):
    id: str
    provider: str
    type: CredentialsType
    title: str | None
    scopes: list[str] | None
@ -107,6 +120,7 @@ def callback(
    )
    return CredentialsMetaResponse(
        id=credentials.id,
        provider=credentials.provider,
        type=credentials.type,
        title=credentials.title,
        scopes=credentials.scopes,
@ -114,8 +128,26 @@ def callback(
    )


@router.get("/{provider}/credentials")
@router.get("/credentials")
def list_credentials(
    user_id: Annotated[str, Depends(get_user_id)],
) -> list[CredentialsMetaResponse]:
    credentials = creds_manager.store.get_all_creds(user_id)
    return [
        CredentialsMetaResponse(
            id=cred.id,
            provider=cred.provider,
            type=cred.type,
            title=cred.title,
            scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None,
            username=cred.username if isinstance(cred, OAuth2Credentials) else None,
        )
        for cred in credentials
    ]


@router.get("/{provider}/credentials")
def list_credentials_by_provider(
    provider: Annotated[str, Path(title="The provider to list credentials for")],
    user_id: Annotated[str, Depends(get_user_id)],
) -> list[CredentialsMetaResponse]:
@ -123,6 +155,7 @@ def list_credentials(
    return [
        CredentialsMetaResponse(
            id=cred.id,
            provider=cred.provider,
            type=cred.type,
            title=cred.title,
            scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None,
@ -183,13 +216,22 @@ class CredentialsDeletionResponse(BaseModel):
    )


class CredentialsDeletionNeedsConfirmationResponse(BaseModel):
    deleted: Literal[False] = False
    need_confirmation: Literal[True] = True
    message: str


@router.delete("/{provider}/credentials/{cred_id}")
def delete_credentials(
async def delete_credentials(
    request: Request,
    provider: Annotated[str, Path(title="The provider to delete credentials for")],
    cred_id: Annotated[str, Path(title="The ID of the credentials to delete")],
    user_id: Annotated[str, Depends(get_user_id)],
) -> CredentialsDeletionResponse:
    force: Annotated[
        bool, Query(title="Whether to proceed if any linked webhooks are still in use")
    ] = False,
) -> CredentialsDeletionResponse | CredentialsDeletionNeedsConfirmationResponse:
    creds = creds_manager.store.get_creds_by_id(user_id, cred_id)
    if not creds:
        raise HTTPException(status_code=404, detail="Credentials not found")
@ -198,6 +240,11 @@ def delete_credentials(
            status_code=404, detail="Credentials do not match the specified provider"
        )

    try:
        await remove_all_webhooks_for_credentials(creds, force)
    except NeedConfirmation as e:
        return CredentialsDeletionNeedsConfirmationResponse(message=str(e))

    creds_manager.delete(user_id, cred_id)

    tokens_revoked = None
@ -208,7 +255,98 @@ def delete_credentials(
    return CredentialsDeletionResponse(revoked=tokens_revoked)


# -------- UTILITIES --------- #
# ------------------------- WEBHOOK STUFF -------------------------- #


# ⚠️ Note
# No user auth check because this endpoint is for webhook ingress and relies on
# validation by the provider-specific `WebhooksManager`.
@router.post("/{provider}/webhooks/{webhook_id}/ingress")
async def webhook_ingress_generic(
    request: Request,
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
):
    logger.debug(f"Received {provider} webhook ingress for ID {webhook_id}")
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)
    logger.debug(f"Webhook #{webhook_id}: {webhook}")
    payload, event_type = await webhook_manager.validate_payload(webhook, request)
    logger.debug(f"Validated {provider} {event_type} event with payload {payload}")

    webhook_event = WebhookEvent(
        provider=provider,
        webhook_id=webhook_id,
        event_type=event_type,
        payload=payload,
    )
    await publish_webhook_event(webhook_event)
    logger.debug(f"Webhook event published: {webhook_event}")

    if not webhook.attached_nodes:
        return

    executor = get_service_client(ExecutionManager)
    for node in webhook.attached_nodes:
        logger.debug(f"Webhook-attached node: {node}")
        if not node.is_triggered_by_event_type(event_type):
            logger.debug(f"Node #{node.id} doesn't trigger on event {event_type}")
            continue
        logger.debug(f"Executing graph #{node.graph_id} node #{node.id}")
        executor.add_execution(
            node.graph_id,
            data={f"webhook_{webhook_id}_payload": payload},
            user_id=webhook.user_id,
        )


@router.post("/{provider}/webhooks/{webhook_id}/ping")
async def webhook_ping(
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
    user_id: Annotated[str, Depends(get_user_id)],  # require auth
):
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)

    await webhook_manager.trigger_ping(webhook)
    if not await listen_for_webhook_event(webhook_id, event_type="ping"):
        raise HTTPException(status_code=500, detail="Webhook ping event not received")


# --------------------------- UTILITIES ---------------------------- #


async def remove_all_webhooks_for_credentials(
    credentials: Credentials, force: bool = False
) -> None:
    """
    Remove and deregister all webhooks that were registered using the given credentials.

    Params:
        credentials: The credentials for which to remove the associated webhooks.
        force: Whether to proceed if any of the webhooks are still in use.

    Raises:
        NeedConfirmation: If any of the webhooks are still in use and `force` is `False`
    """
    webhooks = await get_all_webhooks(credentials.id)
    if any(w.attached_nodes for w in webhooks) and not force:
        raise NeedConfirmation(
            "Some webhooks linked to these credentials are still in use by an agent"
        )
    for webhook in webhooks:
        # Unlink all nodes
        for node in webhook.attached_nodes or []:
            await set_node_webhook(node.id, None)

        # Prune the webhook
        webhook_manager = WEBHOOK_MANAGERS_BY_NAME[credentials.provider]()
        success = await webhook_manager.prune_webhook_if_dangling(
            webhook.id, credentials
        )
        if not success:
            logger.warning(f"Webhook #{webhook.id} could not be pruned")


def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
@ -226,7 +364,11 @@ def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
    )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url or str(req.base_url)
    frontend_base_url = (
        settings.config.frontend_base_url
        or settings.config.platform_base_url
        or str(req.base_url)
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
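End to end, a provider delivery therefore hits POST /api/integrations/{provider}/webhooks/{webhook_id}/ingress. A rough sketch of what such a delivery looks like from the outside (URL, webhook ID, and secret are placeholders; the signature header shown is the GitHub variant):

import hashlib
import hmac
import json

import requests

payload = {"action": "opened"}
body = json.dumps(payload).encode()
secret = b"webhook-secret"  # placeholder: the per-webhook secret from registration

requests.post(
    "https://platform.example.com/api/integrations/github/webhooks/WEBHOOK_ID/ingress",
    data=body,
    headers={
        "Content-Type": "application/json",
        "X-GitHub-Event": "pull_request",
        "X-Hub-Signature-256": "sha256=" + hmac.new(secret, body, hashlib.sha256).hexdigest(),
    },
)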
@ -6,6 +6,10 @@ import fastapi
import fastapi.responses
import starlette.middleware.cors
import uvicorn
from autogpt_libs.feature_flag.client import (
    initialize_launchdarkly,
    shutdown_launchdarkly,
)

import backend.data.block
import backend.data.db
@ -18,6 +22,8 @@ import backend.util.settings
settings = backend.util.settings.Settings()
logger = logging.getLogger(__name__)

logging.getLogger("autogpt_libs").setLevel(logging.INFO)


@contextlib.asynccontextmanager
async def lifespan_context(app: fastapi.FastAPI):
@ -25,25 +31,12 @@ async def lifespan_context(app: fastapi.FastAPI):
    await backend.data.block.initialize_blocks()
    await backend.data.user.migrate_and_encrypt_user_integrations()
    await backend.data.graph.fix_llm_provider_credentials()
    initialize_launchdarkly()
    yield
    shutdown_launchdarkly()
    await backend.data.db.disconnect()


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


docs_url = (
    "/docs"
    if settings.config.app_env == backend.util.settings.AppEnvironment.LOCAL
@ -62,8 +55,24 @@ app = fastapi.FastAPI(
    docs_url=docs_url,
)


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


app.add_exception_handler(ValueError, handle_internal_http_error(400))
app.add_exception_handler(500, handle_internal_http_error(500))
app.add_exception_handler(Exception, handle_internal_http_error(500))
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"])


@ -91,9 +100,7 @@ class AgentServer(backend.util.service.AppProcess):
    async def test_execute_graph(
        graph_id: str, node_input: dict[typing.Any, typing.Any], user_id: str
    ):
        return await backend.server.routers.v1.execute_graph(
            graph_id, node_input, user_id
        )
        return backend.server.routers.v1.execute_graph(graph_id, node_input, user_id)

    @staticmethod
    async def test_create_graph(
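The handler factory above turns unhandled exceptions into uniform JSON error bodies. A minimal reproduction of the pattern outside this app, using only FastAPI:

import fastapi
import fastapi.responses

app = fastapi.FastAPI()

def handle_error(status_code: int = 500):
    def handler(request: fastapi.Request, exc: Exception):
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )
    return handler

# ValueErrors surface as 400s, anything else as a generic 500
app.add_exception_handler(ValueError, handle_error(400))
app.add_exception_handler(Exception, handle_error(500))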
@ -1,10 +1,11 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from typing import Annotated, Any, List
|
||||
from typing import TYPE_CHECKING, Annotated, Any, Sequence
|
||||
|
||||
import pydantic
|
||||
from autogpt_libs.auth.middleware import auth_middleware
|
||||
from autogpt_libs.feature_flag.client import feature_flag
|
||||
from autogpt_libs.utils.cache import thread_cached
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from typing_extensions import Optional, TypedDict
|
||||
@ -30,6 +31,11 @@ from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.data.credit import get_block_costs, get_user_credit_model
|
||||
from backend.data.user import get_or_create_user
|
||||
from backend.executor import ExecutionManager, ExecutionScheduler, scheduler
|
||||
from backend.integrations.creds_manager import IntegrationCredentialsManager
|
||||
from backend.integrations.webhooks.graph_lifecycle_hooks import (
|
||||
on_graph_activate,
|
||||
on_graph_deactivate,
|
||||
)
|
||||
from backend.server.model import (
|
||||
CreateAPIKeyRequest,
|
||||
CreateAPIKeyResponse,
|
||||
@ -41,6 +47,9 @@ from backend.server.utils import get_user_id
|
||||
from backend.util.service import get_service_client
|
||||
from backend.util.settings import Settings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from autogpt_libs.supabase_integration_credentials_store.types import Credentials
|
||||
|
||||
|
||||
@thread_cached
|
||||
def execution_manager_client() -> ExecutionManager:
|
||||
@ -54,6 +63,7 @@ def execution_scheduler_client() -> ExecutionScheduler:
|
||||
|
||||
settings = Settings()
|
||||
logger = logging.getLogger(__name__)
|
||||
integration_creds_manager = IntegrationCredentialsManager()
|
||||
|
||||
|
||||
_user_credit_model = get_user_credit_model()
|
||||
@ -62,14 +72,10 @@ _user_credit_model = get_user_credit_model()
|
||||
v1_router = APIRouter(prefix="/api")
|
||||
|
||||
|
||||
v1_router.dependencies.append(Depends(auth_middleware))
|
||||
|
||||
|
||||
v1_router.include_router(
|
||||
backend.server.integrations.router.router,
|
||||
prefix="/integrations",
|
||||
tags=["integrations"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
|
||||
v1_router.include_router(
|
||||
@ -97,13 +103,17 @@ async def get_or_create_user_route(user_data: dict = Depends(auth_middleware)):
|
||||
|
||||
|
||||
@v1_router.get(path="/blocks", tags=["blocks"], dependencies=[Depends(auth_middleware)])
|
||||
def get_graph_blocks() -> list[dict[Any, Any]]:
|
||||
def get_graph_blocks() -> Sequence[dict[Any, Any]]:
|
||||
blocks = [block() for block in backend.data.block.get_blocks().values()]
|
||||
costs = get_block_costs()
|
||||
return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks]
|
||||
|
||||
|
||||
@v1_router.post(path="/blocks/{block_id}/execute", tags=["blocks"])
|
||||
@v1_router.post(
|
||||
path="/blocks/{block_id}/execute",
|
||||
tags=["blocks"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput:
|
||||
obj = backend.data.block.get_block(block_id)
|
||||
if not obj:
|
||||
@ -141,7 +151,7 @@ class DeleteGraphResponse(TypedDict):
|
||||
async def get_graphs(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
with_runs: bool = False,
|
||||
) -> list[graph_db.Graph]:
|
||||
) -> Sequence[graph_db.Graph]:
|
||||
return await graph_db.get_graphs(
|
||||
include_executions=with_runs, filter_by="active", user_id=user_id
|
||||
)
|
||||
@ -181,13 +191,61 @@ async def get_graph(
|
||||
)
|
||||
async def get_graph_all_versions(
|
||||
graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> list[graph_db.Graph]:
|
||||
) -> Sequence[graph_db.Graph]:
|
||||
graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
|
||||
if not graphs:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
return graphs
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
async def create_new_graph(
|
||||
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> graph_db.Graph:
|
||||
return await do_create_graph(create_graph, is_template=False, user_id=user_id)
|
||||
|
||||
|
||||
async def do_create_graph(
|
||||
create_graph: CreateGraph,
|
||||
is_template: bool,
|
||||
# user_id doesn't have to be annotated like on other endpoints,
|
||||
# because create_graph isn't used directly as an endpoint
|
||||
user_id: str,
|
||||
) -> graph_db.Graph:
|
||||
if create_graph.graph:
|
||||
graph = graph_db.make_graph_model(create_graph.graph, user_id)
|
||||
elif create_graph.template_id:
|
||||
# Create a new graph from a template
|
||||
graph = await graph_db.get_graph(
|
||||
create_graph.template_id,
|
||||
create_graph.template_version,
|
||||
template=True,
|
||||
user_id=user_id,
|
||||
)
|
||||
if not graph:
|
||||
raise HTTPException(
|
||||
400, detail=f"Template #{create_graph.template_id} not found"
|
||||
)
|
||||
graph.version = 1
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Either graph or template_id must be provided."
|
||||
)
|
||||
|
||||
graph.is_template = is_template
|
||||
graph.is_active = not is_template
|
||||
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
|
||||
|
||||
graph = await graph_db.create_graph(graph, user_id=user_id)
|
||||
graph = await on_graph_activate(
|
||||
graph,
|
||||
get_credentials=lambda id: integration_creds_manager.get(user_id, id),
|
||||
)
|
||||
return graph
|
||||
|
||||
|
||||
@v1_router.delete(
|
||||
path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
@ -224,33 +282,41 @@ async def update_graph(
|
||||
latest_version_graph = next(
|
||||
v for v in existing_versions if v.version == latest_version_number
|
||||
)
|
||||
current_active_version = next((v for v in existing_versions if v.is_active), None)
|
||||
if latest_version_graph.is_template != graph.is_template:
|
||||
raise HTTPException(
|
||||
400, detail="Changing is_template on an existing graph is forbidden"
|
||||
)
|
||||
graph.is_active = not graph.is_template
|
||||
graph = graph_db.make_graph_model(graph, user_id)
|
||||
graph.reassign_ids(user_id=user_id)
|
||||
|
||||
new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
|
||||
|
||||
if new_graph_version.is_active:
|
||||
|
||||
def get_credentials(credentials_id: str) -> "Credentials | None":
|
||||
return integration_creds_manager.get(user_id, credentials_id)
|
||||
|
||||
# Handle activation of the new graph first to ensure continuity
|
||||
new_graph_version = await on_graph_activate(
|
||||
new_graph_version,
|
||||
get_credentials=get_credentials,
|
||||
)
|
||||
# Ensure new version is the only active version
|
||||
await graph_db.set_graph_active_version(
|
||||
graph_id=graph_id, version=new_graph_version.version, user_id=user_id
|
||||
)
|
||||
if current_active_version:
|
||||
# Handle deactivation of the previously active version
|
||||
await on_graph_deactivate(
|
||||
current_active_version,
|
||||
get_credentials=get_credentials,
|
||||
)
|
||||
|
||||
return new_graph_version
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
async def create_new_graph(
|
||||
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> graph_db.Graph:
|
||||
return await do_create_graph(create_graph, is_template=False, user_id=user_id)
|
||||
|
||||
|
||||
@v1_router.put(
|
||||
path="/graphs/{graph_id}/versions/active",
|
||||
tags=["graphs"],
|
||||
@ -262,13 +328,34 @@ async def set_graph_active_version(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
):
|
||||
new_active_version = request_body.active_graph_version
|
||||
if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id):
|
||||
new_active_graph = await graph_db.get_graph(
|
||||
graph_id, new_active_version, user_id=user_id
|
||||
)
|
||||
if not new_active_graph:
|
||||
raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found")
|
||||
|
||||
current_active_graph = await graph_db.get_graph(graph_id, user_id=user_id)
|
||||
|
||||
def get_credentials(credentials_id: str) -> "Credentials | None":
|
||||
return integration_creds_manager.get(user_id, credentials_id)
|
||||
|
||||
# Handle activation of the new graph first to ensure continuity
|
||||
await on_graph_activate(
|
||||
new_active_graph,
|
||||
get_credentials=get_credentials,
|
||||
)
|
||||
# Ensure new version is the only active version
|
||||
await graph_db.set_graph_active_version(
|
||||
graph_id=graph_id,
|
||||
version=request_body.active_graph_version,
|
||||
version=new_active_version,
|
||||
user_id=user_id,
|
||||
)
|
||||
if current_active_graph and current_active_graph.version != new_active_version:
|
||||
# Handle deactivation of the previously active version
|
||||
await on_graph_deactivate(
|
||||
current_active_graph,
|
||||
get_credentials=get_credentials,
|
||||
)
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
@ -276,7 +363,7 @@ async def set_graph_active_version(
|
||||
tags=["graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def execute_graph(
|
||||
def execute_graph(
|
||||
graph_id: str,
|
||||
node_input: dict[Any, Any],
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
@ -298,7 +385,7 @@ async def execute_graph(
|
||||
)
|
||||
async def stop_graph_run(
|
||||
graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> list[execution_db.ExecutionResult]:
|
||||
) -> Sequence[execution_db.ExecutionResult]:
|
||||
if not await execution_db.get_graph_execution(graph_exec_id, user_id):
|
||||
raise HTTPException(404, detail=f"Agent execution #{graph_exec_id} not found")
|
||||
|
||||
@ -319,7 +406,7 @@ async def list_graph_runs(
|
||||
graph_id: str,
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
graph_version: int | None = None,
|
||||
) -> list[str]:
|
||||
) -> Sequence[str]:
|
||||
graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id)
|
||||
if not graph:
|
||||
rev = "" if graph_version is None else f" v{graph_version}"
|
||||
@ -339,7 +426,7 @@ async def get_graph_run_node_execution_results(
|
||||
graph_id: str,
|
||||
graph_exec_id: str,
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
) -> list[execution_db.ExecutionResult]:
|
||||
) -> Sequence[execution_db.ExecutionResult]:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=user_id)
|
||||
if not graph:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
@ -378,7 +465,7 @@ async def get_graph_run_status(
|
||||
)
|
||||
async def get_templates(
|
||||
user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> list[graph_db.Graph]:
|
||||
) -> Sequence[graph_db.Graph]:
|
||||
return await graph_db.get_graphs(filter_by="template", user_id=user_id)
|
||||
|
||||
|
||||
@ -394,40 +481,6 @@ async def get_template(graph_id: str, version: int | None = None) -> graph_db.Gr
|
||||
return graph
|
||||
|
||||
|
||||
async def do_create_graph(
|
||||
create_graph: CreateGraph,
|
||||
is_template: bool,
|
||||
# user_id doesn't have to be annotated like on other endpoints,
|
||||
    # because create_graph isn't used directly as an endpoint
    user_id: str,
) -> graph_db.Graph:
    if create_graph.graph:
        graph = create_graph.graph
    elif create_graph.template_id:
        # Create a new graph from a template
        graph = await graph_db.get_graph(
            create_graph.template_id,
            create_graph.template_version,
            template=True,
            user_id=user_id,
        )
        if not graph:
            raise HTTPException(
                400, detail=f"Template #{create_graph.template_id} not found"
            )
        graph.version = 1
    else:
        raise HTTPException(
            status_code=400, detail="Either graph or template_id must be provided."
        )

    graph.is_template = is_template
    graph.is_active = not is_template
    graph.reassign_ids(user_id=user_id, reassign_graph_id=True)

    return await graph_db.create_graph(graph, user_id=user_id)


@v1_router.post(
    path="/templates",
    tags=["templates", "graphs"],
@ -481,7 +534,7 @@ async def create_schedule(
    tags=["schedules"],
    dependencies=[Depends(auth_middleware)],
)
async def delete_schedule(
def delete_schedule(
    schedule_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
) -> dict[Any, Any]:
@ -494,7 +547,7 @@ async def delete_schedule(
    tags=["schedules"],
    dependencies=[Depends(auth_middleware)],
)
async def get_execution_schedules(
def get_execution_schedules(
    user_id: Annotated[str, Depends(get_user_id)],
    graph_id: str | None = None,
) -> list[scheduler.JobInfo]:
@ -511,10 +564,11 @@ async def get_execution_schedules(

@v1_router.post(
    "/api-keys",
    response_model=CreateAPIKeyResponse,
    response_model=list[CreateAPIKeyResponse] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def create_api_key(
    request: CreateAPIKeyRequest, user_id: Annotated[str, Depends(get_user_id)]
) -> CreateAPIKeyResponse:
@ -534,13 +588,14 @@ async def create_api_key(

@v1_router.get(
    "/api-keys",
    response_model=List[APIKeyWithoutHash],
    response_model=list[APIKeyWithoutHash] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def get_api_keys(
    user_id: Annotated[str, Depends(get_user_id)]
) -> List[APIKeyWithoutHash]:
) -> list[APIKeyWithoutHash]:
    """List all API keys for the user"""
    try:
        return await list_user_api_keys(user_id)
@ -551,10 +606,11 @@ async def get_api_keys(

@v1_router.get(
    "/api-keys/{key_id}",
    response_model=APIKeyWithoutHash,
    response_model=list[APIKeyWithoutHash] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def get_api_key(
    key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> APIKeyWithoutHash:
@ -571,10 +627,11 @@ async def get_api_key(

@v1_router.delete(
    "/api-keys/{key_id}",
    response_model=APIKeyWithoutHash,
    response_model=list[APIKeyWithoutHash] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def delete_api_key(
    key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> Optional[APIKeyWithoutHash]:
@ -592,10 +649,11 @@ async def delete_api_key(

@v1_router.post(
    "/api-keys/{key_id}/suspend",
    response_model=APIKeyWithoutHash,
    response_model=list[APIKeyWithoutHash] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def suspend_key(
    key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> Optional[APIKeyWithoutHash]:
@ -613,10 +671,11 @@ async def suspend_key(

@v1_router.put(
    "/api-keys/{key_id}/permissions",
    response_model=APIKeyWithoutHash,
    response_model=list[APIKeyWithoutHash] | dict[str, str],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def update_permissions(
    key_id: str,
    request: UpdatePermissionsRequest,
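
Each api-keys route above gains @feature_flag("api-keys-enabled") and widens its response_model to a union with dict[str, str], which fits a decorator that short-circuits with a plain message payload when the flag is disabled (the lockfile changes further down add launchdarkly-server-sdk). A minimal sketch of such a decorator; is_feature_enabled is an assumed helper for illustration, not the project's actual API:

from functools import wraps


def is_feature_enabled(flag_key: str, user_id: str) -> bool:
    # Stub for illustration; a real check would go through the flag
    # provider (e.g. a LaunchDarkly client).
    return False


def feature_flag(flag_key: str):
    """Return a plain message instead of calling the route when the flag is off."""

    def decorator(func):
        @wraps(func)
        async def wrapper(*args, user_id: str, **kwargs):
            if not is_feature_enabled(flag_key, user_id):
                return {"message": f"Feature '{flag_key}' is disabled"}
            return await func(*args, user_id=user_id, **kwargs)

        return wrapper

    return decorator
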
@ -8,7 +8,7 @@ from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
from starlette.middleware.cors import CORSMiddleware

from backend.data import redis
from backend.data.queue import AsyncRedisExecutionEventBus
from backend.data.execution import AsyncRedisExecutionEventBus
from backend.data.user import DEFAULT_USER_ID
from backend.server.conn_manager import ConnectionManager
from backend.server.model import ExecutionSubscription, Methods, WsMessage
6
autogpt_platform/backend/backend/util/exceptions.py
Normal file
@ -0,0 +1,6 @@
class MissingConfigError(Exception):
    """The attempted operation requires configuration which is not available"""


class NeedConfirmation(Exception):
    """The user must explicitly confirm that they want to proceed"""
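
Both exceptions are plain marker types. A hedged usage sketch (require_base_url is illustrative, not from the codebase):

from backend.util.exceptions import MissingConfigError


def require_base_url(platform_base_url: str) -> str:
    # Fail loudly when the webhook base URL has not been configured
    if not platform_base_url:
        raise MissingConfigError("platform_base_url is not set")
    return platform_base_url
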
@ -1,6 +1,7 @@
import json
from typing import Any, Type, TypeVar, overload

import jsonschema
from fastapi.encoders import jsonable_encoder

from .type import type_match
@ -30,3 +31,17 @@ def loads(data: str, *args, target_type: Type[T] | None = None, **kwargs) -> Any
    if target_type:
        return type_match(parsed, target_type)
    return parsed


def validate_with_jsonschema(
    schema: dict[str, Any], data: dict[str, Any]
) -> str | None:
    """
    Validate the data against the schema.
    Returns the validation error message if the data does not match the schema.
    """
    try:
        jsonschema.validate(data, schema)
        return None
    except jsonschema.ValidationError as e:
        return str(e)
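
validate_with_jsonschema inverts the usual jsonschema flow: instead of letting ValidationError propagate, it returns the error message as a string, or None on success. Illustrative usage:

schema = {
    "type": "object",
    "properties": {"url": {"type": "string"}},
    "required": ["url"],
}

# Valid payload: returns None
assert validate_with_jsonschema(schema, {"url": "https://example.com"}) is None

# Invalid payload: returns the error message instead of raising
error = validate_with_jsonschema(schema, {})
assert error is not None and "'url' is a required property" in error
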
@ -1,5 +1,6 @@
import logging
import os
import threading
from functools import wraps
from uuid import uuid4

@ -16,7 +17,7 @@ def _log_prefix(resource_name: str, conn_id: str):
    This needs to be called on the fly to get the current process ID & service name,
    not the parent process ID & service name.
    """
    return f"[PID-{os.getpid()}|{get_service_name()}|{resource_name}-{conn_id}]"
    return f"[PID-{os.getpid()}|THREAD-{threading.get_native_id()}|{get_service_name()}|{resource_name}-{conn_id}]"


def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
@ -25,7 +26,7 @@ def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
    def on_retry(retry_state):
        prefix = _log_prefix(resource_name, conn_id)
        exception = retry_state.outcome.exception()
        logger.info(f"{prefix} {action_name} failed: {exception}. Retrying now...")
        logger.error(f"{prefix} {action_name} failed: {exception}. Retrying now...")

    def decorator(func):
        @wraps(func)
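
on_retry(retry_state) matches the tenacity callback signature (retry_state.outcome.exception()), so the info-to-error change above raises the log level of every failed connection attempt. A self-contained sketch of the same pattern, assuming plain tenacity rather than the project's conn_retry wrapper:

import logging

from tenacity import retry, stop_after_attempt, wait_exponential

logger = logging.getLogger(__name__)


def on_retry(retry_state):
    # Runs after each failed attempt; surface the failure at ERROR level
    exception = retry_state.outcome.exception()
    logger.error(f"Connecting failed: {exception}. Retrying now...")


@retry(stop=stop_after_attempt(5), wait=wait_exponential(max=10), after=on_retry)
def connect():
    raise ConnectionError("service not up yet")
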
@ -11,6 +11,7 @@ from types import NoneType, UnionType
from typing import (
    Annotated,
    Any,
    Awaitable,
    Callable,
    Coroutine,
    Dict,
@ -64,7 +65,13 @@ def expose(func: C) -> C:
            logger.exception(msg)
            raise

    # Register custom serializers and deserializers for annotated Pydantic models
    register_pydantic_serializers(func)

    return pyro.expose(wrapper)  # type: ignore


def register_pydantic_serializers(func: Callable):
    """Register custom serializers and deserializers for annotated Pydantic models"""
    for name, annotation in func.__annotations__.items():
        try:
            pydantic_types = _pydantic_models_from_type_annotation(annotation)
@ -81,8 +88,6 @@ def expose(func: C) -> C:
                model.__qualname__, _make_custom_deserializer(model)
            )

    return pyro.expose(wrapper)  # type: ignore


def _make_custom_serializer(model: Type[BaseModel]):
    def custom_class_to_dict(obj):
@ -120,7 +125,7 @@ class AppService(AppProcess, ABC):

    @classmethod
    def get_host(cls) -> str:
        return os.environ.get(f"{cls.service_name.upper()}_HOST", Config().pyro_host)
        return os.environ.get(f"{cls.service_name.upper()}_HOST", config.pyro_host)

    def run_service(self) -> None:
        while True:
@ -170,14 +175,13 @@ class AppService(AppProcess, ABC):

    @conn_retry("Pyro", "Starting Pyro Service")
    def __start_pyro(self):
        conf = Config()
        maximum_connection_thread_count = max(
            Pyro5.config.THREADPOOL_SIZE,
            conf.num_node_workers * conf.num_graph_workers,
            config.num_node_workers * config.num_graph_workers,
        )

        Pyro5.config.THREADPOOL_SIZE = maximum_connection_thread_count  # type: ignore
        daemon = Pyro5.api.Daemon(host=conf.pyro_host, port=self.get_port())
        daemon = Pyro5.api.Daemon(host=config.pyro_host, port=self.get_port())
        self.uri = daemon.register(self, objectId=self.service_name)
        logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
        daemon.requestLoop()
@ -209,7 +213,7 @@ def get_service_client(service_type: Type[AS]) -> AS:
    class DynamicClient(PyroClient):
        @conn_retry("Pyro", f"Connecting to [{service_name}]")
        def __init__(self):
            host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
            host = os.environ.get(f"{service_name.upper()}_HOST", pyro_host)
            uri = f"PYRO:{service_type.service_name}@{host}:{service_type.get_port()}"
            logger.debug(f"Connecting to service [{service_name}]. URI = {uri}")
            self.proxy = Pyro5.api.Proxy(uri)
@ -253,6 +257,10 @@ def _pydantic_models_from_type_annotation(annotation) -> Iterator[type[BaseModel
        key_type, value_type = args
        yield from _pydantic_models_from_type_annotation(key_type)
        yield from _pydantic_models_from_type_annotation(value_type)
    elif origin in (Awaitable, Coroutine):
        # For coroutines and awaitables, check the return type
        return_type = args[-1]
        yield from _pydantic_models_from_type_annotation(return_type)
    else:
        annotype = annotation if origin is None else origin
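
register_pydantic_serializers walks a function's annotations (recursing through Awaitable and Coroutine return types, per the hunk above) and registers a converter pair per Pydantic model. A hedged sketch of the Pyro5 primitive this presumably builds on, with an illustrative model (JobInfo below is not the codebase's class):

from Pyro5.serializers import SerializerBase
from pydantic import BaseModel


class JobInfo(BaseModel):
    id: str
    cron: str


def to_dict(obj):
    # Pyro dispatches deserialization on the "__class__" key
    return {"__class__": type(obj).__qualname__, **obj.model_dump()}


def from_dict(classname, data):
    data.pop("__class__", None)
    return JobInfo(**data)


SerializerBase.register_class_to_dict(JobInfo, to_dict)
SerializerBase.register_dict_to_class(JobInfo.__qualname__, from_dict)
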
@ -3,7 +3,7 @@ import os
from enum import Enum
from typing import Any, Dict, Generic, List, Set, Tuple, Type, TypeVar

from pydantic import BaseModel, Field, PrivateAttr, field_validator
from pydantic import BaseModel, Field, PrivateAttr, ValidationInfo, field_validator
from pydantic_settings import (
    BaseSettings,
    JsonConfigSettingsSource,
@ -136,12 +136,32 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        description="The port for agent server API to run on",
    )

    platform_base_url: str = Field(
        default="",
        description="Must be set so the application knows where it's hosted at. "
        "This is necessary to make sure webhooks find their way.",
    )

    frontend_base_url: str = Field(
        default="http://localhost:3000",
        default="",
        description="Can be used to explicitly set the base URL for the frontend. "
        "This value is then used to generate redirect URLs for OAuth flows.",
    )

    @field_validator("platform_base_url", "frontend_base_url")
    @classmethod
    def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
        if not v:
            return v
        if not v.startswith(("http://", "https://")):
            raise ValueError(
                f"{info.field_name} must be a full URL "
                "including a http:// or https:// schema"
            )
        if v.endswith("/"):
            return v.rstrip("/")  # Remove trailing slash
        return v

    app_env: AppEnvironment = Field(
        default=AppEnvironment.LOCAL,
        description="The name of the app environment: local or dev or prod",
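
The new validator normalizes both base-URL fields the same way: empty values pass through (so "not configured" stays detectable), a missing scheme is rejected, and trailing slashes are stripped. Expected behavior as a sketch, assuming the other Config fields keep their defaults:

config = Config(platform_base_url="https://platform.example.com/")
assert config.platform_base_url == "https://platform.example.com"

# Unset stays unset
assert Config(frontend_base_url="").frontend_base_url == ""

# A bare hostname is rejected because webhooks need a full URL
try:
    Config(platform_base_url="platform.example.com")
except ValueError as e:
    print(e)  # platform_base_url must be a full URL including a http:// or https:// schema
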
@ -1,9 +1,10 @@
import logging
import time
from typing import Sequence

from backend.data import db
from backend.data.block import Block, initialize_blocks
from backend.data.execution import ExecutionStatus
from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.user import create_default_user
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
@ -57,7 +58,7 @@ async def wait_execution(
    graph_id: str,
    graph_exec_id: str,
    timeout: int = 20,
) -> list:
) -> Sequence[ExecutionResult]:
    async def is_execution_completed():
        status = await AgentServer().test_get_graph_run_status(
            graph_id, graph_exec_id, user_id
@ -0,0 +1,26 @@
-- AlterTable
ALTER TABLE "AgentNode" ADD COLUMN "webhookId" TEXT;

-- CreateTable
CREATE TABLE "IntegrationWebhook" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3),
    "userId" TEXT NOT NULL,
    "provider" TEXT NOT NULL,
    "credentialsId" TEXT NOT NULL,
    "webhookType" TEXT NOT NULL,
    "resource" TEXT NOT NULL,
    "events" TEXT[],
    "config" JSONB NOT NULL,
    "secret" TEXT NOT NULL,
    "providerWebhookId" TEXT NOT NULL,

    CONSTRAINT "IntegrationWebhook_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "AgentNode" ADD CONSTRAINT "AgentNode_webhookId_fkey" FOREIGN KEY ("webhookId") REFERENCES "IntegrationWebhook"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "IntegrationWebhook" ADD CONSTRAINT "IntegrationWebhook_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
491
autogpt_platform/backend/poetry.lock
generated
@ -2,17 +2,18 @@

[[package]]
name = "aio-pika"
version = "9.4.3"
version = "9.5.0"
description = "Wrapper around the aiormq for asyncio and humans"
optional = false
python-versions = "<4.0,>=3.8"
files = [
    {file = "aio_pika-9.4.3-py3-none-any.whl", hash = "sha256:f1423d2d5a8b7315d144efe1773763bf687ac17aa1535385982687e9e5ed49bb"},
    {file = "aio_pika-9.4.3.tar.gz", hash = "sha256:fd2b1fce25f6ed5203ef1dd554dc03b90c9a46a64aaf758d032d78dc31e5295d"},
    {file = "aio_pika-9.5.0-py3-none-any.whl", hash = "sha256:7e03b80fab5a0d354dca45fb5ac95f074b87c639db34c6a1962cabe0fd95bd56"},
    {file = "aio_pika-9.5.0.tar.gz", hash = "sha256:d45d49e6543bcdfd2fe4b1a0ee59d931c15a96e99497bc599cf0fddb94061925"},
]

[package.dependencies]
aiormq = ">=6.8.0,<6.9.0"
exceptiongroup = ">=1,<2"
yarl = "*"

[[package]]
@ -226,28 +227,27 @@ trio = ["trio (>=0.26.1)"]

[[package]]
name = "apscheduler"
version = "3.10.4"
version = "3.11.0"
description = "In-process task scheduler with Cron-like capabilities"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
files = [
    {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"},
    {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"},
    {file = "APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da"},
    {file = "apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133"},
]

[package.dependencies]
pytz = "*"
six = ">=1.4.0"
tzlocal = ">=2.0,<3.dev0 || >=4.dev0"
tzlocal = ">=3.0"

[package.extras]
doc = ["sphinx", "sphinx-rtd-theme"]
doc = ["packaging", "sphinx", "sphinx-rtd-theme (>=1.3.0)"]
etcd = ["etcd3", "protobuf (<=3.21.0)"]
gevent = ["gevent"]
mongodb = ["pymongo (>=3.0)"]
redis = ["redis (>=3.0)"]
rethinkdb = ["rethinkdb (>=2.4.0)"]
sqlalchemy = ["sqlalchemy (>=1.4)"]
testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"]
test = ["APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]", "PySide6", "anyio (>=4.5.2)", "gevent", "pytest", "pytz", "twisted"]
tornado = ["tornado (>=4.3)"]
twisted = ["twisted"]
zookeeper = ["kazoo"]
@ -297,9 +297,11 @@ expiringdict = "^1.2.2"
google-cloud-logging = "^3.11.3"
pydantic = "^2.9.2"
pydantic-settings = "^2.6.1"
pyjwt = "^2.8.0"
pyjwt = "^2.10.0"
pytest-asyncio = "^0.24.0"
pytest-mock = "^3.14.0"
python-dotenv = "^1.0.1"
supabase = "^2.9.1"
supabase = "^2.10.0"

[package.source]
type = "directory"
@ -642,20 +644,20 @@ test-randomorder = ["pytest-randomly"]

[[package]]
name = "deprecated"
version = "1.2.14"
version = "1.2.15"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
files = [
    {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
    {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
    {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"},
    {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"},
]

[package.dependencies]
wrapt = ">=1.10,<2"

[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"]

[[package]]
name = "deprecation"
@ -732,13 +734,13 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"]

[[package]]
name = "fastapi"
version = "0.115.4"
version = "0.115.5"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
    {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
    {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
    {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"},
    {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"},
]

[package.dependencies]
@ -899,13 +901,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]

[[package]]
name = "google-api-python-client"
version = "2.151.0"
version = "2.154.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
    {file = "google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = "sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c"},
    {file = "google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034"},
    {file = "google_api_python_client-2.154.0-py2.py3-none-any.whl", hash = "sha256:a521bbbb2ec0ba9d6f307cdd64ed6e21eeac372d1bd7493a4ab5022941f784ad"},
    {file = "google_api_python_client-2.154.0.tar.gz", hash = "sha256:1b420062e03bfcaa1c79e2e00a612d29a6a934151ceb3d272fe150a656dc8f17"},
]

[package.dependencies]
@ -973,19 +975,22 @@ tool = ["click (>=6.0.0)"]

[[package]]
name = "google-cloud-appengine-logging"
version = "1.4.5"
version = "1.5.0"
description = "Google Cloud Appengine Logging API client library"
optional = false
python-versions = ">=3.7"
files = [
    {file = "google_cloud_appengine_logging-1.4.5-py2.py3-none-any.whl", hash = "sha256:344e0244404049b42164e4d6dc718ca2c81b393d066956e7cb85fd9407ed9c48"},
    {file = "google_cloud_appengine_logging-1.4.5.tar.gz", hash = "sha256:de7d766e5d67b19fc5833974b505b32d2a5bbdfb283fd941e320e7cfdae4cb83"},
    {file = "google_cloud_appengine_logging-1.5.0-py2.py3-none-any.whl", hash = "sha256:81e36606e13c377c4898c918542888abb7a6896837ac5f559011c7729fc63d8a"},
    {file = "google_cloud_appengine_logging-1.5.0.tar.gz", hash = "sha256:39a2df694d97981ed00ef5df541f7cfcca920a92496707557f2b07bb7ba9d67a"},
]

[package.dependencies]
google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev"
proto-plus = ">=1.22.3,<2.0.0dev"
proto-plus = [
    {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""},
    {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
]
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"

[[package]]
@ -1180,13 +1185,13 @@ test = ["objgraph", "psutil"]

[[package]]
name = "groq"
version = "0.11.0"
version = "0.12.0"
description = "The official Python library for the groq API"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "groq-0.11.0-py3-none-any.whl", hash = "sha256:e328531c979542e563668c62260aec13b43a6ee0ca9e2fb22dff1d26f8c8ce54"},
    {file = "groq-0.11.0.tar.gz", hash = "sha256:dbb9aefedf388ddd4801ec7bf3eba7f5edb67948fec0cd2829d97244059f42a7"},
    {file = "groq-0.12.0-py3-none-any.whl", hash = "sha256:e8aa1529f82a01b2d15394b7ea242af9ee9387f65bdd1b91ce9a10f5a911dac1"},
    {file = "groq-0.12.0.tar.gz", hash = "sha256:569229e2dadfc428b0df3d2987407691a4e3bc035b5849a65ef4909514a4605e"},
]

[package.dependencies]
@ -1215,85 +1220,85 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4

[[package]]
name = "grpcio"
version = "1.66.2"
version = "1.68.0"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
files = [
    {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"},
    {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"},
    {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"},
    {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"},
    {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"},
    {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"},
    {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"},
    {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"},
    {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"},
    {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"},
    {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"},
    {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"},
    {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"},
    {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"},
    {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"},
    {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"},
    {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"},
    {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"},
    {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"},
    {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"},
    {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"},
    {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"},
    {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"},
    {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"},
    {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"},
    {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"},
    {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"},
    {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"},
    {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"},
    {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"},
    {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"},
    {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"},
    {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"},
    {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"},
    {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"},
    {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"},
    {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"},
    {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"},
    {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"},
    {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"},
    {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"},
    {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"},
    {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"},
    {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"},
    {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"},
    {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"},
    {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"},
    {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"},
    {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"},
    {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"},
    {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"},
    {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"},
    {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"},
    {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"},
    {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"},
    {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"},
    {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"},
    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"},
    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"},
    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"},
    {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"},
    {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"},
    {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"},
    {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"},
    {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"},
    {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"},
    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"},
    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"},
    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"},
    {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"},
    {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"},
    {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"},
    {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"},
    {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"},
    {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"},
    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"},
    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"},
    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"},
    {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"},
    {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"},
    {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"},
    {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"},
    {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"},
    {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"},
    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"},
    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"},
    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"},
    {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"},
    {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"},
    {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"},
    {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"},
    {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"},
    {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"},
    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"},
    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"},
    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"},
    {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"},
    {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"},
    {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"},
    {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"},
    {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"},
    {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"},
    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"},
    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"},
    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"},
    {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"},
    {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"},
    {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"},
    {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"},
    {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"},
]

[package.extras]
protobuf = ["grpcio-tools (>=1.66.2)"]
protobuf = ["grpcio-tools (>=1.68.0)"]

[[package]]
name = "grpcio-status"
version = "1.66.2"
version = "1.68.0"
description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.8"
files = [
    {file = "grpcio_status-1.66.2-py3-none-any.whl", hash = "sha256:e5fe189f6897d12aa9cd74408a17ca41e44fad30871cf84f5cbd17bd713d2455"},
    {file = "grpcio_status-1.66.2.tar.gz", hash = "sha256:fb55cbb5c2e67062f7a4d5c99e489d074fb57e98678d5c3c6692a2d74d89e9ae"},
    {file = "grpcio_status-1.68.0-py3-none-any.whl", hash = "sha256:0a71b15d989f02df803b4ba85c5bf1f43aeaa58ac021e5f9974b8cadc41f784d"},
    {file = "grpcio_status-1.68.0.tar.gz", hash = "sha256:8369823de22ab6a2cddb3804669c149ae7a71819e127c2dca7c2322028d52bea"},
]

[package.dependencies]
googleapis-common-protos = ">=1.5.5"
grpcio = ">=1.66.2"
grpcio = ">=1.68.0"
protobuf = ">=5.26.1,<6.0dev"

[[package]]
@ -1370,51 +1375,58 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0

[[package]]
name = "httptools"
version = "0.6.1"
version = "0.6.4"
description = "A collection of framework independent HTTP protocol utils."
optional = false
python-versions = ">=3.8.0"
files = [
    {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"},
    {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"},
    {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"},
    {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"},
    {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"},
    {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"},
    {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"},
    {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"},
    {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"},
    {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"},
    {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"},
    {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"},
    {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"},
    {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"},
    {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"},
    {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"},
    {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"},
    {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"},
    {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"},
    {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"},
    {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"},
    {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"},
    {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"},
    {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"},
    {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"},
    {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"},
    {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"},
    {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"},
    {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"},
    {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"},
    {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"},
    {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"},
    {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"},
    {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"},
    {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"},
    {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"},
    {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"},
    {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"},
    {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"},
    {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"},
    {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"},
    {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"},
    {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"},
    {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"},
    {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"},
    {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"},
    {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"},
    {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"},
    {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"},
    {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"},
    {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"},
    {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"},
    {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"},
    {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"},
    {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"},
    {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"},
    {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"},
    {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"},
    {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"},
    {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"},
    {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"},
    {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"},
    {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"},
    {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"},
    {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"},
    {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"},
    {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"},
    {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"},
    {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"},
    {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"},
    {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"},
    {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"},
    {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"},
    {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"},
    {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"},
    {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"},
    {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"},
    {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"},
    {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"},
]

[package.extras]
test = ["Cython (>=0.29.24,<0.30.0)"]
test = ["Cython (>=0.29.24)"]

[[package]]
|
||||
name = "httpx"
|
||||
@ -1469,22 +1481,26 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "8.4.0"
|
||||
version = "8.5.0"
|
||||
description = "Read metadata from Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"},
|
||||
{file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"},
|
||||
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
|
||||
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
zipp = ">=0.5"
|
||||
zipp = ">=3.20"
|
||||
|
||||
[package.extras]
|
||||
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
|
||||
cover = ["pytest-cov"]
|
||||
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
enabler = ["pytest-enabler (>=2.2)"]
|
||||
perf = ["ipython"]
|
||||
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
|
||||
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
|
||||
type = ["pytest-mypy"]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
@ -1644,6 +1660,45 @@ files = [
|
||||
[package.dependencies]
|
||||
referencing = ">=0.31.0"
|
||||
|
||||
[[package]]
|
||||
name = "launchdarkly-eventsource"
|
||||
version = "1.2.0"
|
||||
description = "LaunchDarkly SSE Client"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "launchdarkly_eventsource-1.2.0-py3-none-any.whl", hash = "sha256:9b5ec7149e2ad9995be22ad5361deb480c229701e6b0cc799e94aa14f067b77b"},
|
||||
{file = "launchdarkly_eventsource-1.2.0.tar.gz", hash = "sha256:8cb3301ec0daeb5e17eaa37b3b65f6660fab851b317e69271185ef2fb42c2fde"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
urllib3 = ">=1.26.0,<3"
|
||||
|
||||
[[package]]
|
||||
name = "launchdarkly-server-sdk"
|
||||
version = "9.8.0"
|
||||
description = "LaunchDarkly SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "launchdarkly_server_sdk-9.8.0-py3-none-any.whl", hash = "sha256:8cb72f3cd283bd3b1954d59b8197f1467b35d5c10449904aaf560d59d4ceb368"},
|
||||
{file = "launchdarkly_server_sdk-9.8.0.tar.gz", hash = "sha256:e50a5eef770a5d0c609cf823c60ad9526f2f645e67efc638af31e7582ff62050"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2018.4.16"
|
||||
expiringdict = ">=1.1.4"
|
||||
launchdarkly-eventsource = ">=1.1.0,<2.0.0"
|
||||
pyRFC3339 = ">=1.0"
|
||||
semver = ">=2.10.2"
|
||||
urllib3 = ">=1.26.0,<3"
|
||||
|
||||
[package.extras]
|
||||
consul = ["python-consul (>=1.0.1)"]
|
||||
dynamodb = ["boto3 (>=1.9.71)"]
|
||||
redis = ["redis (>=2.10.5)"]
|
||||
test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "2.1.5"
|
||||
@ -1868,27 +1923,28 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]

[[package]]
name = "ollama"
version = "0.3.3"
version = "0.4.1"
description = "The official Python client for Ollama."
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "ollama-0.3.3-py3-none-any.whl", hash = "sha256:ca6242ce78ab34758082b7392df3f9f6c2cb1d070a9dede1a4c545c929e16dba"},
{file = "ollama-0.3.3.tar.gz", hash = "sha256:f90a6d61803117f40b0e8ff17465cab5e1eb24758a473cfe8101aff38bc13b51"},
{file = "ollama-0.4.1-py3-none-any.whl", hash = "sha256:b6fb16aa5a3652633e1716acb12cf2f44aa18beb229329e46a0302734822dfad"},
{file = "ollama-0.4.1.tar.gz", hash = "sha256:8c6b5e7ff80dd0b8692150b03359f60bac7ca162b088c604069409142a684ad3"},
]

[package.dependencies]
httpx = ">=0.27.0,<0.28.0"
pydantic = ">=2.9.0,<3.0.0"

[[package]]
name = "openai"
version = "1.54.3"
version = "1.55.1"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
files = [
{file = "openai-1.54.3-py3-none-any.whl", hash = "sha256:f18dbaf09c50d70c4185b892a2a553f80681d1d866323a2da7f7be2f688615d5"},
{file = "openai-1.54.3.tar.gz", hash = "sha256:7511b74eeb894ac0b0253dc71f087a15d2e4d71d22d0088767205143d880cca6"},
{file = "openai-1.55.1-py3-none-any.whl", hash = "sha256:d10d96a4f9dc5f05d38dea389119ec8dcd24bc9698293c8357253c601b4a77a5"},
{file = "openai-1.55.1.tar.gz", hash = "sha256:471324321e7739214f16a544e801947a046d3c5d516fae8719a317234e4968d3"},
]

[package.dependencies]
@ -1906,18 +1962,18 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]

[[package]]
name = "opentelemetry-api"
version = "1.27.0"
version = "1.28.2"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
files = [
{file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"},
{file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"},
{file = "opentelemetry_api-1.28.2-py3-none-any.whl", hash = "sha256:6fcec89e265beb258fe6b1acaaa3c8c705a934bd977b9f534a2b7c0d2d4275a6"},
{file = "opentelemetry_api-1.28.2.tar.gz", hash = "sha256:ecdc70c7139f17f9b0cf3742d57d7020e3e8315d6cffcdf1a12a905d45b19cc0"},
]

[package.dependencies]
deprecated = ">=1.2.6"
importlib-metadata = ">=6.0,<=8.4.0"
importlib-metadata = ">=6.0,<=8.5.0"

[[package]]
name = "packaging"
@ -1986,8 +2042,8 @@ python-dateutil = ">=2.5.3"
tqdm = ">=4.64.1"
typing-extensions = ">=3.7.4"
urllib3 = [
{version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""},
{version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""},
{version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""},
]

[package.extras]
@ -2051,13 +2107,13 @@ testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "poethepoet"
version = "0.30.0"
version = "0.31.0"
description = "A task runner that works well with poetry."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "poethepoet-0.30.0-py3-none-any.whl", hash = "sha256:bf875741407a98da9e96f2f2d0b2c4c34f56d89939a7f53a4b6b3a64b546ec4e"},
{file = "poethepoet-0.30.0.tar.gz", hash = "sha256:9f7ccda2d6525616ce989ca8ef973739fd668f50bef0b9d3631421d504d9ae4a"},
{file = "poethepoet-0.31.0-py3-none-any.whl", hash = "sha256:5067c5adf9f228b8af1f3df7d57dc319ed8b3f153bf21faf99f7b74494174c3d"},
{file = "poethepoet-0.31.0.tar.gz", hash = "sha256:b1cffb120149101b02ffa0583c6e61dfee53953a741df3dabf179836bdef97f5"},
]

[package.dependencies]
@ -2156,13 +2212,13 @@ node = ["nodejs-bin"]

[[package]]
name = "proto-plus"
version = "1.24.0"
version = "1.25.0"
description = "Beautiful, Pythonic protocol buffers."
optional = false
python-versions = ">=3.7"
files = [
{file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
{file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
{file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"},
{file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"},
]

[package.dependencies]
@ -2359,8 +2415,8 @@ files = [
annotated-types = ">=0.6.0"
pydantic-core = "2.23.4"
typing-extensions = [
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
]

[package.extras]
@ -2501,13 +2557,13 @@ files = [

[[package]]
name = "pyjwt"
version = "2.9.0"
version = "2.10.0"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"},
{file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"},
{file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"},
{file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"},
]

[package.extras]
@ -2530,6 +2586,17 @@ files = [
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]

[[package]]
name = "pyrfc3339"
version = "2.0.1"
description = "Generate and parse RFC 3339 timestamps"
optional = false
python-versions = "*"
files = [
{file = "pyRFC3339-2.0.1-py3-none-any.whl", hash = "sha256:30b70a366acac3df7386b558c21af871522560ed7f3f73cf344b8c2cbb8b0c9d"},
{file = "pyrfc3339-2.0.1.tar.gz", hash = "sha256:e47843379ea35c1296c3b6c67a948a1a490ae0584edfcbdea0eaffb5dd29960b"},
]

[[package]]
name = "pyright"
version = "1.1.389"
@ -2604,6 +2671,23 @@ pytest = ">=8.2,<9"
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]

[[package]]
name = "pytest-mock"
version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]

[package.dependencies]
pytest = ">=6.2.5"

[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]

[[package]]
name = "pytest-watcher"
version = "0.4.3"
@ -2772,20 +2856,20 @@ rpds-py = ">=0.7.0"

[[package]]
name = "replicate"
version = "1.0.3"
version = "1.0.4"
description = "Python client for Replicate"
optional = false
python-versions = ">=3.8"
files = [
{file = "replicate-1.0.3-py3-none-any.whl", hash = "sha256:8c49d63444b7ea9ac1d6af99eb23a01efb5b7f079cc8a020d6f52b38843db1da"},
{file = "replicate-1.0.3.tar.gz", hash = "sha256:0fd9ca5230fe67c42e4508dd96a5b1414b3fefa5342f8921dbb63c74266cb130"},
{file = "replicate-1.0.4-py3-none-any.whl", hash = "sha256:f568f6271ff715067901b6094c23c37373bbcfd7de0ff9b85e9c9ead567e09e7"},
{file = "replicate-1.0.4.tar.gz", hash = "sha256:f718601863ef1f419aa7dcdab1ea8770ba5489b571b86edf840cd506d68758ef"},
]

[package.dependencies]
httpx = ">=0.21.0,<1"
packaging = "*"
pydantic = ">1.10.7"
typing-extensions = ">=4.5.0"
typing_extensions = ">=4.5.0"

[[package]]
name = "requests"
@ -2954,40 +3038,51 @@ pyasn1 = ">=0.1.3"

[[package]]
name = "ruff"
version = "0.7.4"
version = "0.8.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
{file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
{file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
{file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
{file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
{file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
{file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
{file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
{file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
{file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
{file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
{file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
{file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
{file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
{file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
{file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
{file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
{file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
]

[[package]]
name = "semver"
version = "3.0.2"
description = "Python helper for Semantic Versioning (https://semver.org)"
optional = false
python-versions = ">=3.7"
files = [
{file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"},
{file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"},
]

[[package]]
name = "sentry-sdk"
version = "2.18.0"
version = "2.19.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
files = [
{file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"},
{file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"},
{file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"},
{file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"},
]

[package.dependencies]
@ -3013,7 +3108,7 @@ grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
http2 = ["httpcore[http2] (==1.*)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
huggingface-hub = ["huggingface_hub (>=0.22)"]
langchain = ["langchain (>=0.0.210)"]
launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"]
litestar = ["litestar (>=2.0.0)"]
@ -3022,7 +3117,7 @@ openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
openfeature = ["openfeature-sdk (>=0.7.1)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
opentelemetry-experimental = ["opentelemetry-distro"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pure-eval = ["asttokens", "executing", "pure_eval"]
pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
@ -3397,20 +3492,20 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "uvicorn"
version = "0.32.0"
version = "0.32.1"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
files = [
{file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"},
{file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"},
{file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"},
{file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"},
]

[package.dependencies]
click = ">=7.0"
colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
h11 = ">=0.8"
httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""}
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
@ -3419,7 +3514,7 @@ watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standar
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}

[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]

[[package]]
name = "uvloop"
@ -3913,13 +4008,13 @@ requests = "*"

[[package]]
name = "zipp"
version = "3.20.2"
version = "3.21.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
{file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
{file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
{file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
]

[package.extras]
@ -3933,4 +4028,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "e33b0da31247495e8704fee5224f7b0cf53859cd0ce8bafb39889548a649f5fb"
content-hash = "5bc61641d782791b76f39b18625560a6652b02f3ad788f110e0258293032c34a"

@ -9,52 +9,54 @@ packages = [{ include = "backend" }]

[tool.poetry.dependencies]
python = "^3.10"
aio-pika = "^9.4.3"
aio-pika = "^9.5.0"
anthropic = "^0.39.0"
apscheduler = "^3.10.4"
apscheduler = "^3.11.0"
autogpt-libs = { path = "../autogpt_libs", develop = true }
click = "^8.1.7"
croniter = "^5.0.1"
discord-py = "^2.4.0"
fastapi = "^0.115.4"
fastapi = "^0.115.5"
feedparser = "^6.0.11"
flake8 = "^7.0.0"
google-api-python-client = "^2.151.0"
google-api-python-client = "^2.154.0"
google-auth-oauthlib = "^1.2.1"
groq = "^0.11.0"
groq = "^0.12.0"
jinja2 = "^3.1.4"
jsonref = "^1.1.0"
jsonschema = "^4.22.0"
ollama = "^0.3.0"
openai = "^1.54.3"
ollama = "^0.4.1"
openai = "^1.55.1"
praw = "~7.8.1"
prisma = "^0.15.0"
psutil = "^6.1.0"
pydantic = "^2.7.2"
pydantic = "^2.9.2"
pydantic-settings = "^2.3.4"
pyro5 = "^5.15"
pytest = "^8.2.1"
pytest-asyncio = "^0.24.0"
python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.18.0"
sentry-sdk = "2.19.0"
strenum = "^0.4.9"
supabase = "^2.10.0"
tenacity = "^9.0.0"
uvicorn = { extras = ["standard"], version = "^0.32.0" }
uvicorn = { extras = ["standard"], version = "^0.32.1" }
websockets = "^13.1"
youtube-transcript-api = "^0.6.2"
googlemaps = "^4.10.0"
replicate = "^1.0.3"
replicate = "^1.0.4"
pinecone = "^5.3.1"
cryptography = "^43.0.3"
sqlalchemy = "^2.0.36"
psycopg2-binary = "^2.9.10"
launchdarkly-server-sdk = "^9.8.0"
[tool.poetry.group.dev.dependencies]
poethepoet = "^0.30.0"
poethepoet = "^0.31.0"
httpx = "^0.27.0"
pytest-watcher = "^0.4.2"
requests = "^2.32.3"
ruff = "^0.7.4"
ruff = "^0.8.0"
pyright = "^1.1.389"
isort = "^5.13.2"
black = "^24.10.0"

@ -23,6 +23,7 @@ model User {
// Relations
AgentGraphs AgentGraph[]
AgentGraphExecutions AgentGraphExecution[]
IntegrationWebhooks IntegrationWebhook[]
AnalyticsDetails AnalyticsDetails[]
AnalyticsMetrics AnalyticsMetrics[]
UserBlockCredit UserBlockCredit[]
@ -74,6 +75,10 @@ model AgentNode {
// JSON serialized dict[str, str] containing predefined input values.
constantInput String @default("{}")

// For webhook-triggered blocks: reference to the webhook that triggers the node
webhookId String?
Webhook IntegrationWebhook? @relation(fields: [webhookId], references: [id])

// JSON serialized dict[str, str] containing the node metadata.
metadata String @default("{}")

@ -186,6 +191,28 @@ model AgentNodeExecutionInputOutput {
@@unique([referencedByInputExecId, referencedByOutputExecId, name])
}

// Webhook that is registered with a provider and propagates to one or more nodes
model IntegrationWebhook {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime? @updatedAt

userId String
user User @relation(fields: [userId], references: [id], onDelete: Restrict) // Webhooks must be deregistered before deleting

provider String // e.g. 'github'
credentialsId String // relation to the credentials that the webhook was created with
webhookType String // e.g. 'repo'
resource String // e.g. 'Significant-Gravitas/AutoGPT'
events String[] // e.g. ['created', 'updated']
config Json
secret String // crypto string, used to verify payload authenticity

providerWebhookId String // Webhook ID assigned by the provider

AgentNodes AgentNode[]
}

model AnalyticsDetails {
// PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma
// typical uuid() inserts are handled by prisma

@ -67,7 +67,6 @@ services:
- PYRO_HOST=0.0.0.0
- EXECUTIONSCHEDULER_HOST=rest_server
- EXECUTIONMANAGER_HOST=executor
- DATABASEMANAGER_HOST=executor
- FRONTEND_BASE_URL=http://localhost:3000
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
@ -106,8 +105,6 @@ services:
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- AGENTSERVER_HOST=rest_server
- DATABASEMANAGER_HOST=0.0.0.0
- EXECUTIONMANAGER_HOST=0.0.0.0
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
ports:
- "8002:8000"

@ -26,6 +26,7 @@
"@faker-js/faker": "^9.2.0",
"@hookform/resolvers": "^3.9.1",
"@next/third-parties": "^15.0.3",
"@radix-ui/react-alert-dialog": "^1.1.2",
"@radix-ui/react-avatar": "^1.1.1",
"@radix-ui/react-checkbox": "^1.1.2",
"@radix-ui/react-collapsible": "^1.1.1",
@ -52,7 +53,7 @@
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"cmdk": "1.0.4",
"cookie": "1.0.1",
"cookie": "1.0.2",
"date-fns": "^4.1.0",
"dotenv": "^16.4.5",
"elliptic": "6.6.1",
@ -61,44 +62,44 @@
"next": "^14.2.13",
"next-themes": "^0.4.3",
"react": "^18",
"react-day-picker": "^9.3.2",
"react-day-picker": "^9.4.0",
"react-dom": "^18",
"react-hook-form": "^7.53.2",
"react-icons": "^5.3.0",
"react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-shepherd": "^6.1.4",
"react-shepherd": "^6.1.6",
"recharts": "^2.13.3",
"tailwind-merge": "^2.5.4",
"tailwind-merge": "^2.5.5",
"tailwindcss-animate": "^1.0.7",
"uuid": "^11.0.3",
"zod": "^3.23.8"
},
"devDependencies": {
"@chromatic-com/storybook": "^3.2.2",
"@playwright/test": "^1.48.2",
"@storybook/addon-essentials": "^8.4.2",
"@storybook/addon-interactions": "^8.4.2",
"@storybook/addon-links": "^8.4.2",
"@storybook/addon-onboarding": "^8.4.2",
"@storybook/blocks": "^8.4.2",
"@storybook/nextjs": "^8.4.2",
"@playwright/test": "^1.49.0",
"@storybook/addon-essentials": "^8.4.5",
"@storybook/addon-interactions": "^8.4.5",
"@storybook/addon-links": "^8.4.5",
"@storybook/addon-onboarding": "^8.4.5",
"@storybook/blocks": "^8.4.5",
"@storybook/nextjs": "^8.4.5",
"@storybook/react": "^8.3.5",
"@storybook/test": "^8.3.5",
"@storybook/test-runner": "^0.19.1",
"@types/node": "^22.9.0",
"@types/node": "^22.9.3",
"@types/react": "^18",
"@types/react-dom": "^18",
"@types/react-modal": "^3.16.3",
"concurrently": "^9.1.0",
"eslint": "^8",
"eslint-config-next": "15.0.3",
"eslint-plugin-storybook": "^0.11.0",
"eslint-plugin-storybook": "^0.11.1",
"postcss": "^8",
"prettier": "^3.3.3",
"prettier-plugin-tailwindcss": "^0.6.8",
"storybook": "^8.4.2",
"tailwindcss": "^3.4.14",
"prettier-plugin-tailwindcss": "^0.6.9",
"storybook": "^8.4.5",
"tailwindcss": "^3.4.15",
"typescript": "^5"
},
"packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"

@ -1,16 +1,16 @@
"use client";

import { useSearchParams } from "next/navigation";
import FlowEditor from '@/components/Flow';
import FlowEditor from "@/components/Flow";

export default function Home() {
const query = useSearchParams();

return (
<FlowEditor
className="flow-container"
flowID={query.get("flowID") ?? query.get("templateID") ?? undefined}
template={!!query.get("templateID")}
/>
<FlowEditor
className="flow-container"
flowID={query.get("flowID") ?? query.get("templateID") ?? undefined}
template={!!query.get("templateID")}
/>
);
}

@ -74,7 +74,7 @@
}

.agpt-border-input {
@apply border-input focus-visible:border-gray-400 focus-visible:outline-none;
@apply border border-input focus-visible:border-gray-400 focus-visible:outline-none;
}

.agpt-shadow-input {

@ -4,7 +4,7 @@ import { useSupabase } from "@/components/SupabaseProvider";
import { Button } from "@/components/ui/button";
import useUser from "@/hooks/useUser";
import { useRouter } from "next/navigation";
import { useCallback, useContext, useMemo } from "react";
import { useCallback, useContext, useMemo, useState } from "react";
import { FaSpinner } from "react-icons/fa";
import { Separator } from "@/components/ui/separator";
import { useToast } from "@/components/ui/use-toast";
@ -21,6 +21,16 @@ import {
TableRow,
} from "@/components/ui/table";
import { CredentialsProviderName } from "@/lib/autogpt-server-api";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
} from "@/components/ui/alert-dialog";

export default function PrivatePage() {
const { user, isLoading, error } = useUser();
@ -29,15 +39,40 @@ export default function PrivatePage() {
const providers = useContext(CredentialsProvidersContext);
const { toast } = useToast();

const [confirmationDialogState, setConfirmationDialogState] = useState<
| {
open: true;
message: string;
onConfirm: () => void;
onReject: () => void;
}
| { open: false }
>({ open: false });

const removeCredentials = useCallback(
async (provider: CredentialsProviderName, id: string) => {
async (
provider: CredentialsProviderName,
id: string,
force: boolean = false,
) => {
if (!providers || !providers[provider]) {
return;
}

let result;
try {
const { revoked } = await providers[provider].deleteCredentials(id);
if (revoked !== false) {
result = await providers[provider].deleteCredentials(id, force);
} catch (error: any) {
toast({
title: "Something went wrong when deleting credentials: " + error,
variant: "destructive",
duration: 2000,
});
setConfirmationDialogState({ open: false });
return;
}
if (result.deleted) {
if (result.revoked) {
toast({
title: "Credentials deleted",
duration: 2000,
@ -49,11 +84,13 @@ export default function PrivatePage() {
duration: 3000,
});
}
} catch (error: any) {
toast({
title: "Something went wrong when deleting credentials: " + error,
variant: "destructive",
duration: 2000,
setConfirmationDialogState({ open: false });
} else if (result.need_confirmation) {
setConfirmationDialogState({
open: true,
message: result.message,
onConfirm: () => removeCredentials(provider, id, true),
onReject: () => setConfirmationDialogState({ open: false }),
});
}
},
@ -106,7 +143,9 @@ export default function PrivatePage() {
return (
<div className="mx-auto max-w-3xl md:py-8">
<div className="flex items-center justify-between">
<p>Hello {user.email}</p>
<p>
Hello <span data-testid="profile-email">{user.email}</span>
</p>
<Button onClick={() => supabase.auth.signOut()}>
<LogOutIcon className="mr-1.5 size-4" />
Log out
@ -158,6 +197,36 @@ export default function PrivatePage() {
))}
</TableBody>
</Table>

<AlertDialog open={confirmationDialogState.open}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
<AlertDialogDescription>
{confirmationDialogState.open && confirmationDialogState.message}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel
onClick={() =>
confirmationDialogState.open &&
confirmationDialogState.onReject()
}
>
Cancel
</AlertDialogCancel>
<AlertDialogAction
variant="destructive"
onClick={() =>
confirmationDialogState.open &&
confirmationDialogState.onConfirm()
}
>
Continue
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
);
}

@ -28,6 +28,7 @@ import {
} from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { Switch } from "@/components/ui/switch";
import { TextRenderer } from "@/components/ui/render";
import { history } from "./history";
import NodeHandle from "./NodeHandle";
import {
@ -38,6 +39,7 @@ import { getPrimaryCategoryColor } from "@/lib/utils";
import { FlowContext } from "./Flow";
import { Badge } from "./ui/badge";
import NodeOutputs from "./NodeOutputs";
import SchemaTooltip from "./SchemaTooltip";
import { IconCoin } from "./ui/icons";
import * as Separator from "@radix-ui/react-separator";
import * as ContextMenu from "@radix-ui/react-context-menu";
@ -166,7 +168,7 @@ export function CustomNode({
<div key={key}>
<NodeHandle
keyName={key}
isConnected={isHandleConnected(key)}
isConnected={isOutputHandleConnected(key)}
schema={schema.properties[key]}
side="right"
/>
@ -205,16 +207,18 @@ export function CustomNode({

return keys.map(([propKey, propSchema]) => {
const isRequired = data.inputSchema.required?.includes(propKey);
const isConnected = isHandleConnected(propKey);
const isAdvanced = propSchema.advanced;
const isHidden = propSchema.hidden;
const isConnectable =
// No input connection handles on INPUT and WEBHOOK blocks
![BlockUIType.INPUT, BlockUIType.WEBHOOK].includes(nodeType) &&
// No input connection handles for credentials
propKey !== "credentials" &&
// No input connection handles on INPUT blocks
nodeType !== BlockUIType.INPUT &&
// For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle
!(nodeType == BlockUIType.OUTPUT && propKey == "name");
const isConnected = isInputHandleConnected(propKey);
return (
!isHidden &&
(isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
<div key={propKey} data-id={`input-handle-${propKey}`}>
{isConnectable ? (
@ -227,15 +231,15 @@ export function CustomNode({
/>
) : (
propKey != "credentials" && (
<span
className="text-m green mb-0 text-gray-900"
title={propSchema.description}
>
{propSchema.title || beautifyString(propKey)}
</span>
<div className="flex gap-1">
<span className="text-m green mb-0 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
<SchemaTooltip description={propSchema.description} />
</div>
)
)}
{!isConnected && (
{isConnected || (
<NodeGenericInputField
nodeId={id}
propKey={getInputPropKey(propKey)}
@ -298,21 +302,28 @@ export function CustomNode({
setErrors({ ...errors });
};

const isHandleConnected = (key: string) => {
const isInputHandleConnected = (key: string) => {
return (
data.connections &&
data.connections.some((conn: any) => {
if (typeof conn === "string") {
const [source, target] = conn.split(" -> ");
return (
(target.includes(key) && target.includes(data.title)) ||
(source.includes(key) && source.includes(data.title))
);
const [_source, target] = conn.split(" -> ");
return target.includes(key) && target.includes(data.title);
}
return (
(conn.target === id && conn.targetHandle === key) ||
(conn.source === id && conn.sourceHandle === key)
);
return conn.target === id && conn.targetHandle === key;
})
);
};

const isOutputHandleConnected = (key: string) => {
return (
data.connections &&
data.connections.some((conn: any) => {
if (typeof conn === "string") {
const [source, _target] = conn.split(" -> ");
return source.includes(key) && source.includes(data.title);
}
return conn.source === id && conn.sourceHandle === key;
})
);
};
@ -554,9 +565,13 @@ export function CustomNode({
<div className="flex w-full flex-col">
<div className="flex flex-row items-center justify-between">
<div className="font-roboto flex items-center px-3 text-lg font-semibold">
{beautifyString(
data.blockType?.replace(/Block$/, "") || data.title,
)}
<TextRenderer
value={beautifyString(
data.blockType?.replace(/Block$/, "") || data.title,
)}
truncateLengthLimit={80}
/>

<div className="px-2 text-xs text-gray-500">
#{id.split("-")[0]}
</div>

@ -7,7 +7,6 @@ import getServerUser from "@/hooks/getServerUser";
import ProfileDropdown from "./ProfileDropdown";
import { IconCircleUser, IconMenu } from "@/components/ui/icons";
import CreditButton from "@/components/nav/CreditButton";

import { NavBarButtons } from "./nav/NavBarButtons";

export async function NavBar() {
@ -17,7 +16,7 @@ export async function NavBar() {
);
const { user } = await getServerUser();

return (
return user ? (
<header className="sticky top-0 z-50 mx-4 flex h-16 select-none items-center gap-4 border border-gray-300 bg-background p-3 md:rounded-b-2xl md:px-6 md:shadow">
<div className="flex flex-1 items-center gap-4">
<Sheet>
@ -67,5 +66,19 @@ export async function NavBar() {
{isAvailable && user && <ProfileDropdown />}
</div>
</header>
) : (
<nav className="flex w-full items-center p-2 pt-8">
<div className="flex h-10 w-20 flex-1 flex-row items-center justify-center gap-2">
<a href="https://agpt.co/">
<Image
src="/AUTOgpt_Logo_dark.png"
alt="AutoGPT Logo"
width={100}
height={40}
priority
/>
</a>
</div>
</nav>
);
}

@ -22,6 +22,7 @@ const NodeHandle: FC<HandleProps> = ({
const typeName: Record<string, string> = {
string: "text",
number: "number",
integer: "integer",
boolean: "true/false",
object: "object",
array: "list",

@ -3,6 +3,7 @@ import { Card, CardContent, CardHeader } from "@/components/ui/card";
import { Label } from "@/components/ui/label";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { TextRenderer } from "@/components/ui/render";
import { ScrollArea } from "@/components/ui/scroll-area";
import { beautifyString } from "@/lib/utils";
import {
@ -180,7 +181,7 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
</CardHeader>
<CardContent className="overflow-scroll border-t p-0">
<ScrollArea
className="h-[60vh] w-fit w-full"
className="h-[60vh]"
data-id="blocks-control-scroll-area"
>
{getFilteredBlockList().map((block) => (
@ -202,13 +203,19 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
className="block truncate pb-1 text-sm font-semibold"
data-id={`block-name-${block.id}`}
>
{beautifyString(block.name).replace(/ Block$/, "")}
<TextRenderer
value={beautifyString(block.name).replace(
/ Block$/,
"",
)}
truncateLengthLimit={45}
/>
</span>
<span className="block break-words text-xs font-normal text-gray-500">
{/* Cap description at 100 characters max */}
{block.description?.length > 100
? block.description.slice(0, 100) + "..."
: block.description}
<span className="block break-all text-xs font-normal text-gray-500">
<TextRenderer
value={block.description}
truncateLengthLimit={165}
/>
</span>
</div>
<div

@ -115,6 +115,7 @@ export const SaveControl = ({
value={agentName}
onChange={(e) => onNameChange(e.target.value)}
data-id="save-control-name-input"
maxLength={100}
/>
<Label htmlFor="description">Description</Label>
<Input
@ -124,6 +125,7 @@
value={agentDescription}
onChange={(e) => onDescriptionChange(e.target.value)}
data-id="save-control-description-input"
maxLength={500}
/>
{agentMeta?.version && (
<>

@ -11,22 +11,6 @@ code {
monospace;
}

input,
textarea {
background-color: #ffffff;
color: #000000;
border: 1px solid #555;
padding: 8px;
border-radius: 4px;
width: calc(100% - 18px);
box-sizing: border-box;
}

input::placeholder,
textarea::placeholder {
color: #aaa;
}

.modal {
position: absolute;
top: 50%;

@ -3,6 +3,7 @@ import { cn } from "@/lib/utils";
import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import SchemaTooltip from "@/components/SchemaTooltip";
import useCredentials from "@/hooks/useCredentials";
import { zodResolver } from "@hookform/resolvers/zod";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
@ -235,12 +236,10 @@ export const CredentialsInput: FC<{
if (savedApiKeys.length === 0 && savedOAuthCredentials.length === 0) {
return (
<>
<span
className="text-m green mb-0 text-gray-900"
title={schema.description}
>
Credentials
</span>
<div className="mb-2 flex gap-1">
<span className="text-m green text-gray-900">Credentials</span>
<SchemaTooltip description={schema.description} />
</div>
<div className={cn("flex flex-row space-x-2", className)}>
{supportsOAuth2 && (
<Button onClick={handleOAuthLogin}>

@ -1,5 +1,6 @@
import AutoGPTServerAPI, {
APIKeyCredentials,
CredentialsDeleteNeedConfirmationResponse,
CredentialsDeleteResponse,
CredentialsMetaResponse,
CredentialsProviderName,
@ -59,7 +60,12 @@ export type CredentialsProviderData = {
createAPIKeyCredentials: (
credentials: APIKeyCredentialsCreatable,
) => Promise<CredentialsMetaResponse>;
deleteCredentials: (id: string) => Promise<CredentialsDeleteResponse>;
deleteCredentials: (
id: string,
force?: boolean,
) => Promise<
CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
>;
};

export type CredentialsProvidersContextType = {
@ -144,8 +150,14 @@ export default function CredentialsProvider({
async (
provider: CredentialsProviderName,
id: string,
): Promise<CredentialsDeleteResponse> => {
const result = await api.deleteCredentials(provider, id);
force: boolean = false,
): Promise<
CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
> => {
const result = await api.deleteCredentials(provider, id, force);
if (!result.deleted) {
return result;
}
setProviders((prev) => {
if (!prev || !prev[provider]) return prev;

@ -172,43 +184,64 @@ export default function CredentialsProvider({
api.isAuthenticated().then((isAuthenticated) => {
if (!isAuthenticated) return;

CREDENTIALS_PROVIDER_NAMES.forEach(
(provider: CredentialsProviderName) => {
api.listCredentials(provider).then((response) => {
const { oauthCreds, apiKeys } = response.reduce<{
api.listCredentials().then((response) => {
const credentialsByProvider = response.reduce(
(acc, cred) => {
if (!acc[cred.provider]) {
acc[cred.provider] = { oauthCreds: [], apiKeys: [] };
}
if (cred.type === "oauth2") {
acc[cred.provider].oauthCreds.push(cred);
} else if (cred.type === "api_key") {
acc[cred.provider].apiKeys.push(cred);
}
return acc;
},
{} as Record<
CredentialsProviderName,
{
oauthCreds: CredentialsMetaResponse[];
apiKeys: CredentialsMetaResponse[];
}>(
(acc, cred) => {
if (cred.type === "oauth2") {
acc.oauthCreds.push(cred);
} else if (cred.type === "api_key") {
acc.apiKeys.push(cred);
}
return acc;
},
{ oauthCreds: [], apiKeys: [] },
);
}
>,
);

setProviders((prev) => ({
...prev,
[provider]: {
setProviders((prev) => ({
...prev,
...Object.fromEntries(
CREDENTIALS_PROVIDER_NAMES.map((provider) => [
provider,
{
provider,
providerName: providerDisplayNames[provider],
savedApiKeys: apiKeys,
savedOAuthCredentials: oauthCreds,
providerName:
providerDisplayNames[provider as CredentialsProviderName],
savedApiKeys: credentialsByProvider[provider]?.apiKeys ?? [],
savedOAuthCredentials:
credentialsByProvider[provider]?.oauthCreds ?? [],
oAuthCallback: (code: string, state_token: string) =>
oAuthCallback(provider, code, state_token),
oAuthCallback(
provider as CredentialsProviderName,
code,
state_token,
),
createAPIKeyCredentials: (
credentials: APIKeyCredentialsCreatable,
) => createAPIKeyCredentials(provider, credentials),
deleteCredentials: (id: string) =>
deleteCredentials(provider, id),
) =>
createAPIKeyCredentials(
provider as CredentialsProviderName,
credentials,
),
deleteCredentials: (id: string, force: boolean = false) =>
deleteCredentials(
provider as CredentialsProviderName,
id,
force,
),
},
}));
});
},
);
]),
),
}));
});
});
}, [api, createAPIKeyCredentials, deleteCredentials, oAuthCallback]);

@ -2,6 +2,7 @@ import AutoGPTServerAPI, { GraphMeta } from "@/lib/autogpt-server-api";
import React, { useEffect, useMemo, useState } from "react";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { TextRenderer } from "@/components/ui/render";
import Link from "next/link";
import {
Dialog,
@ -94,7 +95,10 @@ export const AgentFlowList = ({
});
}}
>
{template.name}
<TextRenderer
value={template.name}
truncateLengthLimit={30}
/>
</DropdownMenuItem>
))}
</>
@ -162,7 +166,9 @@ export const AgentFlowList = ({
onClick={() => onSelectFlow(flow)}
data-state={selectedFlow?.id == flow.id ? "selected" : null}
>
<TableCell>{flow.name}</TableCell>
<TableCell>
<TextRenderer value={flow.name} truncateLengthLimit={30} />
</TableCell>
{/* <TableCell><FlowStatusBadge status={flow.status ?? "active"} /></TableCell> */}
{/* <TableCell>
{flow.updatedAt ?? "???"}

@ -71,7 +71,7 @@ export const FlowRunInfo: React.FC<
result: result.output_data?.output || undefined,
})),
);
}, [api, flow.id, flow.version, flowRun.id]);
}, [api, flow.id, flowRun.id]);

// Fetch graph and execution data
useEffect(() => {
@ -80,7 +80,7 @@ export const FlowRunInfo: React.FC<
}

fetchBlockResults();
}, [isOutputOpen, blockOutputs]);
}, [isOutputOpen, blockOutputs, fetchBlockResults]);

if (flowRun.graphID != flow.id) {
throw new Error(
@ -90,7 +90,7 @@ export const FlowRunInfo: React.FC<

const handleStopRun = useCallback(() => {
api.stopGraphExecution(flow.id, flowRun.id);
}, [flow.id, flowRun.id]);
}, [api, flow.id, flowRun.id]);

return (
<>

@ -12,6 +12,7 @@ import {
} from "@/components/ui/table";
import moment from "moment/moment";
import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge";
import { TextRenderer } from "../ui/render";

export const FlowRunsList: React.FC<{
flows: GraphMeta[];
@ -43,7 +44,10 @@ export const FlowRunsList: React.FC<{
data-state={selectedRun?.id == run.id ? "selected" : null}
>
<TableCell>
{flows.find((f) => f.id == run.graphID)!.name}
<TextRenderer
value={flows.find((f) => f.id == run.graphID)!.name}
truncateLengthLimit={30}
/>
</TableCell>
<TableCell>{moment(run.startTime).format("HH:mm")}</TableCell>
<TableCell>

@ -30,6 +30,7 @@ import {
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { TextRenderer } from "../ui/render";

interface SchedulesTableProps {
schedules: Schedule[];
@ -111,7 +112,7 @@ export const SchedulesTable = ({
<SelectContent>
{agents.map((agent, i) => (
<SelectItem key={agent.id + i} value={agent.id}>
{agent.name}
<TextRenderer value={agent.name} truncateLengthLimit={30} />
</SelectItem>
))}
</SelectContent>

@ -20,6 +20,14 @@ import {
SelectTrigger,
SelectValue,
} from "./ui/select";
import {
MultiSelector,
MultiSelectorContent,
MultiSelectorInput,
MultiSelectorItem,
MultiSelectorList,
MultiSelectorTrigger,
} from "./ui/multiselect";
import { LocalValuedInput } from "./ui/input";
import NodeHandle from "./NodeHandle";
import { ConnectionData } from "./CustomNode";
@ -133,6 +141,37 @@ export const NodeGenericInputField: FC<{
}

if ("properties" in propSchema) {
// Render a multi-select for all-boolean sub-schemas with more than 3 properties
if (
Object.values(propSchema.properties).every(
(subSchema) => "type" in subSchema && subSchema.type == "boolean",
) &&
Object.keys(propSchema.properties).length >= 3
) {
const options = Object.keys(propSchema.properties);
const selectedKeys = Object.entries(currentValue || {})
.filter(([_, v]) => v)
.map(([k, _]) => k);
return (
<NodeMultiSelectInput
selfKey={propKey}
schema={propSchema}
selection={selectedKeys}
error={errors[propKey]}
className={className}
displayName={displayName}
handleInputChange={(key, selection) => {
handleInputChange(
key,
Object.fromEntries(
options.map((option) => [option, selection.includes(option)]),
),
);
}}
/>
);
}

return (
<NodeObjectInputTree
nodeId={nodeId}
@ -595,6 +634,56 @@ const NodeArrayInput: FC<{
);
};

const NodeMultiSelectInput: FC<{
selfKey: string;
schema: BlockIOObjectSubSchema; // TODO: Support BlockIOArraySubSchema
selection?: string[];
error?: string;
className?: string;
displayName?: string;
handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
}> = ({
selfKey,
schema,
selection = [],
error,
className,
displayName,
handleInputChange,
}) => {
const options = Object.keys(schema.properties);

return (
<div className={cn("flex flex-col", className)}>
<MultiSelector
className="nodrag"
values={selection}
onValuesChange={(v) => handleInputChange(selfKey, v)}
>
<MultiSelectorTrigger>
<MultiSelectorInput
placeholder={
schema.placeholder ?? `Select ${displayName || schema.title}...`
}
/>
</MultiSelectorTrigger>
<MultiSelectorContent className="nowheel">
<MultiSelectorList>
{options
.map((key) => ({ ...schema.properties[key], key }))
.map(({ key, title, description }) => (
<MultiSelectorItem key={key} value={key} title={description}>
{title ?? key}
</MultiSelectorItem>
))}
</MultiSelectorList>
</MultiSelectorContent>
</MultiSelector>
{error && <span className="error-message">{error}</span>}
</div>
);
};

const NodeStringInput: FC<{
selfKey: string;
schema: BlockIOStringSubSchema;
@ -783,7 +872,7 @@ const NodeBooleanInput: FC<{
defaultChecked={value}
onCheckedChange={(v) => handleInputChange(selfKey, v)}
/>
<span className="ml-3">{displayName}</span>
{displayName && <span className="ml-3">{displayName}</span>}
</div>
{error && <span className="error-message">{error}</span>}
</div>

143
autogpt_platform/frontend/src/components/ui/alert-dialog.tsx
Normal file
@ -0,0 +1,143 @@
|
||||
"use client";
|
||||
|
||||
import * as React from "react";
|
||||
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";

import { cn } from "@/lib/utils";
import { buttonVariants } from "@/components/ui/button";
import { VariantProps } from "class-variance-authority";

const AlertDialog = AlertDialogPrimitive.Root;

const AlertDialogTrigger = AlertDialogPrimitive.Trigger;

const AlertDialogPortal = AlertDialogPrimitive.Portal;

const AlertDialogOverlay = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Overlay>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Overlay>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Overlay
    className={cn(
      "fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
      className,
    )}
    {...props}
    ref={ref}
  />
));
AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName;

const AlertDialogContent = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Content>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Content>
>(({ className, ...props }, ref) => (
  <AlertDialogPortal>
    <AlertDialogOverlay />
    <AlertDialogPrimitive.Content
      ref={ref}
      className={cn(
        "fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border border-neutral-200 bg-white p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] dark:border-neutral-800 dark:bg-neutral-950 sm:rounded-lg",
        className,
      )}
      {...props}
    />
  </AlertDialogPortal>
));
AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName;

const AlertDialogHeader = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col space-y-2 text-center sm:text-left",
      className,
    )}
    {...props}
  />
);
AlertDialogHeader.displayName = "AlertDialogHeader";

const AlertDialogFooter = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2",
      className,
    )}
    {...props}
  />
);
AlertDialogFooter.displayName = "AlertDialogFooter";

const AlertDialogTitle = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Title>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Title>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Title
    ref={ref}
    className={cn("text-lg font-semibold", className)}
    {...props}
  />
));
AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName;

const AlertDialogDescription = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Description>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Description>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Description
    ref={ref}
    className={cn("text-sm text-neutral-500 dark:text-neutral-400", className)}
    {...props}
  />
));
AlertDialogDescription.displayName =
  AlertDialogPrimitive.Description.displayName;

const AlertDialogAction = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Action>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Action> &
    VariantProps<typeof buttonVariants>
>(({ className, variant, ...props }, ref) => (
  <AlertDialogPrimitive.Action
    ref={ref}
    className={cn(buttonVariants({ variant: variant }), className)}
    {...props}
  />
));
AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName;

const AlertDialogCancel = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Cancel>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Cancel>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Cancel
    ref={ref}
    className={cn(
      buttonVariants({ variant: "outline" }),
      "mt-2 sm:mt-0",
      className,
    )}
    {...props}
  />
));
AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName;

export {
  AlertDialog,
  AlertDialogPortal,
  AlertDialogOverlay,
  AlertDialogTrigger,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogFooter,
  AlertDialogTitle,
  AlertDialogDescription,
  AlertDialogAction,
  AlertDialogCancel,
};
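For orientation, a minimal usage sketch of the exports above (not part of the diff; the import path and the "destructive" button variant are assumptions based on typical shadcn/ui setups):

import {
  AlertDialog,
  AlertDialogAction,
  AlertDialogCancel,
  AlertDialogContent,
  AlertDialogDescription,
  AlertDialogFooter,
  AlertDialogHeader,
  AlertDialogTitle,
  AlertDialogTrigger,
} from "@/components/ui/alert-dialog"; // assumed path

export function ConfirmDeleteButton({ onConfirm }: { onConfirm: () => void }) {
  return (
    <AlertDialog>
      <AlertDialogTrigger>Delete</AlertDialogTrigger>
      <AlertDialogContent>
        <AlertDialogHeader>
          <AlertDialogTitle>Are you absolutely sure?</AlertDialogTitle>
          <AlertDialogDescription>
            This action cannot be undone.
          </AlertDialogDescription>
        </AlertDialogHeader>
        <AlertDialogFooter>
          <AlertDialogCancel>Cancel</AlertDialogCancel>
          {/* AlertDialogAction forwards buttonVariants props; "destructive" is an assumed variant name */}
          <AlertDialogAction variant="destructive" onClick={onConfirm}>
            Continue
          </AlertDialogAction>
        </AlertDialogFooter>
      </AlertDialogContent>
    </AlertDialog>
  );
}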
@ -144,7 +144,7 @@ const MultiSelector = forwardRef<HTMLDivElement, MultiSelectorProps>(
      ref={ref}
      onKeyDown={handleKeyDown}
      className={cn(
        "flex flex-col space-y-2 overflow-visible bg-transparent",
        "flex flex-col overflow-visible bg-transparent",
        className,
      )}
      dir={dir}
@ -174,7 +174,7 @@ const MultiSelectorTrigger = forwardRef<
    <div
      ref={ref}
      className={cn(
        "flex flex-wrap gap-1 rounded-lg border border-muted bg-background p-1 py-2",
        "agpt-border-input agpt-shadow-input flex flex-wrap gap-1 rounded-lg bg-background px-3 py-2 pl-1 text-sm",
        className,
      )}
      {...props}
@ -183,7 +183,7 @@ const MultiSelectorTrigger = forwardRef<
        <Badge
          key={item}
          className={cn(
            "flex items-center gap-1 rounded-xl px-1",
            "flex items-center gap-1 rounded-xl px-1 pl-2",
            activeIndex === index && "ring-2 ring-muted-foreground",
          )}
          variant={"secondary"}
@ -237,10 +237,10 @@ MultiSelectorInput.displayName = "MultiSelectorInput";
const MultiSelectorContent = forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ children }, ref) => {
>(({ children, className }, ref) => {
  const { open } = useMultiSelect();
  return (
    <div ref={ref} className="relative">
    <div ref={ref} className={cn("relative mt-2", className)}>
      {open && children}
    </div>
  );
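A rough usage sketch for orientation (not part of the diff): the values/onValuesChange props and the MultiSelectorList/MultiSelectorItem children are assumptions based on the common shadcn-extension multi-select API; only MultiSelector, MultiSelectorTrigger, MultiSelectorInput, and MultiSelectorContent appear in this diff.

const [selected, setSelected] = useState<string[]>([]); // assumed state hook

<MultiSelector values={selected} onValuesChange={setSelected}>
  <MultiSelectorTrigger>
    <MultiSelectorInput placeholder="Select options" />
  </MultiSelectorTrigger>
  {/* MultiSelectorContent now accepts className and supplies mt-2 spacing,
      compensating for the space-y-2 removed from the root element */}
  <MultiSelectorContent className="max-h-48">
    <MultiSelectorList>
      <MultiSelectorItem value="a">Option A</MultiSelectorItem>
    </MultiSelectorList>
  </MultiSelectorContent>
</MultiSelector>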
@ -76,15 +76,14 @@ const AudioRenderer: React.FC<{ audioUrl: string }> = ({ audioUrl }) => (
  </div>
);

const TextRenderer: React.FC<{ value: any; truncateLongData?: boolean }> = ({
  value,
  truncateLongData,
}) => {
  const maxChars = 100;
export const TextRenderer: React.FC<{
  value: any;
  truncateLengthLimit?: number;
}> = ({ value, truncateLengthLimit }) => {
  const text =
    typeof value === "object" ? JSON.stringify(value, null, 2) : String(value);
  return truncateLongData && text.length > maxChars
    ? text.slice(0, maxChars) + "..."
  return truncateLengthLimit && text.length > truncateLengthLimit
    ? text.slice(0, truncateLengthLimit) + "..."
    : text;
};

@ -101,5 +100,10 @@ export const ContentRenderer: React.FC<{
      return <AudioRenderer audioUrl={value} />;
    }
  }
  return <TextRenderer value={value} truncateLongData={truncateLongData} />;
  return (
    <TextRenderer
      value={value}
      truncateLengthLimit={truncateLongData ? 100 : undefined}
    />
  );
};
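A short sketch of the new renderer contract (not part of the diff; someLongOutput is a placeholder value): truncation is now driven by an explicit character limit instead of a boolean plus the hard-coded maxChars.

// Untruncated: renders the full string/JSON representation
<TextRenderer value={{ status: "ok" }} />

// Truncated: anything past 40 characters is cut and suffixed with "..."
<TextRenderer value={someLongOutput} truncateLengthLimit={40} />

// ContentRenderer keeps its boolean prop and maps it to the old 100-char limit
<ContentRenderer value={someLongOutput} truncateLongData />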
@ -14,7 +14,10 @@ const ScrollArea = React.forwardRef<
    className={cn("relative overflow-hidden", className)}
    {...props}
  >
    <ScrollAreaPrimitive.Viewport className="h-full w-full rounded-[inherit]">
    <ScrollAreaPrimitive.Viewport
      className="h-full w-full rounded-[inherit]"
      style={{ overflow: "scroll" }}
    >
      {children}
    </ScrollAreaPrimitive.Viewport>
    <ScrollBar />
@ -1,24 +1,24 @@
import { SupabaseClient } from "@supabase/supabase-js";
import {
  AnalyticsMetrics,
  AnalyticsDetails,
  AnalyticsMetrics,
  APIKeyCredentials,
  Block,
  CredentialsDeleteNeedConfirmationResponse,
  CredentialsDeleteResponse,
  CredentialsMetaResponse,
  ExecutionMeta,
  Graph,
  GraphCreatable,
  GraphUpdateable,
  GraphExecuteResponse,
  GraphMeta,
  GraphMetaWithRuns,
  GraphExecuteResponse,
  ExecutionMeta,
  GraphUpdateable,
  NodeExecutionResult,
  OAuth2Credentials,
  User,
  ScheduleCreatable,
  ScheduleUpdateable,
  Schedule,
  ScheduleCreatable,
  User,
} from "./types";

export default class BaseAutoGPTServerAPI {
@ -212,8 +212,12 @@ export default class BaseAutoGPTServerAPI {
    );
  }

  listCredentials(provider: string): Promise<CredentialsMetaResponse[]> {
    return this._get(`/integrations/${provider}/credentials`);
  listCredentials(provider?: string): Promise<CredentialsMetaResponse[]> {
    return this._get(
      provider
        ? `/integrations/${provider}/credentials`
        : "/integrations/credentials",
    );
  }

  getCredentials(
@ -226,10 +230,14 @@ export default class BaseAutoGPTServerAPI {
  deleteCredentials(
    provider: string,
    id: string,
  ): Promise<CredentialsDeleteResponse> {
    force: boolean = true,
  ): Promise<
    CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
  > {
    return this._request(
      "DELETE",
      `/integrations/${provider}/credentials/${id}`,
      force ? { force: true } : undefined,
    );
  }
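A hedged usage sketch of the changed methods (not part of the diff; api stands for a BaseAutoGPTServerAPI instance and credentialsId for a real credentials ID):

// provider is now optional: omit it to list credentials across all providers
const all = await api.listCredentials();
const githubOnly = await api.listCredentials("github");

// deletion now defaults to force=true and can return either response shape
const result = await api.deleteCredentials("github", credentialsId, false);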
@ -271,13 +279,14 @@ export default class BaseAutoGPTServerAPI {
        ?.access_token || "";

    let url = this.baseUrl + path;
    if (method === "GET" && payload) {
    const payloadAsQuery = ["GET", "DELETE"].includes(method);
    if (payloadAsQuery && payload) {
      // For GET and DELETE requests, send the payload as query parameters
      const queryParams = new URLSearchParams(payload);
      url += `?${queryParams.toString()}`;
    }

    const hasRequestBody = method !== "GET" && payload !== undefined;
    const hasRequestBody = !payloadAsQuery && payload !== undefined;
    const response = await fetch(url, {
      method,
      headers: {
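To illustrate the new branch (an illustration, not repository code): DELETE payloads are now serialized into the query string exactly like GET payloads, so the force flag from deleteCredentials travels in the URL rather than in a request body.

// e.g. deleteCredentials("github", "123") passes { force: true } as payload;
// URLSearchParams coerces the value to a string:
const queryParams = new URLSearchParams({ force: "true" });
queryParams.toString(); // "force=true"
// => DELETE {baseUrl}/integrations/github/credentials/123?force=true, no body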
@ -56,6 +56,7 @@ export type BlockIOSubSchemaMeta = {
  description?: string;
  placeholder?: string;
  advanced?: boolean;
  hidden?: boolean;
};

export type BlockIOObjectSubSchema = BlockIOSubSchemaMeta & {
@ -259,6 +260,7 @@ export type NodeExecutionResult = {
/* Mirror of backend/server/integrations/router.py:CredentialsMetaResponse */
export type CredentialsMetaResponse = {
  id: string;
  provider: CredentialsProviderName;
  type: CredentialsType;
  title?: string;
  scopes?: Array<string>;
@ -271,6 +273,13 @@ export type CredentialsDeleteResponse = {
  revoked: boolean | null;
};

/* Mirror of backend/server/integrations/router.py:CredentialsDeletionNeedsConfirmationResponse */
export type CredentialsDeleteNeedConfirmationResponse = {
  deleted: false;
  need_confirmation: true;
  message: string;
};

/* Mirror of backend/data/model.py:CredentialsMetaInput */
export type CredentialsMetaInput = {
  id: string;
@ -284,7 +293,7 @@ type BaseCredentials = {
  id: string;
  type: CredentialsType;
  title?: string;
  provider: string;
  provider: CredentialsProviderName;
};

/* Mirror of autogpt_libs/supabase_integration_credentials_store/types.py:OAuth2Credentials */
@ -317,6 +326,7 @@ export enum BlockUIType {
  INPUT = "Input",
  OUTPUT = "Output",
  NOTE = "Note",
  WEBHOOK = "Webhook",
  AGENT = "Agent",
}
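A sketch of narrowing the new delete-response union (not part of the diff; api and id are placeholders). need_confirmation exists only on the confirmation shape, so an "in" check discriminates the two:

const res = await api.deleteCredentials("github", id, false);
if ("need_confirmation" in res) {
  // CredentialsDeleteNeedConfirmationResponse: surface res.message, then retry
  // the call with force=true once the user confirms.
  console.warn(res.message);
} else {
  // CredentialsDeleteResponse: res.revoked is boolean | null
  console.log("credentials revoked:", res.revoked);
}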
@ -41,6 +41,7 @@ export function getTypeTextColor(type: string | null): string {
    {
      string: "text-green-500",
      number: "text-blue-500",
      integer: "text-blue-500",
      boolean: "text-yellow-500",
      object: "text-purple-500",
      array: "text-indigo-500",
@ -58,6 +59,7 @@ export function getTypeBgColor(type: string | null): string {
    {
      string: "border-green-500",
      number: "border-blue-500",
      integer: "border-blue-500",
      boolean: "border-yellow-500",
      object: "border-purple-500",
      array: "border-indigo-500",
@ -74,6 +76,7 @@ export function getTypeColor(type: string | null): string {
    {
      string: "#22c55e",
      number: "#3b82f6",
      integer: "#3b82f6",
      boolean: "#eab308",
      object: "#a855f7",
      array: "#6366f1",
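In effect (a sketch, not repository code), integer-typed pins now resolve to the same blue styling as numbers in all three lookups rather than falling through to the default:

getTypeTextColor("integer"); // "text-blue-500"
getTypeBgColor("integer"); // "border-blue-500"
getTypeColor("integer"); // "#3b82f6"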
@ -1,13 +1,12 @@
import { test, expect } from "./fixtures";
// auth.spec.ts
import { test } from "./fixtures";

test.describe("Authentication", () => {
  test("user can login successfully", async ({ page, loginPage, testUser }) => {
    await page.goto("/login"); // Make sure we're on the login page
    await page.goto("/login");
    await loginPage.login(testUser.email, testUser.password);
    // expect to be redirected to the home page
    await expect(page).toHaveURL("/");
    // expect to see the Monitor text
    await expect(page.getByText("Monitor")).toBeVisible();
    await test.expect(page).toHaveURL("/");
    await test.expect(page.getByText("Monitor")).toBeVisible();
  });

  test("user can logout successfully", async ({
@ -15,17 +14,17 @@ test.describe("Authentication", () => {
    loginPage,
    testUser,
  }) => {
    await page.goto("/login"); // Make sure we're on the login page
    await page.goto("/login");
    await loginPage.login(testUser.email, testUser.password);

    // Expect to be on the home page
    await expect(page).toHaveURL("/");
    await test.expect(page).toHaveURL("/");

    // Click on the user menu
    await page.getByRole("button", { name: "CN" }).click();
    // Click on the logout menu item
    await page.getByRole("menuitem", { name: "Log out" }).click();
    // Expect to be redirected to the login page
    await expect(page).toHaveURL("/login");

    await test.expect(page).toHaveURL("/login");
  });

  test("log in, then out, then in again", async ({
@ -33,14 +32,14 @@ test.describe("Authentication", () => {
    loginPage,
    testUser,
  }) => {
    await page.goto("/login"); // Make sure we're on the login page
    await page.goto("/login");
    await loginPage.login(testUser.email, testUser.password);
    await page.goto("/");
    await page.getByRole("button", { name: "CN" }).click();
    await page.getByRole("menuitem", { name: "Log out" }).click();
    await expect(page).toHaveURL("/login");
    await test.expect(page).toHaveURL("/login");
    await loginPage.login(testUser.email, testUser.password);
    await expect(page).toHaveURL("/");
    await expect(page.getByText("Monitor")).toBeVisible();
    await test.expect(page).toHaveURL("/");
    await test.expect(page.getByText("Monitor")).toBeVisible();
  });
});
@ -1,18 +1,109 @@
/* eslint-disable react-hooks/rules-of-hooks */
import { test as base } from "@playwright/test";
import { createTestUserFixture } from "./test-user.fixture";
import { createLoginPageFixture } from "./login-page.fixture";
import type { TestUser } from "./test-user.fixture";
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import { faker } from "@faker-js/faker";
import fs from "fs";
import path from "path";
import { TestUser } from "./test-user.fixture";
import { LoginPage } from "../pages/login.page";

type Fixtures = {
// Extend both worker state and test-specific fixtures
type WorkerFixtures = {
  workerAuth: TestUser;
};

type TestFixtures = {
  testUser: TestUser;
  loginPage: LoginPage;
};

// Combine fixtures
export const test = base.extend<Fixtures>({
  testUser: createTestUserFixture,
  loginPage: createLoginPageFixture,
let supabase: SupabaseClient;

function getSupabaseAdmin() {
  if (!supabase) {
    supabase = createClient(
      process.env.SUPABASE_URL!,
      process.env.SUPABASE_SERVICE_ROLE_KEY!,
      {
        auth: {
          autoRefreshToken: false,
          persistSession: false,
        },
      },
    );
  }
  return supabase;
}

export const test = base.extend<TestFixtures, WorkerFixtures>({
  // Define the worker-level fixture that creates and manages worker-specific auth
  workerAuth: [
    async ({}, use, workerInfo) => {
      const workerId = workerInfo.workerIndex;
      const fileName = path.resolve(
        process.cwd(),
        `.auth/worker-${workerId}.json`,
      );

      // Create directory if it doesn't exist
      const dirPath = path.dirname(fileName);
      if (!fs.existsSync(dirPath)) {
        fs.mkdirSync(dirPath, { recursive: true });
      }

      let auth: TestUser;
      if (fs.existsSync(fileName)) {
        auth = JSON.parse(fs.readFileSync(fileName, "utf-8"));
      } else {
        // Generate new worker-specific test user
        auth = {
          email: `test.worker.${workerId}.${Date.now()}@example.com`,
          password: faker.internet.password({ length: 12 }),
        };

        const supabase = getSupabaseAdmin();
        const {
          data: { user },
          error: signUpError,
        } = await supabase.auth.signUp({
          email: auth.email,
          password: auth.password,
        });

        if (signUpError) {
          throw signUpError;
        }

        auth.id = user?.id;
        fs.writeFileSync(fileName, JSON.stringify(auth));
      }

      await use(auth);

      // Cleanup code is commented out to preserve test users during development
      /*
      if (workerInfo.project.metadata.teardown) {
        if (auth.id) {
          await deleteTestUser(auth.id);
        }
        if (fs.existsSync(fileName)) {
          fs.unlinkSync(fileName);
        }
      }
      */
    },
    { scope: "worker" },
  ],

  // Define the test-level fixture that provides access to the worker auth
  testUser: async ({ workerAuth }, use) => {
    await use(workerAuth);
  },

  // Update login page fixture to use worker auth by default
  loginPage: async ({ page }, use) => {
    await use(new LoginPage(page));
  },
});

export { expect } from "@playwright/test";
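A sketch of how a spec consumes these fixtures (an assumed example file, mirroring the specs below): each parallel worker signs up or reuses one Supabase user, cached in .auth/worker-<N>.json, so tests within a worker share credentials and workers never collide on the same account.

import { test, expect } from "./fixtures";

test("each worker reuses its own cached user", async ({ testUser }) => {
  // testUser resolves to this worker's auth; the email pattern comes from the fixture
  expect(testUser.email).toContain("test.worker.");
});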
15
autogpt_platform/frontend/src/tests/pages/base.page.ts
Normal file
@ -0,0 +1,15 @@
import { Page } from "@playwright/test";
import { NavBar } from "./navbar.page";

export class BasePage {
  readonly navbar: NavBar;

  constructor(protected page: Page) {
    this.navbar = new NavBar(page);
  }

  async waitForPageLoad() {
    // Common page load waiting logic
    await this.page.waitForLoadState("networkidle", { timeout: 10000 });
  }
}
51
autogpt_platform/frontend/src/tests/pages/navbar.page.ts
Normal file
@ -0,0 +1,51 @@
import { Page } from "@playwright/test";

export class NavBar {
  constructor(private page: Page) {}

  async clickProfileLink() {
    // await this.page.getByTestId("profile-link").click();

    await this.page.getByRole("button", { name: "CN" }).click();
    await this.page.getByRole("menuitem", { name: "Profile" }).click();
  }

  async clickMonitorLink() {
    await this.page.getByTestId("monitor-link").click();
  }

  async clickBuildLink() {
    await this.page.getByTestId("build-link").click();
  }

  async clickMarketplaceLink() {
    await this.page.getByTestId("marketplace-link").click();
  }

  async getUserMenuButton() {
    return this.page.getByRole("button", { name: "CN" });
  }

  async clickUserMenu() {
    await (await this.getUserMenuButton()).click();
  }

  async logout() {
    await this.clickUserMenu();
    await this.page.getByRole("menuitem", { name: "Log out" }).click();
  }

  async isLoggedIn(): Promise<boolean> {
    try {
      await (
        await this.getUserMenuButton()
      ).waitFor({
        state: "visible",
        timeout: 5000,
      });
      return true;
    } catch {
      return false;
    }
  }
}
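For orientation, a small sketch (not part of the diff) of driving navigation through this page object; page is a Playwright Page from a test context:

const navbar = new NavBar(page);
if (await navbar.isLoggedIn()) {
  await navbar.clickProfileLink();
  // ... assertions against the profile page ...
  await navbar.logout();
}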
38
autogpt_platform/frontend/src/tests/pages/profile.page.ts
Normal file
@ -0,0 +1,38 @@
import { Page } from "@playwright/test";
import { BasePage } from "./base.page";

export class ProfilePage extends BasePage {
  constructor(page: Page) {
    super(page);
  }

  async getDisplayedEmail(): Promise<string> {
    await this.waitForPageToLoad();
    const email = await this.page.getByTestId("profile-email").textContent();
    if (!email) {
      throw new Error("Email not found");
    }
    return email;
  }

  async isLoaded(): Promise<boolean> {
    try {
      await this.waitForPageToLoad();
      return true;
    } catch (error) {
      console.error("Error loading profile page", error);
      return false;
    }
  }

  private async waitForPageToLoad(): Promise<void> {
    await this.page.waitForLoadState("networkidle", { timeout: 60_000 });

    await this.page.getByTestId("profile-email").waitFor({
      state: "visible",
      timeout: 60_000,
    });

    await this.page.waitForLoadState("networkidle", { timeout: 60_000 });
  }
}
57
autogpt_platform/frontend/src/tests/profile.spec.ts
Normal file
@ -0,0 +1,57 @@
// profile.spec.ts
import { test } from "./fixtures";
import { ProfilePage } from "./pages/profile.page";

test.describe("Profile", () => {
  let profilePage: ProfilePage;

  test.beforeEach(async ({ page, loginPage, testUser }) => {
    profilePage = new ProfilePage(page);

    // Start each test with login using worker auth
    await page.goto("/login");
    await loginPage.login(testUser.email, testUser.password);
    await test.expect(page).toHaveURL("/");
  });

  test("user can view their profile information", async ({
    page,
    testUser,
  }) => {
    await profilePage.navbar.clickProfileLink();
    // workaround for #8788
    // sleep for 10 seconds to allow page to load due to bug in our system
    await page.waitForTimeout(10000);
    await page.reload();
    await page.reload();
    await test.expect(profilePage.isLoaded()).resolves.toBeTruthy();
    await test.expect(page).toHaveURL(new RegExp("/profile"));

    // Verify email matches test worker's email
    const displayedEmail = await profilePage.getDisplayedEmail();
    test.expect(displayedEmail).toBe(testUser.email);
  });

  test("profile navigation is accessible from navbar", async ({ page }) => {
    await profilePage.navbar.clickProfileLink();
    await test.expect(page).toHaveURL(new RegExp("/profile"));
    // workaround for #8788
    await page.reload();
    await page.reload();
    await test.expect(profilePage.isLoaded()).resolves.toBeTruthy();
  });

  test("profile displays user Credential providers", async ({ page }) => {
    await profilePage.navbar.clickProfileLink();

    // await test
    //   .expect(page.getByTestId("profile-section-personal"))
    //   .toBeVisible();
    // await test
    //   .expect(page.getByTestId("profile-section-settings"))
    //   .toBeVisible();
    // await test
    //   .expect(page.getByTestId("profile-section-security"))
    //   .toBeVisible();
  });
});
File diff suppressed because it is too large
40
autogpt_platform/market/poetry.lock
generated
@ -1058,29 +1058,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "ruff"
version = "0.7.4"
version = "0.8.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
    {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
    {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
    {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
    {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
    {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
    {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
    {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
]

[[package]]
@ -1298,4 +1298,4 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "985f87e9d6e2b7232f880a476c69c626bc4227156d8a57d8f1867236b215f82f"
content-hash = "89a2655b6c666f40a0319881580bc447aea78febee65a04eebf73fd092e2147e"
@ -28,7 +28,7 @@ pytest-asyncio = "^0.24.0"

pytest-watcher = "^0.4.3"
requests = "^2.32.3"
ruff = "^0.7.4"
ruff = "^0.8.0"
pyright = "^1.1.389"
isort = "^5.13.2"
black = "^24.10.0"
@ -180,7 +180,6 @@ class AnthropicProvider(BaseChatModelProvider[AnthropicModelName, AnthropicSetti
        """Create a completion using the Anthropic API."""
        anthropic_messages, completion_kwargs = self._get_chat_completion_args(
            prompt_messages=model_prompt,
            model=model_name,
            functions=functions,
            max_output_tokens=max_output_tokens,
            **kwargs,
Some files were not shown because too many files have changed in this diff