Merge remote-tracking branch 'origin/zamilmajdy/secrt-1014-scalability-implement-transactions-snapshotting' into zamilmajdy/secrt-1014-scalability-implement-transactions-snapshotting

Zamil Majdy 2025-01-03 12:05:31 -06:00
commit a1c4a02f77
23 changed files with 938 additions and 533 deletions

View File

@ -1,4 +1,3 @@
import base64
import re
from typing_extensions import TypedDict
@ -513,330 +512,3 @@ def prepare_pr_api_url(pr_url: str, path: str) -> str:
base_url, pr_number = match.groups()
return f"{base_url}/pulls/{pr_number}/{path}"
class GithubCreateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path where the file should be created",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="Content to write to the file",
placeholder="File content here",
)
branch: str = SchemaField(
description="Branch where the file should be created",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Create new file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(
description="Error message if the file creation failed"
)
def __init__(self):
super().__init__(
id="8fd132ac-b917-428a-8159-d62893e8a3fe",
description="This block creates a new file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateFileBlock.Input,
output_schema=GithubCreateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Test content",
"branch": "main",
"commit_message": "Create test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "abc123"),
],
test_mock={
"create_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"abc123",
)
},
)
@staticmethod
def create_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# Convert content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Create the file using the GitHub API
contents_url = f"{repo_url}/contents/{file_path}"
data = {
"message": commit_message,
"content": content_base64,
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.create_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubUpdateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path to the file to update",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="New content for the file",
placeholder="Updated content here",
)
branch: str = SchemaField(
description="Branch containing the file",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Update file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the updated file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(description="Error message if the file update failed")
def __init__(self):
super().__init__(
id="30be12a4-57cb-4aa4-baf5-fcc68d136076",
description="This block updates an existing file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubUpdateFileBlock.Input,
output_schema=GithubUpdateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Updated content",
"branch": "main",
"commit_message": "Update test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "def456"),
],
test_mock={
"update_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"def456",
)
},
)
@staticmethod
def update_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# First get the current file to get its SHA
contents_url = f"{repo_url}/contents/{file_path}"
params = {"ref": branch}
response = api.get(contents_url, params=params)
current_file = response.json()
# Convert new content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Update the file
data = {
"message": commit_message,
"content": content_base64,
"sha": current_file["sha"],
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.update_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubCreateRepositoryBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
name: str = SchemaField(
description="Name of the repository to create",
placeholder="my-new-repo",
)
description: str = SchemaField(
description="Description of the repository",
placeholder="A description of the repository",
default="",
)
private: bool = SchemaField(
description="Whether the repository should be private",
default=False,
)
auto_init: bool = SchemaField(
description="Whether to initialize the repository with a README",
default=True,
)
gitignore_template: str = SchemaField(
description="Git ignore template to use (e.g., Python, Node, Java)",
default="",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created repository")
clone_url: str = SchemaField(description="Git clone URL of the repository")
error: str = SchemaField(
description="Error message if the repository creation failed"
)
def __init__(self):
super().__init__(
id="029ec3b8-1cfd-46d3-b6aa-28e4a706efd1",
description="This block creates a new GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateRepositoryBlock.Input,
output_schema=GithubCreateRepositoryBlock.Output,
test_input={
"name": "test-repo",
"description": "A test repository",
"private": False,
"auto_init": True,
"gitignore_template": "Python",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/test-repo"),
("clone_url", "https://github.com/owner/test-repo.git"),
],
test_mock={
"create_repository": lambda *args, **kwargs: (
"https://github.com/owner/test-repo",
"https://github.com/owner/test-repo.git",
)
},
)
@staticmethod
def create_repository(
credentials: GithubCredentials,
name: str,
description: str,
private: bool,
auto_init: bool,
gitignore_template: str,
) -> tuple[str, str]:
api = get_api(credentials, convert_urls=False) # Disable URL conversion
data = {
"name": name,
"description": description,
"private": private,
"auto_init": auto_init,
}
if gitignore_template:
data["gitignore_template"] = gitignore_template
# Create repository using the user endpoint
response = api.post("https://api.github.com/user/repos", json=data)
result = response.json()
return result["html_url"], result["clone_url"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, clone_url = self.create_repository(
credentials,
input_data.name,
input_data.description,
input_data.private,
input_data.auto_init,
input_data.gitignore_template,
)
yield "url", url
yield "clone_url", clone_url
except Exception as e:
yield "error", str(e)

View File

@ -699,3 +699,420 @@ class GithubDeleteBranchBlock(Block):
input_data.branch,
)
yield "status", status
class GithubCreateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path where the file should be created",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="Content to write to the file",
placeholder="File content here",
)
branch: str = SchemaField(
description="Branch where the file should be created",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Create new file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(
description="Error message if the file creation failed"
)
def __init__(self):
super().__init__(
id="8fd132ac-b917-428a-8159-d62893e8a3fe",
description="This block creates a new file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateFileBlock.Input,
output_schema=GithubCreateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Test content",
"branch": "main",
"commit_message": "Create test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "abc123"),
],
test_mock={
"create_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"abc123",
)
},
)
@staticmethod
def create_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# Convert content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Create the file using the GitHub API
contents_url = f"{repo_url}/contents/{file_path}"
data = {
"message": commit_message,
"content": content_base64,
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.create_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubUpdateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path to the file to update",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="New content for the file",
placeholder="Updated content here",
)
branch: str = SchemaField(
description="Branch containing the file",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Update file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the updated file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(description="Error message if the file update failed")
def __init__(self):
super().__init__(
id="30be12a4-57cb-4aa4-baf5-fcc68d136076",
description="This block updates an existing file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubUpdateFileBlock.Input,
output_schema=GithubUpdateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Updated content",
"branch": "main",
"commit_message": "Update test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "def456"),
],
test_mock={
"update_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"def456",
)
},
)
@staticmethod
def update_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# First get the current file to get its SHA
contents_url = f"{repo_url}/contents/{file_path}"
params = {"ref": branch}
response = api.get(contents_url, params=params)
current_file = response.json()
# Convert new content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Update the file
data = {
"message": commit_message,
"content": content_base64,
"sha": current_file["sha"],
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.update_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubCreateRepositoryBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
name: str = SchemaField(
description="Name of the repository to create",
placeholder="my-new-repo",
)
description: str = SchemaField(
description="Description of the repository",
placeholder="A description of the repository",
default="",
)
private: bool = SchemaField(
description="Whether the repository should be private",
default=False,
)
auto_init: bool = SchemaField(
description="Whether to initialize the repository with a README",
default=True,
)
gitignore_template: str = SchemaField(
description="Git ignore template to use (e.g., Python, Node, Java)",
default="",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created repository")
clone_url: str = SchemaField(description="Git clone URL of the repository")
error: str = SchemaField(
description="Error message if the repository creation failed"
)
def __init__(self):
super().__init__(
id="029ec3b8-1cfd-46d3-b6aa-28e4a706efd1",
description="This block creates a new GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateRepositoryBlock.Input,
output_schema=GithubCreateRepositoryBlock.Output,
test_input={
"name": "test-repo",
"description": "A test repository",
"private": False,
"auto_init": True,
"gitignore_template": "Python",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/test-repo"),
("clone_url", "https://github.com/owner/test-repo.git"),
],
test_mock={
"create_repository": lambda *args, **kwargs: (
"https://github.com/owner/test-repo",
"https://github.com/owner/test-repo.git",
)
},
)
@staticmethod
def create_repository(
credentials: GithubCredentials,
name: str,
description: str,
private: bool,
auto_init: bool,
gitignore_template: str,
) -> tuple[str, str]:
api = get_api(credentials, convert_urls=False) # Disable URL conversion
data = {
"name": name,
"description": description,
"private": private,
"auto_init": auto_init,
}
if gitignore_template:
data["gitignore_template"] = gitignore_template
# Create repository using the user endpoint
response = api.post("https://api.github.com/user/repos", json=data)
result = response.json()
return result["html_url"], result["clone_url"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, clone_url = self.create_repository(
credentials,
input_data.name,
input_data.description,
input_data.private,
input_data.auto_init,
input_data.gitignore_template,
)
yield "url", url
yield "clone_url", clone_url
except Exception as e:
yield "error", str(e)
class GithubListStargazersBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
class Output(BlockSchema):
class StargazerItem(TypedDict):
username: str
url: str
stargazer: StargazerItem = SchemaField(
title="Stargazer",
description="Stargazers with their username and profile URL",
)
error: str = SchemaField(
description="Error message if listing stargazers failed"
)
def __init__(self):
super().__init__(
id="a4b9c2d1-e5f6-4g7h-8i9j-0k1l2m3n4o5p", # Generated unique UUID
description="This block lists all users who have starred a specified GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubListStargazersBlock.Input,
output_schema=GithubListStargazersBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"stargazer",
{
"username": "octocat",
"url": "https://github.com/octocat",
},
)
],
test_mock={
"list_stargazers": lambda *args, **kwargs: [
{
"username": "octocat",
"url": "https://github.com/octocat",
}
]
},
)
@staticmethod
def list_stargazers(
credentials: GithubCredentials, repo_url: str
) -> list[Output.StargazerItem]:
api = get_api(credentials)
# Add /stargazers to the repo URL to get stargazers endpoint
stargazers_url = f"{repo_url}/stargazers"
# Set accept header to get starred_at timestamp
headers = {"Accept": "application/vnd.github.star+json"}
response = api.get(stargazers_url, headers=headers)
data = response.json()
stargazers: list[GithubListStargazersBlock.Output.StargazerItem] = [
{
"username": stargazer["login"],
"url": stargazer["html_url"],
}
for stargazer in data
]
return stargazers
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
stargazers = self.list_stargazers(
credentials,
input_data.repo_url,
)
yield from (("stargazer", stargazer) for stargazer in stargazers)
except Exception as e:
yield "error", str(e)

View File

@ -22,10 +22,10 @@ from backend.util import json
from backend.util.settings import Config
from .model import (
CREDENTIALS_FIELD_NAME,
ContributorDetails,
Credentials,
CredentialsMetaInput,
is_credentials_field_name,
)
app_config = Config()
@ -138,17 +138,38 @@ class BlockSchema(BaseModel):
@classmethod
def __pydantic_init_subclass__(cls, **kwargs):
"""Validates the schema definition. Rules:
- Only one `CredentialsMetaInput` field may be present.
- This field MUST be called `credentials`.
- A field that is called `credentials` MUST be a `CredentialsMetaInput`.
- Fields with annotation `CredentialsMetaInput` MUST be
named `credentials` or `*_credentials`
- Fields named `credentials` or `*_credentials` MUST be
of type `CredentialsMetaInput`
"""
super().__pydantic_init_subclass__(**kwargs)
# Reset cached JSON schema to prevent inheriting it from parent class
cls.cached_jsonschema = {}
credentials_fields = [
field_name
credentials_fields = cls.get_credentials_fields()
for field_name in cls.get_fields():
if is_credentials_field_name(field_name):
if field_name not in credentials_fields:
raise TypeError(
f"Credentials field '{field_name}' on {cls.__qualname__} "
f"is not of type {CredentialsMetaInput.__name__}"
)
credentials_fields[field_name].validate_credentials_field_schema(cls)
elif field_name in credentials_fields:
raise KeyError(
f"Credentials field '{field_name}' on {cls.__qualname__} "
"has invalid name: must be 'credentials' or *_credentials"
)
@classmethod
def get_credentials_fields(cls) -> dict[str, type[CredentialsMetaInput]]:
return {
field_name: info.annotation
for field_name, info in cls.model_fields.items()
if (
inspect.isclass(info.annotation)
@ -157,32 +178,7 @@ class BlockSchema(BaseModel):
CredentialsMetaInput,
)
)
]
if len(credentials_fields) > 1:
raise ValueError(
f"{cls.__qualname__} can only have one CredentialsMetaInput field"
)
elif (
len(credentials_fields) == 1
and credentials_fields[0] != CREDENTIALS_FIELD_NAME
):
raise ValueError(
f"CredentialsMetaInput field on {cls.__qualname__} "
"must be named 'credentials'"
)
elif (
len(credentials_fields) == 0
and CREDENTIALS_FIELD_NAME in cls.model_fields.keys()
):
raise TypeError(
f"Field 'credentials' on {cls.__qualname__} "
f"must be of type {CredentialsMetaInput.__name__}"
)
if credentials_field := cls.model_fields.get(CREDENTIALS_FIELD_NAME):
credentials_input_type = cast(
CredentialsMetaInput, credentials_field.annotation
)
credentials_input_type.validate_credentials_field_schema(cls)
}
BlockSchemaInputType = TypeVar("BlockSchemaInputType", bound=BlockSchema)
@ -255,7 +251,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
test_input: BlockInput | list[BlockInput] | None = None,
test_output: BlockData | list[BlockData] | None = None,
test_mock: dict[str, Any] | None = None,
test_credentials: Optional[Credentials] = None,
test_credentials: Optional[Credentials | dict[str, Credentials]] = None,
disabled: bool = False,
static_output: bool = False,
block_type: BlockType = BlockType.STANDARD,
@ -297,10 +293,16 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
if self.webhook_config:
if isinstance(self.webhook_config, BlockWebhookConfig):
# Enforce presence of credentials field on auto-setup webhook blocks
if CREDENTIALS_FIELD_NAME not in self.input_schema.model_fields:
if not (cred_fields := self.input_schema.get_credentials_fields()):
raise TypeError(
"credentials field is required on auto-setup webhook blocks"
)
# Disallow multiple credentials inputs on webhook blocks
elif len(cred_fields) > 1:
raise ValueError(
"Multiple credentials inputs not supported on webhook blocks"
)
self.block_type = BlockType.WEBHOOK
else:
self.block_type = BlockType.WEBHOOK_MANUAL

View File

@ -424,6 +424,26 @@ class GraphModel(Graph):
result[key] = value
return result
def clean_graph(self):
blocks = [block() for block in get_blocks().values()]
input_blocks = [
node
for node in self.nodes
if next(
(
b
for b in blocks
if b.id == node.block_id and b.block_type == BlockType.INPUT
),
None,
)
]
for node in self.nodes:
if any(input_block.id == node.id for input_block in input_blocks):
node.input_default["value"] = ""
# --------------------- CRUD functions --------------------- #

View File

@ -245,7 +245,8 @@ CP = TypeVar("CP", bound=ProviderName)
CT = TypeVar("CT", bound=CredentialsType)
CREDENTIALS_FIELD_NAME = "credentials"
def is_credentials_field_name(field_name: str) -> bool:
return field_name == "credentials" or field_name.endswith("_credentials")
class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
@ -254,21 +255,21 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
provider: CP
type: CT
@staticmethod
def _add_json_schema_extra(schema, cls: CredentialsMetaInput):
schema["credentials_provider"] = get_args(
cls.model_fields["provider"].annotation
)
schema["credentials_types"] = get_args(cls.model_fields["type"].annotation)
@classmethod
def allowed_providers(cls) -> tuple[ProviderName, ...]:
return get_args(cls.model_fields["provider"].annotation)
model_config = ConfigDict(
json_schema_extra=_add_json_schema_extra, # type: ignore
)
@classmethod
def allowed_cred_types(cls) -> tuple[CredentialsType, ...]:
return get_args(cls.model_fields["type"].annotation)
@classmethod
def validate_credentials_field_schema(cls, model: type["BlockSchema"]):
"""Validates the schema of a `credentials` field"""
field_schema = model.jsonschema()["properties"][CREDENTIALS_FIELD_NAME]
"""Validates the schema of a credentials input field"""
field_name = next(
name for name, type in model.get_credentials_fields().items() if type is cls
)
field_schema = model.jsonschema()["properties"][field_name]
try:
schema_extra = _CredentialsFieldSchemaExtra[CP, CT].model_validate(
field_schema
@ -282,11 +283,20 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
f"{field_schema}"
) from e
if (
len(schema_extra.credentials_provider) > 1
and not schema_extra.discriminator
):
raise TypeError("Multi-provider CredentialsField requires discriminator!")
if len(cls.allowed_providers()) > 1 and not schema_extra.discriminator:
raise TypeError(
f"Multi-provider CredentialsField '{field_name}' "
"requires discriminator!"
)
@staticmethod
def _add_json_schema_extra(schema, cls: CredentialsMetaInput):
schema["credentials_provider"] = cls.allowed_providers()
schema["credentials_types"] = cls.allowed_cred_types()
model_config = ConfigDict(
json_schema_extra=_add_json_schema_extra, # type: ignore
)
class _CredentialsFieldSchemaExtra(BaseModel, Generic[CP, CT]):

View File

@ -10,7 +10,6 @@ from contextlib import contextmanager
from multiprocessing.pool import AsyncResult, Pool
from typing import TYPE_CHECKING, Any, Generator, TypeVar, cast
from pydantic import BaseModel
from redis.lock import Lock as RedisLock
if TYPE_CHECKING:
@ -20,7 +19,14 @@ from autogpt_libs.utils.cache import thread_cached
from backend.blocks.agent import AgentExecutorBlock
from backend.data import redis
from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
from backend.data.block import (
Block,
BlockData,
BlockInput,
BlockSchema,
BlockType,
get_block,
)
from backend.data.execution import (
ExecutionQueue,
ExecutionResult,
@ -31,7 +37,6 @@ from backend.data.execution import (
parse_execution_output,
)
from backend.data.graph import GraphModel, Link, Node
from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
from backend.util.decorator import error_logged, time_measured
@ -170,10 +175,11 @@ def execute_node(
# one (running) block at a time; simultaneous execution of blocks using same
# credentials is not supported.
creds_lock = None
if CREDENTIALS_FIELD_NAME in input_data:
credentials_meta = CredentialsMetaInput(**input_data[CREDENTIALS_FIELD_NAME])
input_model = cast(type[BlockSchema], node_block.input_schema)
for field_name, input_type in input_model.get_credentials_fields().items():
credentials_meta = input_type(**input_data[field_name])
credentials, creds_lock = creds_manager.acquire(user_id, credentials_meta.id)
extra_exec_kwargs["credentials"] = credentials
extra_exec_kwargs[field_name] = credentials
output_size = 0
end_status = ExecutionStatus.COMPLETED
@ -890,41 +896,39 @@ class ExecutionManager(AppService):
raise ValueError(f"Unknown block {node.block_id} for node #{node.id}")
# Find any fields of type CredentialsMetaInput
model_fields = cast(type[BaseModel], block.input_schema).model_fields
if CREDENTIALS_FIELD_NAME not in model_fields:
credentials_fields = cast(
type[BlockSchema], block.input_schema
).get_credentials_fields()
if not credentials_fields:
continue
field = model_fields[CREDENTIALS_FIELD_NAME]
# The BlockSchema class enforces that a `credentials` field is always a
# `CredentialsMetaInput`, so we can safely assume this here.
credentials_meta_type = cast(CredentialsMetaInput, field.annotation)
credentials_meta = credentials_meta_type.model_validate(
node.input_default[CREDENTIALS_FIELD_NAME]
)
# Fetch the corresponding Credentials and perform sanity checks
credentials = self.credentials_store.get_creds_by_id(
user_id, credentials_meta.id
)
if not credentials:
raise ValueError(
f"Unknown credentials #{credentials_meta.id} "
f"for node #{node.id}"
for field_name, credentials_meta_type in credentials_fields.items():
credentials_meta = credentials_meta_type.model_validate(
node.input_default[field_name]
)
if (
credentials.provider != credentials_meta.provider
or credentials.type != credentials_meta.type
):
logger.warning(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch: "
f"{credentials_meta.type}<>{credentials.type};"
f"{credentials_meta.provider}<>{credentials.provider}"
)
raise ValueError(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch"
# Fetch the corresponding Credentials and perform sanity checks
credentials = self.credentials_store.get_creds_by_id(
user_id, credentials_meta.id
)
if not credentials:
raise ValueError(
f"Unknown credentials #{credentials_meta.id} "
f"for node #{node.id} input '{field_name}'"
)
if (
credentials.provider != credentials_meta.provider
or credentials.type != credentials_meta.type
):
logger.warning(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch: "
f"{credentials_meta.type}<>{credentials.type};"
f"{credentials_meta.provider}<>{credentials.provider}"
)
raise ValueError(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch"
)
# ------- UTILITIES ------- #

View File

@ -1,9 +1,8 @@
import logging
from typing import TYPE_CHECKING, Callable, Optional, cast
from backend.data.block import BlockWebhookConfig, get_block
from backend.data.block import BlockSchema, BlockWebhookConfig, get_block
from backend.data.graph import set_node_webhook
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
if TYPE_CHECKING:
@ -30,14 +29,28 @@ async def on_graph_activate(
# Compare nodes in new_graph_version with previous_graph_version
updated_nodes = []
for new_node in graph.nodes:
block = get_block(new_node.block_id)
if not block:
raise ValueError(
f"Node #{new_node.id} is instance of unknown block #{new_node.block_id}"
)
block_input_schema = cast(BlockSchema, block.input_schema)
node_credentials = None
if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
node_credentials = get_credentials(creds_meta["id"])
if not node_credentials:
raise ValueError(
f"Node #{new_node.id} updated with non-existent "
f"credentials #{node_credentials}"
if (
# Webhook-triggered blocks are only allowed to have 1 credentials input
(
creds_field_name := next(
iter(block_input_schema.get_credentials_fields()), None
)
)
and (creds_meta := new_node.input_default.get(creds_field_name))
and not (node_credentials := get_credentials(creds_meta["id"]))
):
raise ValueError(
f"Node #{new_node.id} input '{creds_field_name}' updated with "
f"non-existent credentials #{creds_meta['id']}"
)
updated_node = await on_node_activate(
graph.user_id, new_node, credentials=node_credentials
@ -62,14 +75,28 @@ async def on_graph_deactivate(
"""
updated_nodes = []
for node in graph.nodes:
block = get_block(node.block_id)
if not block:
raise ValueError(
f"Node #{node.id} is instance of unknown block #{node.block_id}"
)
block_input_schema = cast(BlockSchema, block.input_schema)
node_credentials = None
if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
node_credentials = get_credentials(creds_meta["id"])
if not node_credentials:
logger.error(
f"Node #{node.id} referenced non-existent "
f"credentials #{creds_meta['id']}"
if (
# Webhook-triggered blocks are only allowed to have 1 credentials input
(
creds_field_name := next(
iter(block_input_schema.get_credentials_fields()), None
)
)
and (creds_meta := node.input_default.get(creds_field_name))
and not (node_credentials := get_credentials(creds_meta["id"]))
):
logger.error(
f"Node #{node.id} input '{creds_field_name}' referenced non-existent "
f"credentials #{creds_meta['id']}"
)
updated_node = await on_node_deactivate(node, credentials=node_credentials)
updated_nodes.append(updated_node)
@ -119,14 +146,17 @@ async def on_node_activate(
else:
resource = "" # not relevant for manual webhooks
needs_credentials = CREDENTIALS_FIELD_NAME in block.input_schema.model_fields
block_input_schema = cast(BlockSchema, block.input_schema)
credentials_field_name = next(iter(block_input_schema.get_credentials_fields()), "")
credentials_meta = (
node.input_default.get(CREDENTIALS_FIELD_NAME) if needs_credentials else None
node.input_default.get(credentials_field_name)
if credentials_field_name
else None
)
event_filter_input_name = block.webhook_config.event_filter_input
has_everything_for_webhook = (
resource is not None
and (credentials_meta or not needs_credentials)
and (credentials_meta or not credentials_field_name)
and (
not event_filter_input_name
or (
@ -230,7 +260,7 @@ async def on_node_deactivate(
)
await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials)
if (
CREDENTIALS_FIELD_NAME in block.input_schema.model_fields
cast(BlockSchema, block.input_schema).get_credentials_fields()
and not credentials
):
logger.warning(

View File

@ -1,14 +1,18 @@
import logging
import random
from datetime import datetime
from typing import Optional
import fastapi
import prisma.enums
import prisma.errors
import prisma.models
import prisma.types
import backend.data.graph
import backend.server.v2.store.exceptions
import backend.server.v2.store.model
from backend.data.graph import GraphModel
logger = logging.getLogger(__name__)
@ -786,3 +790,45 @@ async def get_my_agents(
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch my agents"
) from e
async def get_agent(
store_listing_version_id: str, version_id: Optional[int]
) -> GraphModel:
"""Get agent using the version ID and store listing version ID."""
try:
store_listing_version = (
await prisma.models.StoreListingVersion.prisma().find_unique(
where={"id": store_listing_version_id}, include={"Agent": True}
)
)
if not store_listing_version or not store_listing_version.Agent:
raise fastapi.HTTPException(
status_code=404,
detail=f"Store listing version {store_listing_version_id} not found",
)
agent = store_listing_version.Agent
graph = await backend.data.graph.get_graph(
agent.id, agent.version, template=True
)
if not graph:
raise fastapi.HTTPException(
status_code=404, detail=f"Agent {agent.id} not found"
)
graph.version = 1
graph.is_template = False
graph.is_active = True
delattr(graph, "user_id")
return graph
except Exception as e:
logger.error(f"Error getting agent: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch agent"
) from e

View File

@ -1,4 +1,6 @@
import json
import logging
import tempfile
import typing
import urllib.parse
@ -6,7 +8,9 @@ import autogpt_libs.auth.depends
import autogpt_libs.auth.middleware
import fastapi
import fastapi.responses
from fastapi.encoders import jsonable_encoder
import backend.data.block
import backend.data.graph
import backend.server.v2.store.db
import backend.server.v2.store.image_gen
@ -575,3 +579,66 @@ async def generate_image(
status_code=500,
content={"detail": "An error occurred while generating the image"},
)
@router.get(
"/download/agents/{store_listing_version_id}",
tags=["store", "public"],
)
async def download_agent_file(
store_listing_version_id: str = fastapi.Path(
..., description="The ID of the agent to download"
),
version: typing.Optional[int] = fastapi.Query(
None, description="Specific version of the agent"
),
) -> fastapi.responses.FileResponse:
"""
Download the agent file by streaming its content.
Args:
store_listing_version_id (str): The ID of the store listing version of the agent to download.
version (Optional[int]): Specific version of the agent to download.
Returns:
FileResponse: A file response containing the agent's graph data.
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
graph_data = await backend.server.v2.store.db.get_agent(
store_listing_version_id=store_listing_version_id, version_id=version
)
graph_data.clean_graph()
graph_date_dict = jsonable_encoder(graph_data)
def remove_credentials(obj):
if obj and isinstance(obj, dict):
if "credentials" in obj:
del obj["credentials"]
if "creds" in obj:
del obj["creds"]
for value in obj.values():
remove_credentials(value)
elif isinstance(obj, list):
for item in obj:
remove_credentials(item)
return obj
graph_date_dict = remove_credentials(graph_date_dict)
file_name = f"agent_{store_listing_version_id}_v{version or 'latest'}.json"
# Sending graph as a stream (similar to marketplace v1)
with tempfile.NamedTemporaryFile(
mode="w", suffix=".json", delete=False
) as tmp_file:
tmp_file.write(json.dumps(graph_date_dict))
tmp_file.flush()
return fastapi.responses.FileResponse(
tmp_file.name, filename=file_name, media_type="application/json"
)

View File

@ -33,20 +33,6 @@ ALLOWED_SCHEMES = ["http", "https"]
HOSTNAME_REGEX = re.compile(r"^[A-Za-z0-9.-]+$") # Basic DNS-safe hostname pattern
def _canonicalize_url(url: str) -> str:
"""
Normalizes the URL by:
1. Stripping whitespace and trailing slashes.
2. Ensuring the scheme is http:// or https:// if missing.
3. Replacing backslashes with forward slashes.
"""
url = url.strip().strip("/")
if not url.startswith(("http://", "https://")):
url = "http://" + url
url = url.replace("\\", "/")
return url
def _is_ip_blocked(ip: str) -> bool:
"""
Checks if the IP address is in a blocked network.
@ -61,9 +47,12 @@ def validate_url(url: str, trusted_origins: list[str]) -> str:
to a private, link-local, or otherwise blocked IP address unless
the hostname is explicitly trusted.
"""
# Normalize/canonicalize input
url = _canonicalize_url(url)
# Canonicalize URL
url = url.strip("/ ").replace("\\", "/")
parsed = urlparse(url)
if not parsed.scheme:
url = f"http://{url}"
parsed = urlparse(url)
# Check scheme
if parsed.scheme not in ALLOWED_SCHEMES:

View File

@ -1,11 +1,11 @@
import logging
import time
from typing import Sequence
from typing import Sequence, cast
from backend.data import db
from backend.data.block import Block, initialize_blocks
from backend.data.block import Block, BlockSchema, initialize_blocks
from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.model import _BaseCredentials
from backend.data.user import create_default_user
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
from backend.server.rest_api import AgentServer
@ -100,14 +100,22 @@ def execute_block_test(block: Block):
else:
log.info(f"{prefix} mock {mock_name} not found in block")
# Populate credentials argument(s)
extra_exec_kwargs = {}
if CREDENTIALS_FIELD_NAME in block.input_schema.model_fields:
if not block.test_credentials:
raise ValueError(
f"{prefix} requires credentials but has no test_credentials"
)
extra_exec_kwargs[CREDENTIALS_FIELD_NAME] = block.test_credentials
input_model = cast(type[BlockSchema], block.input_schema)
credentials_input_fields = input_model.get_credentials_fields()
if len(credentials_input_fields) == 1 and isinstance(
block.test_credentials, _BaseCredentials
):
field_name = next(iter(credentials_input_fields))
extra_exec_kwargs[field_name] = block.test_credentials
elif credentials_input_fields and block.test_credentials:
if not isinstance(block.test_credentials, dict):
raise TypeError(f"Block {block.name} has no usable test credentials")
else:
for field_name in credentials_input_fields:
if field_name in block.test_credentials:
extra_exec_kwargs[field_name] = block.test_credentials[field_name]
for input_data in block.test_input:
log.info(f"{prefix} in: {input_data}")

View File

@ -160,3 +160,45 @@ async def test_get_input_schema(server: SpinTestServer):
output_schema = created_graph.output_schema
output_schema["title"] = "ExpectedOutputSchema"
assert output_schema == ExpectedOutputSchema.jsonschema()
@pytest.mark.asyncio(scope="session")
async def test_clean_graph(server: SpinTestServer):
"""
Test the clean_graph function that:
1. Clears input block values
2. Removes credentials from nodes
"""
# Create a graph with input blocks and credentials
graph = Graph(
id="test_clean_graph",
name="Test Clean Graph",
description="Test graph cleaning",
nodes=[
Node(
id="input_node",
block_id=AgentInputBlock().id,
input_default={
"name": "test_input",
"value": "test value",
"description": "Test input description",
},
),
],
links=[],
)
# Create graph and get model
create_graph = CreateGraph(graph=graph)
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)
# Clean the graph
created_graph.clean_graph()
# Verify input block value is cleared
input_node = next(
n for n in created_graph.nodes if n.block_id == AgentInputBlock().id
)
assert input_node.input_default["value"] == ""

View File

@ -245,6 +245,7 @@ export function CustomNode({
].includes(nodeType) &&
// No input connection handles for credentials
propKey !== "credentials" &&
!propKey.endsWith("_credentials") &&
// For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle
!(nodeType == BlockUIType.OUTPUT && propKey == "name");
const isConnected = isInputHandleConnected(propKey);
@ -261,7 +262,8 @@ export function CustomNode({
side="left"
/>
) : (
propKey != "credentials" && (
propKey !== "credentials" &&
!propKey.endsWith("_credentials") && (
<div className="flex gap-1">
<span className="text-m green mb-0 text-gray-900 dark:text-gray-100">
{propSchema.title || beautifyString(propKey)}
@ -726,13 +728,10 @@ export function CustomNode({
</div>
{/* Body */}
<div className="ml-5 mt-6 rounded-b-xl">
<div className="mx-5 my-6 rounded-b-xl">
{/* Input Handles */}
{data.uiType !== BlockUIType.NOTE ? (
<div
className="flex w-fit items-start justify-between"
data-id="input-handles"
>
<div data-id="input-handles">
<div>
{data.uiType === BlockUIType.WEBHOOK_MANUAL &&
(data.webhook ? (
@ -781,7 +780,6 @@ export function CustomNode({
<Switch
onCheckedChange={toggleAdvancedSettings}
checked={isAdvancedOpen}
className="mr-5"
/>
</div>
</>
@ -790,7 +788,7 @@ export function CustomNode({
{data.uiType !== BlockUIType.NOTE && (
<>
<LineSeparator />
<div className="flex items-start justify-end rounded-b-xl pb-2 pr-2 pt-6">
<div className="flex items-start justify-end rounded-b-xl pt-6">
<div className="flex-none">
{data.outputSchema &&
generateOutputHandles(data.outputSchema, data.uiType)}

View File

@ -82,7 +82,7 @@ const NodeHandle: FC<HandleProps> = ({
data-testid={`output-handle-${keyName}`}
position={Position.Right}
id={keyName}
className="group -mr-[26px]"
className="group -mr-[38px]"
>
<div className="pointer-events-none flex items-center">
{label}

View File

@ -6,6 +6,10 @@ import { Separator } from "@/components/ui/separator";
import BackendAPI from "@/lib/autogpt-server-api";
import { useRouter } from "next/navigation";
import Link from "next/link";
import { useToast } from "@/components/ui/use-toast";
import useSupabase from "@/hooks/useSupabase";
import { DownloadIcon, LoaderIcon } from "lucide-react";
interface AgentInfoProps {
name: string;
creator: string;
@ -32,8 +36,11 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
storeListingVersionId,
}) => {
const router = useRouter();
const api = React.useMemo(() => new BackendAPI(), []);
const { user } = useSupabase();
const { toast } = useToast();
const [downloading, setDownloading] = React.useState(false);
const handleAddToLibrary = async () => {
try {
@ -45,6 +52,46 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
}
};
const handleDownloadToLibrary = async () => {
const downloadAgent = async (): Promise<void> => {
setDownloading(true);
try {
const file = await api.downloadStoreAgent(storeListingVersionId);
// Similar to Marketplace v1
const jsonData = JSON.stringify(file, null, 2);
// Create a Blob from the file content
const blob = new Blob([jsonData], { type: "application/json" });
// Create a temporary URL for the Blob
const url = window.URL.createObjectURL(blob);
// Create a temporary anchor element
const a = document.createElement("a");
a.href = url;
a.download = `agent_${storeListingVersionId}.json`; // Set the filename
// Append the anchor to the body, click it, and remove it
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
// Revoke the temporary URL
window.URL.revokeObjectURL(url);
toast({
title: "Download Complete",
description: "Your agent has been successfully downloaded.",
});
} catch (error) {
console.error(`Error downloading agent:`, error);
throw error;
}
};
await downloadAgent();
setDownloading(false);
};
return (
<div className="w-full max-w-[396px] px-4 sm:px-6 lg:w-[396px] lg:px-0">
{/* Title */}
@ -72,15 +119,36 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
{/* Run Agent Button */}
<div className="mb-4 w-full lg:mb-[60px]">
<button
onClick={handleAddToLibrary}
className="inline-flex w-full items-center justify-center gap-2 rounded-[38px] bg-violet-600 px-4 py-3 transition-colors hover:bg-violet-700 sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4"
>
<IconPlay className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
Add To Library
</span>
</button>
{user ? (
<button
onClick={handleAddToLibrary}
className="inline-flex w-full items-center justify-center gap-2 rounded-[38px] bg-violet-600 px-4 py-3 transition-colors hover:bg-violet-700 sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4"
>
<IconPlay className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
Add To Library
</span>
</button>
) : (
<button
onClick={handleDownloadToLibrary}
className={`inline-flex w-full items-center justify-center gap-2 rounded-[38px] px-4 py-3 transition-colors sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4 ${
downloading
? "bg-neutral-400"
: "bg-violet-600 hover:bg-violet-700"
}`}
disabled={downloading}
>
{downloading ? (
<LoaderIcon className="h-5 w-5 animate-spin text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
) : (
<DownloadIcon className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
)}
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
{downloading ? "Downloading..." : "Download Agent as File"}
</span>
</button>
)}
</div>
{/* Rating and Runs */}

View File

@ -15,15 +15,11 @@
.custom-node [data-id^="date-picker"],
.custom-node [data-list-container],
.custom-node [data-add-item],
.custom-node [data-content-settings] {
min-width: calc(100% - 2.5rem);
max-width: 400px;
}
.array-item-container {
.custom-node [data-content-settings],
.array-item-container {
display: flex;
align-items: center;
min-width: calc(100% - 2.5rem);
max-width: 100%;
}
.custom-node .custom-switch {

View File

@ -1,5 +1,5 @@
import { z } from "zod";
import { cn } from "@/lib/utils";
import { beautifyString, cn } from "@/lib/utils";
import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
@ -87,12 +87,13 @@ export type OAuthPopupResultMessage = { message_type: "oauth_popup_result" } & (
);
export const CredentialsInput: FC<{
selfKey: string;
className?: string;
selectedCredentials?: CredentialsMetaInput;
onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
}> = ({ className, selectedCredentials, onSelectCredentials }) => {
}> = ({ selfKey, className, selectedCredentials, onSelectCredentials }) => {
const api = useBackendAPI();
const credentials = useCredentials();
const credentials = useCredentials(selfKey);
const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] =
useState(false);
const [isOAuth2FlowInProgress, setOAuth2FlowInProgress] = useState(false);
@ -209,6 +210,7 @@ export const CredentialsInput: FC<{
<>
{supportsApiKey && (
<APIKeyCredentialsModal
credentialsFieldName={selfKey}
open={isAPICredentialsModalOpen}
onClose={() => setAPICredentialsModalOpen(false)}
onCredentialsCreate={(credsMeta) => {
@ -242,7 +244,9 @@ export const CredentialsInput: FC<{
return (
<>
<div className="mb-2 flex gap-1">
<span className="text-m green text-gray-900">Credentials</span>
<span className="text-m green text-gray-900">
{providerName} Credentials
</span>
<SchemaTooltip description={schema.description} />
</div>
<div className={cn("flex flex-row space-x-2", className)}>
@ -310,7 +314,12 @@ export const CredentialsInput: FC<{
// Saved credentials exist
return (
<>
<span className="text-m green mb-0 text-gray-900">Credentials</span>
<div className="flex gap-1">
<span className="text-m green mb-0 text-gray-900">
{providerName} Credentials
</span>
<SchemaTooltip description={schema.description} />
</div>
<Select value={selectedCredentials?.id} onValueChange={handleValueChange}>
<SelectTrigger>
<SelectValue placeholder={schema.placeholder} />
@ -353,11 +362,12 @@ export const CredentialsInput: FC<{
};
export const APIKeyCredentialsModal: FC<{
credentialsFieldName: string;
open: boolean;
onClose: () => void;
onCredentialsCreate: (creds: CredentialsMetaInput) => void;
}> = ({ open, onClose, onCredentialsCreate }) => {
const credentials = useCredentials();
}> = ({ credentialsFieldName, open, onClose, onCredentialsCreate }) => {
const credentials = useCredentials(credentialsFieldName);
const formSchema = z.object({
apiKey: z.string().min(1, "API Key is required"),

View File

@ -201,7 +201,7 @@ export const NodeGenericInputField: FC<{
className,
displayName,
}) => {
className = cn(className, "my-2");
className = cn(className);
displayName ||= propSchema.title || beautifyString(propKey);
if ("allOf" in propSchema) {
@ -653,6 +653,7 @@ const NodeCredentialsInput: FC<{
return (
<div className={cn("flex flex-col", className)}>
<CredentialsInput
selfKey={selfKey}
onSelectCredentials={(credsMeta) =>
handleInputChange(selfKey, credsMeta)
}
@ -876,18 +877,19 @@ const NodeArrayInput: FC<{
(c) => c.targetHandle === entryKey && c.target === nodeId,
);
return (
<div key={entryKey} className="self-start">
<div key={entryKey}>
<NodeHandle
keyName={entryKey}
schema={schema.items!}
isConnected={isConnected}
isRequired={false}
side="left"
/>
<div className="mb-2 flex space-x-2">
<NodeHandle
keyName={entryKey}
schema={schema.items!}
isConnected={isConnected}
isRequired={false}
side="left"
/>
{!isConnected &&
(schema.items ? (
<NodeGenericInputField
className="w-full"
nodeId={nodeId}
propKey={entryKey}
propSchema={schema.items}

View File

@ -26,7 +26,9 @@ export type CredentialsData =
isLoading: false;
});
export default function useCredentials(): CredentialsData | null {
export default function useCredentials(
inputFieldName: string,
): CredentialsData | null {
const nodeId = useNodeId();
const allProviders = useContext(CredentialsProvidersContext);
@ -35,8 +37,9 @@ export default function useCredentials(): CredentialsData | null {
}
const data = useNodesData<Node<CustomNodeData>>(nodeId)!.data;
const credentialsSchema = data.inputSchema.properties
.credentials as BlockIOCredentialsSubSchema;
const credentialsSchema = data.inputSchema.properties[
inputFieldName
] as BlockIOCredentialsSubSchema;
const discriminatorValue: CredentialsProviderName | null =
(credentialsSchema.discriminator &&

View File

@ -348,6 +348,17 @@ export default class BackendAPI {
return this._get("/store/myagents", params);
}
downloadStoreAgent(
storeListingVersionId: string,
version?: number,
): Promise<BlobPart> {
const url = version
? `/store/download/agents/${storeListingVersionId}?version=${version}`
: `/store/download/agents/${storeListingVersionId}`;
return this._get(url);
}
/////////////////////////////////////////
/////////// V2 LIBRARY API //////////////
/////////////////////////////////////////

View File

@ -829,13 +829,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pyright"
version = "1.1.390"
version = "1.1.391"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.390-py3-none-any.whl", hash = "sha256:ecebfba5b6b50af7c1a44c2ba144ba2ab542c227eb49bc1f16984ff714e0e110"},
{file = "pyright-1.1.390.tar.gz", hash = "sha256:aad7f160c49e0fbf8209507a15e17b781f63a86a1facb69ca877c71ef2e9538d"},
{file = "pyright-1.1.391-py3-none-any.whl", hash = "sha256:54fa186f8b3e8a55a44ebfa842636635688670c6896dcf6cf4a7fc75062f4d15"},
{file = "pyright-1.1.391.tar.gz", hash = "sha256:66b2d42cdf5c3cbab05f2f4b76e8bec8aa78e679bfa0b6ad7b923d9e027cadb2"},
]
[package.dependencies]
@ -871,13 +871,13 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
[[package]]
name = "pytest-asyncio"
version = "0.25.0"
version = "0.25.1"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.9"
files = [
{file = "pytest_asyncio-0.25.0-py3-none-any.whl", hash = "sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3"},
{file = "pytest_asyncio-0.25.0.tar.gz", hash = "sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609"},
{file = "pytest_asyncio-0.25.1-py3-none-any.whl", hash = "sha256:c84878849ec63ff2ca509423616e071ef9cd8cc93c053aa33b5b8fb70a990671"},
{file = "pytest_asyncio-0.25.1.tar.gz", hash = "sha256:79be8a72384b0c917677e00daa711e07db15259f4d23203c59012bcd989d4aee"},
]
[package.dependencies]
@ -1058,29 +1058,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "ruff"
version = "0.8.3"
version = "0.8.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"},
{file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"},
{file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"},
{file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"},
{file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"},
{file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"},
{file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"},
{file = "ruff-0.8.4-py3-none-linux_armv6l.whl", hash = "sha256:58072f0c06080276804c6a4e21a9045a706584a958e644353603d36ca1eb8a60"},
{file = "ruff-0.8.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ffb60904651c00a1e0b8df594591770018a0f04587f7deeb3838344fe3adabac"},
{file = "ruff-0.8.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ddf5d654ac0d44389f6bf05cee4caeefc3132a64b58ea46738111d687352296"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e248b1f0fa2749edd3350a2a342b67b43a2627434c059a063418e3d375cfe643"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf197b98ed86e417412ee3b6c893f44c8864f816451441483253d5ff22c0e81e"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c41319b85faa3aadd4d30cb1cffdd9ac6b89704ff79f7664b853785b48eccdf3"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9f8402b7c4f96463f135e936d9ab77b65711fcd5d72e5d67597b543bbb43cf3f"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e56b3baa9c23d324ead112a4fdf20db9a3f8f29eeabff1355114dd96014604"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:736272574e97157f7edbbb43b1d046125fce9e7d8d583d5d65d0c9bf2c15addf"},
{file = "ruff-0.8.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fe710ab6061592521f902fca7ebcb9fabd27bc7c57c764298b1c1f15fff720"},
{file = "ruff-0.8.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:13e9ec6d6b55f6da412d59953d65d66e760d583dd3c1c72bf1f26435b5bfdbae"},
{file = "ruff-0.8.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:97d9aefef725348ad77d6db98b726cfdb075a40b936c7984088804dfd38268a7"},
{file = "ruff-0.8.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ab78e33325a6f5374e04c2ab924a3367d69a0da36f8c9cb6b894a62017506111"},
{file = "ruff-0.8.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8ef06f66f4a05c3ddbc9121a8b0cecccd92c5bf3dd43b5472ffe40b8ca10f0f8"},
{file = "ruff-0.8.4-py3-none-win32.whl", hash = "sha256:552fb6d861320958ca5e15f28b20a3d071aa83b93caee33a87b471f99a6c0835"},
{file = "ruff-0.8.4-py3-none-win_amd64.whl", hash = "sha256:f21a1143776f8656d7f364bd264a9d60f01b7f52243fbe90e7670c0dfe0cf65d"},
{file = "ruff-0.8.4-py3-none-win_arm64.whl", hash = "sha256:9183dd615d8df50defa8b1d9a074053891ba39025cf5ae88e8bcb52edcc4bf08"},
{file = "ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8"},
]
[[package]]
@ -1298,4 +1298,4 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "5dbf6cd95ba8e80c4a6b4e6a54c6cdfb1488619e4293d1d5a8572c5330485493"
content-hash = "c62380410681d30c5c5da8b047c449c92196f2a25ea5d353db2a3e5470737513"

View File

@ -24,12 +24,12 @@ prometheus-fastapi-instrumentator = "^7.0.0"
autogpt-libs = {path = "../autogpt_libs"}
[tool.poetry.group.dev.dependencies]
pytest = "^8.3.4"
pytest-asyncio = "^0.25.0"
pytest-asyncio = "^0.25.1"
pytest-watcher = "^0.4.3"
requests = "^2.32.3"
ruff = "^0.8.3"
pyright = "^1.1.390"
ruff = "^0.8.4"
pyright = "^1.1.391"
isort = "^5.13.2"
black = "^24.10.0"

View File

@ -238,6 +238,16 @@ Naturally, to add an authenticated block for a new provider, you'll have to add
```
</details>
#### Multiple credentials inputs
Multiple credentials inputs are supported, under the following conditions
(see the sketch below):
- The name of each credentials input field must end with `_credentials`.
- The name of each credentials input field must match the name of the corresponding
parameter on the block's `run(..)` method.
- If more than one of the credentials parameters is required, `test_credentials`
must be a `dict[str, Credentials]` that maps each required credentials parameter
name to suitable test credentials.
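As a reference, here is a minimal sketch of a block declaring two credentials inputs, modeled on the GitHub blocks elsewhere in this commit. The block itself (`GithubCopyFileBlock`), its placeholder ID, and the `backend.blocks.github._auth` import path are illustrative assumptions; only the `_credentials` naming, matching `run(..)` parameters, and `test_credentials` dict are what this section prescribes.
```python
# Hypothetical example only: shows the naming and test_credentials rules above
# using the GitHub credentials helpers. Module paths are assumed.
from backend.blocks.github._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GithubCredentials,
    GithubCredentialsField,
    GithubCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class GithubCopyFileBlock(Block):
    class Input(BlockSchema):
        # Both credentials field names end with `_credentials`.
        source_credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        destination_credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        source_repo_url: str = SchemaField(description="Repository to copy the file from")
        destination_repo_url: str = SchemaField(description="Repository to copy the file to")
        file_path: str = SchemaField(description="Path of the file to copy")

    class Output(BlockSchema):
        url: str = SchemaField(description="URL of the copied file")
        error: str = SchemaField(description="Error message if the copy failed")

    def __init__(self):
        super().__init__(
            id="00000000-0000-4000-8000-000000000000",  # placeholder UUID
            description="Hypothetical block demonstrating two credentials inputs.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubCopyFileBlock.Input,
            output_schema=GithubCopyFileBlock.Output,
            test_input={
                "source_repo_url": "https://github.com/owner/source-repo",
                "destination_repo_url": "https://github.com/owner/dest-repo",
                "file_path": "docs/README.md",
                "source_credentials": TEST_CREDENTIALS_INPUT,
                "destination_credentials": TEST_CREDENTIALS_INPUT,
            },
            # More than one credentials input is required, so test_credentials
            # maps each credentials parameter name to suitable test credentials.
            test_credentials={
                "source_credentials": TEST_CREDENTIALS,
                "destination_credentials": TEST_CREDENTIALS,
            },
            test_output=[
                ("url", "https://github.com/owner/dest-repo/blob/main/docs/README.md")
            ],
        )

    def run(
        self,
        input_data: Input,
        *,
        # Keyword parameter names match the credentials field names on Input.
        source_credentials: GithubCredentials,
        destination_credentials: GithubCredentials,
        **kwargs,
    ) -> BlockOutput:
        # Real copy logic omitted; this sketch only shows the credentials wiring.
        yield "url", (
            f"{input_data.destination_repo_url}/blob/main/{input_data.file_path}"
        )
```
At execution time, each credentials input is resolved and injected into `run(..)` under its own field name, as shown in the executor changes above.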
#### Adding an OAuth2 service integration
To add support for a new OAuth2-authenticated service, you'll need to add an `OAuthHandler`.