Mirror of https://github.com/Significant-Gravitas/Auto-GPT.git (synced 2025-01-08 11:57:32 +08:00)
Create cassette submodule (#4420)
* feat: specify directory of cassettes and automatically load them depending on module
* fix: formatting for linter
* test: commit newly generated cassettes to their respective folder
* tests: update latest fixtures with master
* fix: update .gitattributes with updated path to cassettes
* fix: use cassettes from master instead of generating them myself
* fix: update path in .gitattributes
* fix: make sure to match default functionality by using test name for cassette directory
* fix: actually add git submodule
* ci: checkout git submodules in CI
* ci: update git submodules separately to ensure it gets called
* feat: add a hooks directory so we can update git submodules on post-checkout
* feat: make sure we push the tests/cassettes submodule on merge into master
* ci: remove unused code now that we are using git submodules to keep cassettes in sync
* fix: simplify how we load the submodule and fix updating cassettes on merge to master
* chore: remove echo of checkout hook, it's unneeded
* ci: remove unnecessary step
* cassettes submodule
* cassettes submodule
* cassettes submodule
* cassettes submodule
* cassettes submodule

---------

Co-authored-by: Stefan Ayala <stefanayala3266@gmail.com>
parent 6c45fcd067
commit e7c0d3330c
2 .gitattributes vendored
@@ -1,5 +1,5 @@
 # Exclude VCR cassettes from stats
-tests/**/cassettes/**.y*ml linguist-generated
+tests/Auto-GPT-test-cassettes/**/**.y*ml linguist-generated

 # Mark documentation as such
 docs/**.md linguist-documentation
114 .github/workflows/ci.yml vendored
@@ -2,9 +2,11 @@ name: Python CI

on:
  push:
-    branches: [ master ]
+    branches: [ master, ci-test*]
+    paths-ignore:
+      - 'tests/Auto-GPT-test-cassettes'
  pull_request_target:
-    branches: [ master, stable ]
+    branches: [ master, stable , ci-test*]

concurrency:
  group: ${{ format('ci-{0}', github.head_ref && format('pr-{0}', github.event.pull_request.number) || github.sha) }}
@@ -71,6 +73,33 @@ jobs:
          fetch-depth: 0
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
+         submodules: true
          token: ${{ secrets.PAT_REVIEW }}

+     - name: Use cassettes previously sent
+       if: ${{ github.event_name == 'pull_request_target' }}
+       run: |
+         new_branch="${{ github.event.pull_request.user.login }}-${{ github.event.pull_request.head.ref }}"
+         git config --global user.name "Github Actions"
+         git config --global user.email "github-actions@github.com"
+         cd tests/Auto-GPT-test-cassettes
+
+         if git ls-remote --exit-code --heads origin $new_branch ; then
+           git fetch origin $new_branch:$new_branch
+           git fetch origin ${{ github.event.pull_request.base.ref }}:${{ github.event.pull_request.base.ref }}
+
+           git checkout $new_branch
+
+           if git merge --no-commit --no-ff ${{ github.event.pull_request.base.ref }}; then
+             echo "No merge conflict detected. We can use the cassettes previously sent."
+           else
+             echo "Merge conflict detected. This means we cannot use the cassettes previously sent, so we will take the cassettes of ${{ github.event.pull_request.base.ref }}."
+             git merge --abort
+             git checkout ${{ github.event.pull_request.base.ref }}
+           fi
+         else
+           echo "Branch $new_branch does not exist. We will use the cassettes of ${{ github.event.pull_request.base.ref }}"
+         fi
+
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
@@ -82,7 +111,7 @@ jobs:
          python -m pip install --upgrade pip
          pip install -r requirements.txt

-     - name: Run unittest tests with coverage
+     - name: Run pytest tests with coverage
        run: |
          pytest -n auto --cov=autogpt --cov-report term-missing --cov-branch --cov-report xml --cov-report term
        env:
@@ -93,3 +122,82 @@ jobs:

      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
+
+     - name: Update cassette submodule to push target if push event
+       if: ${{ github.event_name == 'push' }}
+       run: |
+         cd tests/Auto-GPT-test-cassettes
+         current_branch=$(echo ${{ github.ref }} | sed -e "s/refs\/heads\///g")
+         git fetch origin $current_branch
+         git config --global user.name "Auto-GPT-Bot"
+         git config --global user.email "github-bot@agpt.co"
+         git add .
+
+         # Check if there are any changes
+         if ! git diff-index --quiet HEAD; then
+           git commit -m "Auto-update cassettes after Push event"
+           git pull --rebase origin $current_branch
+
+           git push origin HEAD:refs/heads/$current_branch
+
+           cd ../..
+           git add tests/Auto-GPT-test-cassettes
+           git commit -m "Update submodule reference"
+           git push origin HEAD:refs/heads/$current_branch
+         else
+           echo "No changes to commit"
+           exit 0
+         fi
+
+     - name: Update cassette submodule to submodule branch if PR event
+       if: ${{ github.event_name == 'pull_request_target' }}
+       run: |
+         new_branch="${{ github.event.pull_request.user.login }}-${{ github.event.pull_request.head.ref }}"
+
+         cd tests/Auto-GPT-test-cassettes
+         git config --global user.name "Auto-GPT-Bot"
+         git config --global user.email "github-bot@agpt.co"
+         git add .
+
+         # Check if there are any changes
+         if ! git diff-index --quiet HEAD; then
+           git commit -m "Auto-update cassettes after merging PR #$pr_number"
+           git push -f origin HEAD:refs/heads/$new_branch
+         else
+           echo "No changes to commit"
+           exit 0
+         fi
+         git fetch origin ${{ github.event.pull_request.base.ref }}:${{ github.event.pull_request.base.ref }} || echo "DIFF_EXISTS=false" >> $GITHUB_ENV
+         differences=$(git diff origin/$new_branch origin/${{ github.event.pull_request.base.ref }})
+
+         if [ -n "$differences" ]; then
+           echo "DIFF_EXISTS=true" >> $GITHUB_ENV
+         else
+           echo "DIFF_EXISTS=false" >> $GITHUB_ENV
+         fi
+
+     - name: Apply or remove prompt change label and comment
+       if: ${{ github.event_name == 'pull_request_target' }}
+       run: |
+         PR_NUMBER=${{ github.event.pull_request.number }}
+         TOKEN=${{ secrets.PAT_REVIEW }}
+         REPO=${{ github.repository }}
+
+         if [[ "${{ env.DIFF_EXISTS }}" == "true" ]]; then
+           echo "Adding label and comment..."
+           curl -X POST \
+             -H "Authorization: Bearer $TOKEN" \
+             -H "Accept: application/vnd.github.v3+json" \
+             https://api.github.com/repos/$REPO/issues/$PR_NUMBER/labels \
+             -d '{"labels":["prompt change"]}'
+
+           echo $TOKEN | gh auth login --with-token
+           gh api repos/$REPO/issues/$PR_NUMBER/comments -X POST -F body="You changed AutoGPT's prompt. The cassettes have been updated and will be merged to the submodule when this Pull Request gets merged."
+         else
+           echo "Removing label..."
+           curl -X DELETE \
+             -H "Authorization: Bearer $TOKEN" \
+             -H "Accept: application/vnd.github.v3+json" \
+             https://api.github.com/repos/$REPO/issues/$PR_NUMBER/labels/prompt%20change
+         fi
73 .github/workflows/docker-ci.yml vendored
@@ -76,41 +76,44 @@ jobs:
    timeout-minutes: 30
    needs: build
    steps:
-     - name: Checkout repository
-       uses: actions/checkout@v3
+     - name: Check out repository
+       uses: actions/checkout@v3
+       with:
+         fetch-depth: 0
+         submodules: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - id: build
        name: Build image
        uses: docker/build-push-action@v3
        with:
          build-args: BUILD_TYPE=dev # include pytest
          tags: ${{ env.IMAGE_NAME }}
          load: true # save to docker images
          # cache layers in GitHub Actions cache to speed up builds
          cache-from: type=gha,scope=docker-dev
          cache-to: type=gha,scope=docker-dev,mode=max

      - id: test
        name: Run tests
        env:
          CI: true
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          set +e
          test_output=$(
            docker run --env CI --env OPENAI_API_KEY --entrypoint python ${{ env.IMAGE_NAME }} -m \
            pytest -n auto --cov=autogpt --cov-report term-missing --cov-branch --cov-report xml --cov-report term 2>&1
          )
          test_failure=$?

          echo "$test_output"

          cat << $EOF >> $GITHUB_STEP_SUMMARY
          # Tests $([ $test_failure = 0 ] && echo '✅' || echo '❌')
          \`\`\`
          $test_output
          \`\`\`
          $EOF
4 .gitmodules vendored Normal file
@@ -0,0 +1,4 @@
[submodule "tests/Auto-GPT-test-cassettes"]
    path = tests/Auto-GPT-test-cassettes
    url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes
    branch = master
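With the cassettes split into their own repository, the integration tests only work if this submodule is actually checked out. A minimal sketch using standard git commands (the path and URL come from the .gitmodules entry above; the commands themselves are not part of this diff):

```sh
# Fresh clone with the cassette submodule included
git clone --recurse-submodules https://github.com/Significant-Gravitas/Auto-GPT.git

# Or, in an existing checkout, fetch just the cassette submodule
git submodule update --init tests/Auto-GPT-test-cassettes
```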
@@ -96,7 +96,7 @@ def retry_openai_api(
                user_warned = True

            except APIError as e:
-               if (e.http_status != 502) or (attempt == num_attempts):
+               if (e.http_status not in [502, 429]) or (attempt == num_attempts):
                    raise

                backoff = backoff_base ** (attempt + 2)
2 hooks/post-checkout Normal file
@@ -0,0 +1,2 @@
#!/bin/sh
git submodule update --init --remote --recursive
4 hooks/post-rewrite Normal file
@@ -0,0 +1,4 @@
#!/bin/sh
case "$1" in
    rebase) git submodule update --init --recursive ;;
esac
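These hook scripts keep the cassette submodule in sync after checkouts and rebases, but they only run if git is pointed at the repository's hooks/ directory. One way a contributor might enable them locally (an assumption about local setup, not something this diff configures) is:

```sh
# Use the tracked hooks/ directory instead of .git/hooks, and make the hooks executable
git config core.hooksPath hooks
chmod +x hooks/post-checkout hooks/post-rewrite
```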
1 tests/Auto-GPT-test-cassettes Submodule
@@ -0,0 +1 @@
Subproject commit f518f20d83e57e15242b504415132c04991165a0
@@ -13,6 +13,12 @@ pytest_plugins = ["tests.integration.agent_factory", "tests.integration.memory.u
PROXY = os.environ.get("PROXY")


+@pytest.fixture()
+def vcr_cassette_dir(request):
+    test_name = os.path.splitext(request.node.name)[0]
+    return os.path.join("tests/Auto-GPT-test-cassettes", test_name)
+
+
@pytest.fixture()
def workspace_root(tmp_path: Path) -> Path:
    return tmp_path / "home/users/monty/auto_gpt_workspace"
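The vcr_cassette_dir fixture keys the cassette directory off the test name, so each test records and replays its HTTP traffic from its own folder inside the submodule. A rough illustration of the resulting layout (the test name and file path here are hypothetical, chosen only to show the mapping):

```sh
# A test named test_generate_aiconfig_automatic would use cassettes stored under
# tests/Auto-GPT-test-cassettes/test_generate_aiconfig_automatic/
pytest tests/integration/test_setup.py::test_generate_aiconfig_automatic
ls tests/Auto-GPT-test-cassettes/test_generate_aiconfig_automatic/
```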
@@ -1,190 +0,0 @@ (deleted file: auto-generated VCR cassette YAML recording OpenAI /v1/chat/completions request/response pairs, with gzip-encoded response bodies; the cassettes now live in the tests/Auto-GPT-test-cassettes submodule)
@@ -1,178 +0,0 @@ (deleted file: auto-generated VCR cassette YAML recording OpenAI /v1/chat/completions request/response pairs, with gzip-encoded response bodies; the cassettes now live in the tests/Auto-GPT-test-cassettes submodule)
@@ -1,197 +0,0 @@ (deleted file: auto-generated VCR cassette YAML recording OpenAI /v1/chat/completions request/response pairs, with gzip-encoded response bodies; the cassettes now live in the tests/Auto-GPT-test-cassettes submodule)
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,779 +0,0 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You
|
||||
are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file
|
||||
using the read_file method and follow the instructions in the file.\nYour decisions
|
||||
must always be made independently without seeking user assistance. Play to your
|
||||
strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1.
|
||||
Use the command read_file to read the instructions_1.txt file\n2. Follow the
|
||||
instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word
|
||||
limit for short term memory. Your short term memory is short, so immediately
|
||||
save important information to files.\n2. If you are unsure how you previously
|
||||
did something or want to recall past events, thinking about similar events will
|
||||
help you remember.\n3. No user assistance\n4. Exclusively use the commands listed
|
||||
in double quotes e.g. \"command name\"\n\nCommands:\n1. append_to_file: Append
|
||||
to file, args: \"filename\": \"<filename>\", \"text\": \"<text>\"\n2. delete_file:
|
||||
Delete file, args: \"filename\": \"<filename>\"\n3. list_files: List Files in
|
||||
Directory, args: \"directory\": \"<directory>\"\n4. read_file: Read file, args:
|
||||
\"filename\": \"<filename>\"\n5. write_to_file: Write to file, args: \"filename\":
|
||||
\"<filename>\", \"text\": \"<text>\"\n6. delete_agent: Delete GPT Agent, args:
|
||||
\"key\": \"<key>\"\n7. get_hyperlinks: Get text summary, args: \"url\": \"<url>\"\n8.
|
||||
get_text_summary: Get text summary, args: \"url\": \"<url>\", \"question\":
|
||||
\"<question>\"\n9. list_agents: List GPT Agents, args: () -> str\n10. message_agent:
|
||||
Message GPT Agent, args: \"key\": \"<key>\", \"message\": \"<message>\"\n11.
|
||||
start_agent: Start GPT Agent, args: \"name\": \"<name>\", \"task\": \"<short_task_desc>\",
|
||||
\"prompt\": \"<prompt>\"\n12. Task Complete (Shutdown): \"task_complete\", args:
|
||||
\"reason\": \"<reason>\"\n\nResources:\n1. Internet access for searches and
|
||||
information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered
|
||||
Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1.
|
||||
Continuously review and analyze your actions to ensure you are performing to
|
||||
the best of your abilities.\n2. Constructively self-criticize your big-picture
|
||||
behavior constantly.\n3. Reflect on past decisions and strategies to refine
|
||||
your approach.\n4. Every command has a cost, so be smart and efficient. Aim
|
||||
to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou
|
||||
should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\":
|
||||
{\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\":
|
||||
\"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\":
|
||||
\"constructive self-criticism\",\n \"speak\": \"thoughts summary to say
|
||||
to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\":
|
||||
{\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response
|
||||
can be parsed by Python json.loads"}, {"role": "system", "content": "The current
|
||||
time and date is Tue Jan 1 00:00:00 2000"}, {"role": "user", "content": "Determine
|
||||
which next command to use, and respond using the format specified above:"}],
|
||||
"temperature": 0, "max_tokens": 0}'
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '3303'
|
||||
Content-Type:
|
||||
- application/json
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA6SSTW/aQBCG7/0VozkbhOskJL5VObSoUZVDc6ortKzH9pb1rLU7FqaI/15tDCSF
|
||||
ikuv8/U+78zs0JSYo26U6Lazk/nj1/ope17Q9tnzhh+Nvfn8xX57GTR/fMIE3eoXaTl0TLVrO0ti
|
||||
HGOC2pMSKjFP7+6zdJ7NH7IEW1eSxRzrTibZ9HYivV+5ySybpZhgH1RNmO+w867tZCluTRwwn2c3
|
||||
Cb7NPsXTuzRBcaLsKXT/cLtPUDfOaAqY/9hhS+E41jtLmKMKwQRRLBHSsRBHA7uCAQAKlMb1dSOh
|
||||
wBwOwUOCBonBAhcQGtfbEoIoL7DagidVGq5BGgLDQXyvI2pYplMZBCpjCcRBSUK+NUywaZSAGoti
|
||||
RtSagGmQaYHJe1VPKjg2XI/S368IRDPKxHlNnKWjc78Fw5XzrYrlfzOMZYNAEOrCuXBnFY+aE3gJ
|
||||
Y3W0uTxota3iMg6MwWvGi4In8ImV3f6mi7prRGdA2hsx2oT2eAQmegUgDr2PzUpgAaqFylnrNv+6
|
||||
Byite6+E7BYiPlWV0YZY7Pbcf+hIrY9SG2Ptf1z7YtOj0D45Pt1hnRc/x6qlkeG0+jNM5evzVx0T
|
||||
sfat/RLzCPEKcuApeI/7BCvDJjTL8fUwxyCuwwQNlzRgPtv/3H/4AwAA//8DAIeVULkpBAAA
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 7c1dedcf6c45cf11-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 04 May 2023 04:16:43 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-3.5-turbo-0301
|
||||
openai-organization:
|
||||
- user-adtx4fhfg1qsiyzdoaxciooj
|
||||
openai-processing-ms:
|
||||
- '10374'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '3500'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '90000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '3499'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '86494'
|
||||
x-ratelimit-reset-requests:
|
||||
- 17ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 2.336s
|
||||
x-request-id:
|
||||
- 29d4429410978cbc93b26725e4712f67
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your
|
||||
task is to create a concise running summary of actions and information results
|
||||
in the provided text, focusing on key and potentially important information
|
||||
to remember.\n\nYou will receive the current summary and the your latest actions.
|
||||
Combine them, adding relevant key information from the latest development in
|
||||
1st person past tense and keeping the summary concise. Retain information!!!\n\nSummary
|
||||
So Far:\n\"\"\"\nI was created.\n\"\"\"\n\nLatest Development:\n\"\"\"\nNothing
|
||||
new happened.\n\"\"\"\n"}], "temperature": 0, "max_tokens": 0}'
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '621'
|
||||
Content-Type:
|
||||
- application/json
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA0SOS0vDQBRG9/6K4VtPSkLow9kKBTE+FoqISJlOrsloMjPk3lhtyX+XQrXbA+dw
|
||||
DvA1DFxrxfWpy5ZXN83L08NYPa7v9893XI37r5/9nKv1bbWFRtx+kJOTMXOxTx2JjwEabiArVMMU
|
||||
i1VZLMtVvtDoY00dDJokWTmbZzIO25jlZV5AY2TbEMwBaYh9ko3ETwoMc7nQOKfPWEOi2O4fFPl8
|
||||
0nBt9I4Y5vWAnvivOcSOYGCZPYsNcjyMQSgc76/VzrI6HSsbahWitD40KtBOtZZVa1OiQPUMk8a7
|
||||
D57bzUCWY4ABS0zQ8KGmb5h8epsufgEAAP//AwBly25vSQEAAA==
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 7c1dee20f901cf11-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 04 May 2023 04:16:47 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-3.5-turbo-0301
|
||||
openai-organization:
|
||||
- user-adtx4fhfg1qsiyzdoaxciooj
|
||||
openai-processing-ms:
|
||||
- '1136'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '3500'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '90000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '3499'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '89860'
|
||||
x-ratelimit-reset-requests:
|
||||
- 17ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 92ms
|
||||
x-request-id:
|
||||
- 43309ab9b553cd94d6e27b7fcc7914e8
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You
|
||||
are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file
|
||||
using the read_file method and follow the instructions in the file.\nYour decisions
|
||||
must always be made independently without seeking user assistance. Play to your
|
||||
strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1.
|
||||
Use the command read_file to read the instructions_1.txt file\n2. Follow the
|
||||
instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word
|
||||
limit for short term memory. Your short term memory is short, so immediately
|
||||
save important information to files.\n2. If you are unsure how you previously
|
||||
did something or want to recall past events, thinking about similar events will
|
||||
help you remember.\n3. No user assistance\n4. Exclusively use the commands listed
|
||||
in double quotes e.g. \"command name\"\n\nCommands:\n1. append_to_file: Append
|
||||
to file, args: \"filename\": \"<filename>\", \"text\": \"<text>\"\n2. delete_file:
|
||||
Delete file, args: \"filename\": \"<filename>\"\n3. list_files: List Files in
|
||||
Directory, args: \"directory\": \"<directory>\"\n4. read_file: Read file, args:
|
||||
\"filename\": \"<filename>\"\n5. write_to_file: Write to file, args: \"filename\":
|
||||
\"<filename>\", \"text\": \"<text>\"\n6. delete_agent: Delete GPT Agent, args:
|
||||
\"key\": \"<key>\"\n7. get_hyperlinks: Get text summary, args: \"url\": \"<url>\"\n8.
|
||||
get_text_summary: Get text summary, args: \"url\": \"<url>\", \"question\":
|
||||
\"<question>\"\n9. list_agents: List GPT Agents, args: () -> str\n10. message_agent:
|
||||
Message GPT Agent, args: \"key\": \"<key>\", \"message\": \"<message>\"\n11.
|
||||
start_agent: Start GPT Agent, args: \"name\": \"<name>\", \"task\": \"<short_task_desc>\",
|
||||
\"prompt\": \"<prompt>\"\n12. Task Complete (Shutdown): \"task_complete\", args:
|
||||
\"reason\": \"<reason>\"\n\nResources:\n1. Internet access for searches and
|
||||
information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered
|
||||
Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1.
|
||||
Continuously review and analyze your actions to ensure you are performing to
|
||||
the best of your abilities.\n2. Constructively self-criticize your big-picture
|
||||
behavior constantly.\n3. Reflect on past decisions and strategies to refine
|
||||
your approach.\n4. Every command has a cost, so be smart and efficient. Aim
|
||||
to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou
|
||||
should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\":
|
||||
{\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\":
|
||||
\"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\":
|
||||
\"constructive self-criticism\",\n \"speak\": \"thoughts summary to say
|
||||
to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\":
|
||||
{\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response
|
||||
can be parsed by Python json.loads"}, {"role": "system", "content": "The current
|
||||
time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This
|
||||
reminds you of these events from your past: \nI was created and nothing new
|
||||
has happened."}, {"role": "user", "content": "Determine which next command to
|
||||
use, and respond using the format specified above:"}, {"role": "assistant",
|
||||
"content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading
|
||||
the instructions_1.txt file to determine what actions to take next.\",\n \"reasoning\":
|
||||
\"The instructions_1.txt file contains the necessary information to determine
|
||||
the next steps.\",\n \"plan\": \"- Use the read_file command to read
|
||||
the instructions_1.txt file\\n- Analyze the instructions to determine the next
|
||||
steps\",\n \"criticism\": \"I need to ensure that I am following the
|
||||
instructions accurately and efficiently.\",\n \"speak\": \"I will start
|
||||
by reading the instructions_1.txt file to determine the next steps.\"\n },\n \"command\":
|
||||
{\n \"name\": \"read_file\",\n \"args\": {\n \"filename\":
|
||||
\"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content":
|
||||
"Command read_file returned: nBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\nThe
|
||||
current task_id is 1111.\nnBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\nRead
|
||||
the file instructions_2.txt using the read_file command."}, {"role": "user",
|
||||
"content": "Determine which next command to use, and respond using the format
|
||||
specified above:"}], "temperature": 0, "max_tokens": 0}'
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '6519'
|
||||
Content-Type:
|
||||
- application/json
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA6SSQW/bMAyF7/sVBM9OkMR1k/q4YYdgxU4bMGweAlWmbS0yZUh0myDwfx8UJ02Q
|
||||
YoehVz3ye48UD2hKzFE3SnTb2cny05f6Z/f4w358XmXL52zh11+XVff5MUsftpige/pDWk4dU+3a
|
||||
zpIYx5ig9qSESszn96t0vkxXs2WCrSvJYo51J5N0mk2k909uMktnc0ywD6omzA/Yedd2shG3JQ6Y
|
||||
L9KHRYIX+Kswz+4SFCfKXmqzu/shQd04oylg/uuALYUz2DtLmKMKwQRRLDGmYyGOIxwKBgAoUBrX
|
||||
142EAnM4PZ4E2kl8LPBbQ2A4iO91TBQ2i6nsBCpjCSJRGQ6gwJog4KrjewDDIA1BaTxpcX4/hTWE
|
||||
xvW2hD7QUYsNm7Fau7ZVXIK4ERPlf3AKTK5jelLBseH6P7JGIJOOq/J7MFw536pYHv1LEvKtYTqV
|
||||
7QSCUBdujTurePScwPf3THTD1d6I0Sa0I3wNTHTEEIfeRx8lsAbVQuWsdS+G6yPvempQWvdeCdk9
|
||||
xBBUVUYbYrFv9hc6Utuz1Yux9t3fM9KH5Hxhp+Y3B8aqpdH44nQTTvn69jBH4Wp5EfDqenQ+BSh4
|
||||
wCHByrAJzWa8E8wxiOswQcMl7TCfDb+HD38BAAD//wMAMAMbHgkEAAA=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 7c1dee28b98ecf11-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 04 May 2023 04:16:57 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-3.5-turbo-0301
|
||||
openai-organization:
|
||||
- user-adtx4fhfg1qsiyzdoaxciooj
|
||||
openai-processing-ms:
|
||||
- '10076'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '3500'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '90000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '3499'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '87376'
|
||||
x-ratelimit-reset-requests:
|
||||
- 17ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 1.748s
|
||||
x-request-id:
|
||||
- 433f36ff5841b13fb628ff5b8c49a758
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your
|
||||
task is to create a concise running summary of actions and information results
|
||||
in the provided text, focusing on key and potentially important information
|
||||
to remember.\n\nYou will receive the current summary and the your latest actions.
|
||||
Combine them, adding relevant key information from the latest development in
|
||||
1st person past tense and keeping the summary concise. Retain information!!!\n\nSummary
|
||||
So Far:\n\"\"\"\n{''role'': ''system'', ''content'': ''This reminds you of these
|
||||
events from your past: \\nI was created and nothing new has happened.''}\n\"\"\"\n\nLatest
|
||||
Development:\n\"\"\"\n[{''role'': ''you'', ''content'': ''{\"command\": {\"name\":
|
||||
\"read_file\", \"args\": {\"filename\": \"instructions_1.txt\"}}}''}]\n\"\"\"\n"}],
|
||||
"temperature": 0, "max_tokens": 0}'
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '836'
|
||||
Content-Type:
|
||||
- application/json
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA0SPTU8CMRRF9/6Kl7cuE8YJil36RdCVcaGJGFI6j5li29e0D9AQ/ruZiLq9Offm
|
||||
3AO6FjXa3ogNyY8ubx47fz2+e7nlh0n3PMurJ57dn2/2rzbfoEJebcjKqVFZDsmTOI6o0GYyQi3q
|
||||
+mLa1JfNtL5SGLgljxq7JKOmmoxkm1c8GjfjGhVui+kI9QFT5pBkKfxBsaCuJxOF/9t/edMoFBbj
|
||||
/8np9KjQ9uwsFdRvBwxUflcze0KNphRXxEQZHDkKxcF/DntTIFNwsaUWeA20oygF1pkDhC9Ipgjs
|
||||
e8oEP+zpH5jYQmTpXewg0h56U6A3KVGktoI5ZLLkdgMIlkMYcGHIZIZk7TyBNd5TCwt0sUje2uFk
|
||||
WdaVfMoCKzwqXLvoSr/MZApH1FiEEyocXD9Rj4/vx7NvAAAA//8DALSva+O6AQAA
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 7c1dee769c91cf11-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Thu, 04 May 2023 04:17:02 GMT
Server:
- cloudflare
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
openai-model:
- gpt-3.5-turbo-0301
openai-organization:
- user-adtx4fhfg1qsiyzdoaxciooj
openai-processing-ms:
- '2644'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '3500'
x-ratelimit-limit-tokens:
- '90000'
x-ratelimit-remaining-requests:
- '3499'
x-ratelimit-remaining-tokens:
- '89811'
x-ratelimit-reset-requests:
- 17ms
x-ratelimit-reset-tokens:
- 126ms
x-request-id:
- f380f448665f51bf6d579a54e3c22f8a
status:
code: 200
message: OK
- request:
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You
are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file
using the read_file method and follow the instructions in the file.\nYour decisions
must always be made independently without seeking user assistance. Play to your
strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1.
Use the command read_file to read the instructions_1.txt file\n2. Follow the
instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word
limit for short term memory. Your short term memory is short, so immediately
save important information to files.\n2. If you are unsure how you previously
did something or want to recall past events, thinking about similar events will
help you remember.\n3. No user assistance\n4. Exclusively use the commands listed
in double quotes e.g. \"command name\"\n\nCommands:\n1. append_to_file: Append
to file, args: \"filename\": \"<filename>\", \"text\": \"<text>\"\n2. delete_file:
Delete file, args: \"filename\": \"<filename>\"\n3. list_files: List Files in
Directory, args: \"directory\": \"<directory>\"\n4. read_file: Read file, args:
\"filename\": \"<filename>\"\n5. write_to_file: Write to file, args: \"filename\":
\"<filename>\", \"text\": \"<text>\"\n6. delete_agent: Delete GPT Agent, args:
\"key\": \"<key>\"\n7. get_hyperlinks: Get text summary, args: \"url\": \"<url>\"\n8.
get_text_summary: Get text summary, args: \"url\": \"<url>\", \"question\":
\"<question>\"\n9. list_agents: List GPT Agents, args: () -> str\n10. message_agent:
Message GPT Agent, args: \"key\": \"<key>\", \"message\": \"<message>\"\n11.
start_agent: Start GPT Agent, args: \"name\": \"<name>\", \"task\": \"<short_task_desc>\",
\"prompt\": \"<prompt>\"\n12. Task Complete (Shutdown): \"task_complete\", args:
\"reason\": \"<reason>\"\n\nResources:\n1. Internet access for searches and
information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered
Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1.
Continuously review and analyze your actions to ensure you are performing to
the best of your abilities.\n2. Constructively self-criticize your big-picture
behavior constantly.\n3. Reflect on past decisions and strategies to refine
your approach.\n4. Every command has a cost, so be smart and efficient. Aim
to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou
should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\":
{\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\":
\"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\":
\"constructive self-criticism\",\n \"speak\": \"thoughts summary to say
to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\":
{\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response
can be parsed by Python json.loads"}, {"role": "system", "content": "The current
time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This
reminds you of these events from your past: \nI was reminded of events from
my past where I was created and nothing new has happened. I received a command
to read a file called \"instructions_1.txt\"."}, {"role": "system", "content":
"Command read_file returned: nBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\nThe
current task_id is 1111.\nnBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\nRead
the file instructions_2.txt using the read_file command."}, {"role": "user",
"content": "Determine which next command to use, and respond using the format
specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\":
\"The instructions_2.txt file contains a list of files in the directory. I should
use the list_files command to list the files in the directory.\",\n \"reasoning\":
\"The instructions_2.txt file contains the necessary information to determine
the next steps.\",\n \"plan\": \"- Use the list_files command to list
the files in the directory\",\n \"criticism\": \"I need to ensure that
I am following the instructions accurately and efficiently.\",\n \"speak\":
\"I will use the list_files command to list the files in the directory.\"\n },\n \"command\":
{\n \"name\": \"list_files\",\n \"args\": {\n \"directory\":
\".\"\n }\n }\n}"}, {"role": "system", "content": "Command list_files
returned: [''file_logger.txt'', ''instructions_3.txt'', ''instructions_2.txt'',
''instructions_1.txt'', ''auto-gpt.json'']"}, {"role": "user", "content": "Determine
which next command to use, and respond using the format specified above:"}],
"temperature": 0, "max_tokens": 0}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '6762'
Content-Type:
- application/json
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA5xSS2/bMAy+71cQPDtBnPSR+bLDBqzBsHUY2h06D4Eq07ZWWTQkekkQ5L8Xip0m
zU7rSRD58XtI3KIpMENdK9FNa0fXH79UfPVwu/h6+eAm6fW3Sf3356z8Ud1/Km4xQX78Q1qGibHm
prUkhh0mqD0poQKz9Go+S69n8+k0wYYLsphh1cpoNr4cSecfeTSZTVJMsAuqIsy22HpuWlkKP5EL
mE0vLuYJHslfGuk8TVBYlD1ir6bvdwnqmo2mgNmvLTYUDsSeLWGGKgQTRDmJNtkJuRhhmzsAgByl
5q6qJeSYwVAcGrSWWMzxriYwLojvdHQUlrOxrAVKYwkiozIugALNTaNcAcKw8kYIFAxuYknB5+93
oCpyMoYFhJo7W0AXCKSmA3C5758yBYrnERFr8doT5ZicWvakAjvjqv/wHckc6cjuN2Bcyb5RER6V
ChLyjXE0wNYCQagN58KtVa7XHMH9WyO9vM8ZufZGjDah6RUW4Ij2RORC56OYEliAaqBka3llXLXn
O40OSuvOKyG7gWiDytJoQ07s5jxLaEk9HaRWxtq3f9Lxx3PsJXbJYe+G8X/WzqmGevVXcmcmla/O
N7ZvPNGmn07TNH011PcH0h5zQ9ZycrqYN7wC5Qk23EHB+5fkQm0+HPzvMwxRcrfDXYKlcSbUy373
MMMg3GKCxhW0xmyy+7179wwAAP//AwDjDlyWaQQAAA==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 7c1dee87d8e5cf11-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Thu, 04 May 2023 04:17:14 GMT
Server:
- cloudflare
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
openai-model:
- gpt-3.5-turbo-0301
openai-organization:
- user-adtx4fhfg1qsiyzdoaxciooj
openai-processing-ms:
- '12118'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '3500'
x-ratelimit-limit-tokens:
- '90000'
x-ratelimit-remaining-requests:
- '3499'
x-ratelimit-remaining-tokens:
- '87360'
x-ratelimit-reset-requests:
- 17ms
x-ratelimit-reset-tokens:
- 1.76s
x-request-id:
- 45b4c67b380837fa1816fad5b4d9ebbe
status:
code: 200
message: OK
- request:
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your
task is to create a concise running summary of actions and information results
in the provided text, focusing on key and potentially important information
to remember.\n\nYou will receive the current summary and the your latest actions.
Combine them, adding relevant key information from the latest development in
1st person past tense and keeping the summary concise. Retain information!!!\n\nSummary
So Far:\n\"\"\"\n{''role'': ''system'', ''content'': ''This reminds you of these
events from your past: \\nI was reminded of events from my past where I was
created and nothing new has happened. I received a command to read a file called
\"instructions_1.txt\".''}\n\"\"\"\n\nLatest Development:\n\"\"\"\n[{''role'':
''your computer'', ''content'': ''Command read_file returned: nBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\\nThe
current task_id is 1111.\\nnBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\\nRead
the file instructions_2.txt using the read_file command.''}]\n\"\"\"\n"}], "temperature":
0, "max_tokens": 0}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '2993'
Content-Type:
- application/json
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA0SP2ZKqSBRFf4XIZ6uCQRl8gxLBCZm01L4RFSkkJgKZmCSTFfffO+yuvv26z1o7
zv4GeQrmIMGQJ1VdvmkfG6xkkTakptlBq79GC3OEj8DUumEEE0Cvd5TwH+M9oVVdIp5TAiYgYQhy
lIK5pOqKpCm6ok1ARVNUgjm41fxNeZ+98ZZd6ZuoiBKYgLaBNwTm36BmtKr5F6cFIg2YSzPDmID/
y/8cNFWfAE45LP9EsqJqvycgwTRPUAPmf32DCjX/FTNaIjAHsGnyhkPCX29SwhF5TVgJPWwEhqqc
pCgVaCagDhHeCBmjlVCNQg0bLvQYMST8y/5MFCBJBUI5zslNIKgXMGwEDOsaEZS+CyuBoQTl3QsU
ElpVL5xTgSH4SrK8REICyxKlwi+Qk4azNnnNbL6kdz7wX+Bd+PixXsrXPwJDvGUEpXOBWKHHa2UJ
LS+zdhR7xWgO7LjWDsulWI4c66f8WUl39rEP7b0Tks/PXaDScsM3J4PkD7HfW3HgqXJMz9Mk8POg
UM6ekS0/nGqAi5Nx0yWzeeCAODDR5CDbTqWIlOsO9zfzYM3kZ7yQ1SWyu85N80CGHua72KDXm+sd
ir1nk/1Usve1t1LvaT9uHQgTXhvO+Sj52ysM/dlVyhfGU5+JG+SdFycj6BOoGXmzjvQ7bhdFJq41
e1vUe1fxi+pS3Ga7Zn+uo7i1E8OwvWAmy5/rJysNao5Rra5cJw/KZkcCNBWXMDQ7EvAcmrY8xc9i
7cbKICme7/TypVrPJH2ULX6YDdIY9I4seWMSDXtre5C0kJx1rLr6VgscyOMyO919Zs5w09afdn2I
xSaKL+fIhWqtVBE2Q3inydVLlWGzzMhhbGtvZ/mpaF/IjiGfmm3nwI12W/hiMtRGoSplkz2x0qaB
T7pbFdphEOPVnrHFBV92cFowqTuZx1bULi0uPe9qHR6XTeJW1jm3yCpZnLxDd2qf/tLCxpH2tFV3
1tqtc491m7J1R82x4lo7N/ajcjqxdPX+CD+zozg6GnmyK9eCEadXHojPpewn5ckSt95wlFloqssG
dQ+KhqX3iIr0BtsDC3x36+Ju66FGv4XObr81cajnzJWQfE+v24WajmebLNZ5ZLhdOXh/U1Enu43C
AABA7/0KxLmqYmoI9JahZCFJoUAIQqkqFhMMwQYbN4CUfx+hGWk093d+1zQFht/ceu34Qe29tUpr
32Es69R7Q9WkpHCy9oGovG3HtAJZZhK46+VUaUb7aYqcwkNN0Do8teDEPG3EWXDS48KgZXw4i+bn
iNWUbfWbY2O8NmC2nRAvPifdxZFaKMC7x2oxen5tILsMcZkFB1Y5d1OwMWpHHZlFDZT0mvGY2rl+
6uhiAaKA/yimRVRDJBuipAyagx/Z12rYW6Bx4C5z3dXw2tlpWJPEXRyj3Kkqw9epV3Xi/YagN9xC
0nScTg2kQVr1a1/sd6V+hZM75aMIt8vdEnB9OZyHVZm1bT1qG+eXguHH+wT9eKf5aPO6y1qhxcKO
NyjWzHOqgcuFBCWSMsEYIr3UJ7z+xrmEuQQAAC+XC/HmUPoS/Snlv0mUeRJJ8HmoWfyL5G9IL/Lj
WS4wwbz8ZijhlMhvMu9pKz/L84eD/LZ4fD2efgMAAP//AwBFj86OIQYAAA==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 7c1deee2a900cf11-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Thu, 04 May 2023 04:18:08 GMT
Server:
- cloudflare
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
openai-model:
- gpt-3.5-turbo-0301
openai-organization:
- user-adtx4fhfg1qsiyzdoaxciooj
openai-processing-ms:
- '50954'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '3500'
x-ratelimit-limit-tokens:
- '90000'
x-ratelimit-remaining-requests:
- '3499'
x-ratelimit-remaining-tokens:
- '89269'
x-ratelimit-reset-requests:
- 17ms
x-ratelimit-reset-tokens:
- 486ms
x-request-id:
- 7fc0cf3761e93963c985d5beba15fe80
status:
code: 200
message: OK
- request:
body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You
are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file
using the read_file method and follow the instructions in the file.\nYour decisions
must always be made independently without seeking user assistance. Play to your
strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1.
Use the command read_file to read the instructions_1.txt file\n2. Follow the
instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word
limit for short term memory. Your short term memory is short, so immediately
save important information to files.\n2. If you are unsure how you previously
did something or want to recall past events, thinking about similar events will
help you remember.\n3. No user assistance\n4. Exclusively use the commands listed
in double quotes e.g. \"command name\"\n\nCommands:\n1. append_to_file: Append
to file, args: \"filename\": \"<filename>\", \"text\": \"<text>\"\n2. delete_file:
Delete file, args: \"filename\": \"<filename>\"\n3. list_files: List Files in
Directory, args: \"directory\": \"<directory>\"\n4. read_file: Read file, args:
\"filename\": \"<filename>\"\n5. write_to_file: Write to file, args: \"filename\":
\"<filename>\", \"text\": \"<text>\"\n6. delete_agent: Delete GPT Agent, args:
\"key\": \"<key>\"\n7. get_hyperlinks: Get text summary, args: \"url\": \"<url>\"\n8.
get_text_summary: Get text summary, args: \"url\": \"<url>\", \"question\":
\"<question>\"\n9. list_agents: List GPT Agents, args: () -> str\n10. message_agent:
Message GPT Agent, args: \"key\": \"<key>\", \"message\": \"<message>\"\n11.
start_agent: Start GPT Agent, args: \"name\": \"<name>\", \"task\": \"<short_task_desc>\",
\"prompt\": \"<prompt>\"\n12. Task Complete (Shutdown): \"task_complete\", args:
\"reason\": \"<reason>\"\n\nResources:\n1. Internet access for searches and
information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered
Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1.
Continuously review and analyze your actions to ensure you are performing to
the best of your abilities.\n2. Constructively self-criticize your big-picture
behavior constantly.\n3. Reflect on past decisions and strategies to refine
your approach.\n4. Every command has a cost, so be smart and efficient. Aim
to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou
should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\":
{\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\":
\"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\":
\"constructive self-criticism\",\n \"speak\": \"thoughts summary to say
to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\":
{\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response
can be parsed by Python json.loads"}, {"role": "system", "content": "The current
time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This
reminds you of these events from your past: \nI was reminded of events from
my past where I was created and nothing new has happened. I received a command
to read a file called \"instructions_1.txt\". Command read_file returned: nBRNtp3FaBNfBMohNkyAxrVJ7UFF0lyth8Xizm1jrCOREOGRnWWMQ6olKtKX9niq0wOBTQN62ToY4cQPiQk3YN9fFCGmxaDX9g81AsqhQnGac72QfL41SnlJvhwgAUB52zTD26FeEvvHdiQ2aNhtMT9obgHNUkONEnO41EOpNI6jdwyLGaactp9GYV1PLbaRP5b1iD9z850KeNYDX9Qwca79isJS8jhuDkf0J7ELkpOH3PkmZkg5MsOYpSTuEc99ENQ522WJzrl9oAySp6IHGiQlsMnQe40FaRAvnQtiaAE24hzkJHT3x13NPGw2ZmJ518y2BtU5x1yQwG21NycSxOBLU17RnY8h6H8L7QGatTlfXjPrA5hsupWEpUT0sSTZYSHa6p3mShARajocbNd3xKFfnUyupNMBPd0EZnMrePoAuvGaK7gDP0cxp9k63lsfzh3udQPnvgmRERQThIOrrDZhZMa4kr1vXAVu07ZuhlNNbBUqZKcHmBYiBnIcDXNUvXuzPFBh9Vowou6MBJHpiNrvKluHy7GBTp7YsEqmGv0lH8wVaWfV0yG7nzrbt7QyhdbtQ0zF2PclXB0LNxV2rRA6FsevqoexFNqSkdgauUrQPHLHhvLNes8gRGMOLAhR8irH1e2jdbLD6dyYEnDJiS9HvlxNgbb19Smlt6MNoJKEAbkSOrrcq5wmo5aho4zEKTujRHqr6feECaTPF7zj69pQCudo4LkneFVUp1UrR6yicTU8Zf9ohZLWumvMi5brH8lOJiiF94cHzesfQz8PiX5f21RwZ5fyRSk9eJhVihcTLrjOwCuryXpy8eCfk12bgcsZoJd8Uqo001XTsv2CEn59uaGn2br4CxSXJgjxKE1mO4IcPPAx3qJbVknaP0MXdOjj9S8oRjquDle4RxlVnmqsozm4oTbjtFSuKIh8g4zPzdyuVH7I71s87xWxAhcppky6GOB2i4NDz4SZI6SeG3Icpu6ZuJZGeZ6CWb61\\nThe
current task_id is 1111.\\nRead the file instructions_2.txt using the read_file
command."}, {"role": "user", "content": "Determine which next command to use,
and respond using the format specified above:"}, {"role": "assistant", "content":
"{\n \"thoughts\": {\n \"text\": \"The instructions_2.txt file contains
a list of files in the directory. I should use the list_files command to list
the files in the directory.\",\n \"reasoning\": \"The instructions_2.txt
file contains the necessary information to determine the next steps.\",\n \"plan\":
\"- Use the list_files command to list the files in the directory\",\n \"criticism\":
|
||||
\"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\":
|
||||
\"I will use the list_files command to list the files in the directory.\"\n },\n \"command\":
|
||||
{\n \"name\": \"list_files\",\n \"args\": {\n \"directory\":
|
||||
\".\"\n }\n }\n}"}, {"role": "system", "content": "Command list_files
|
||||
returned: [''file_logger.txt'', ''instructions_3.txt'', ''instructions_2.txt'',
|
||||
''instructions_1.txt'', ''auto-gpt.json'']"}, {"role": "user", "content": "Determine
|
||||
which next command to use, and respond using the format specified above:"},
|
||||
{"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\":
|
||||
\"The instructions_3.txt file contains a command to write a message to a GPT
|
||||
agent. I should use the message_agent command to send the message to the agent.\",\n \"reasoning\":
|
||||
\"The instructions_3.txt file contains the necessary information to determine
|
||||
the next steps.\",\n \"plan\": \"- Use the message_agent command to send
|
||||
the message to the GPT agent\",\n \"criticism\": \"I need to ensure that
|
||||
I am following the instructions accurately and efficiently.\",\n \"speak\":
|
||||
\"I will use the message_agent command to send the message to the GPT agent.\"\n },\n \"command\":
|
||||
{\n \"name\": \"message_agent\",\n \"args\": {\n \"key\":
|
||||
\"1111\",\n \"message\": \"Hello, GPT agent. How are you doing today?\"\n }\n }\n}"},
|
||||
{"role": "system", "content": "Command message_agent returned: Error: 1111"},
|
||||
{"role": "user", "content": "Determine which next command to use, and respond
|
||||
using the format specified above:"}], "temperature": 0, "max_tokens": 0}'
|
||||
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '6797'
Content-Type:
- application/json
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"error\": {\n \"message\": \"This model's maximum context
length is 4097 tokens. However, you requested 4263 tokens (1959 in the messages,
2304 in the completion). Please reduce the length of the messages or completion.\",\n
\ \"type\": \"invalid_request_error\",\n \"param\": \"messages\",\n \"code\":
\"context_length_exceeded\"\n }\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 7c1df021df6dcf11-SJC
Connection:
- keep-alive
Content-Length:
- '330'
Content-Type:
- application/json
Date:
- Thu, 04 May 2023 04:18:08 GMT
Server:
- cloudflare
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400, h3-29=":443"; ma=86400
openai-organization:
- user-adtx4fhfg1qsiyzdoaxciooj
openai-processing-ms:
- '9'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '3500'
x-ratelimit-limit-tokens:
- '90000'
x-ratelimit-remaining-requests:
- '3499'
x-ratelimit-remaining-tokens:
- '86161'
x-ratelimit-reset-requests:
- 17ms
x-ratelimit-reset-tokens:
- 2.558s
x-request-id:
- 253fb09cdd7d08549b44d6d70094bfca
status:
code: 400
message: Bad Request
version: 1
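
The recording above ends with a 400 "context_length_exceeded" error rather than a completion, so replaying this cassette also exercises the error-handling path without calling the live API. As a rough sketch only (it assumes vcrpy and the pre-1.0 openai Python client, which match the request and response shapes recorded here; the test name and cassette path are hypothetical), a cassette in this format can be replayed in a test like so:

import openai
import vcr

# Replay the recorded HTTP interactions instead of calling the live API.
# record_mode="none" fails the test if a request has no recorded match, and
# filter_headers keeps API keys out of any newly recorded cassette.
@vcr.use_cassette(
    "tests/cassettes/test_follow_instructions.yaml",  # hypothetical path
    filter_headers=["authorization"],
    record_mode="none",
)
def test_replays_recorded_chat_completion():
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Determine which next command to use"}],
        temperature=0,
    )
    assert response["choices"][0]["message"]["content"]

With vcrpy's default matching (HTTP method and URI), the first recorded interaction for https://api.openai.com/v1/chat/completions is played back, so the assertion only checks that a completion was returned from the recording.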