diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index 8fd0bfa3b3..ff0d907421 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -100,6 +100,10 @@ jobs: sudo apt-get autoremove sudo apt-get autoclean pip install tox + # install Protobuf compiler + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip + unzip protoc-3.11.4-linux-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc # install IPFS sudo apt-get install -y wget wget -O ./go-ipfs.tar.gz https://dist.ipfs.io/go-ipfs/v0.6.0/go-ipfs_v0.6.0_linux-amd64.tar.gz @@ -116,9 +120,27 @@ jobs: run: tox -e package_version_checks - name: Check package dependencies run: tox -e package_dependencies_checks + - name: Check generate protocols + run: tox -e check_generate_all_protocols - name: Generate Documentation run: tox -e docs + common_checks_5: + continue-on-error: False + runs-on: ubuntu-latest + timeout-minutes: 10 + if: github.event_name == 'pull_request' && github.head_ref == 'master' + steps: + - uses: actions/checkout@master + - uses: actions/setup-python@master + with: + python-version: 3.6 + - name: Check Docs links + run: tox -e check_doc_links + - name: Check API Docs updated + run: tox -e check_api_docs + + integration_checks: continue-on-error: True needs: @@ -142,7 +164,10 @@ jobs: sudo apt-get autoremove sudo apt-get autoclean pip install tox - sudo apt-get install -y protobuf-compiler + # install Protobuf compiler + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip + unzip protoc-3.11.4-linux-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc - name: Sync AEA loop integration tests run: | tox -e py3.8 -- -m 'sync' # --aea-loop sync @@ -171,9 +196,6 @@ jobs: pip install tox - name: Integration tests run: tox -e py3.8 -- -m 'integration and not unstable and ledger' - continue-on-error: true - - name: Force green exit - run: exit 0 platform_checks: continue-on-error: True @@ -203,14 +225,23 @@ jobs: sudo apt-get autoremove sudo apt-get autoclean pip install tox - sudo apt-get install -y protobuf-compiler + # install Protobuf compiler + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip + unzip protoc-3.11.4-linux-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc + # sudo apt-get install -y protobuf-compiler # use sudo rm /var/lib/apt/lists/lock above in line above update if dependency install failures persist # use sudo apt-get dist-upgrade above in line below update if dependency install failures persist - if: matrix.os == 'macos-latest' name: Install dependencies (macos-latest) run: | pip install tox - brew install protobuf + brew install gcc + # brew install protobuf + # brew install https://raw.githubusercontent.com/Homebrew/homebrew-core/72457f0166d5619a83f508f2345b22d0617b5021/Formula/protobuf.rb + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-osx-x86_64.zip + unzip protoc-3.11.4-osx-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc - if: matrix.os == 'windows-latest' name: Install dependencies (windows-latest) env: @@ -218,7 +249,11 @@ jobs: run: | pip install tox echo "::add-path::C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64" - choco install protoc + choco install protoc --version 3.11.4 + choco install mingw -y + # wget 
https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-win64.zip + # unzip protoc-3.11.4-win64.zip -d protoc + # sudo mv protoc/bin/protoc /usr/local/bin/protoc python scripts/update_symlinks_cross_platform.py - if: matrix.os == 'ubuntu-latest' || matrix.python_version != '3.9' name: Unit tests @@ -248,7 +283,9 @@ jobs: sudo apt-get autoremove sudo apt-get autoclean pip install tox - sudo apt-get install -y protobuf-compiler + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip + unzip protoc-3.11.4-linux-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc - name: Unit tests with sync agent loop run: | tox -e py3.8 -- --aea-loop sync -m 'not integration and not unstable' @@ -302,7 +339,11 @@ jobs: sudo apt-get autoremove sudo apt-get autoclean pip install tox - sudo apt-get install -y protobuf-compiler + # install Protobuf compiler + wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip + unzip protoc-3.11.4-linux-x86_64.zip -d protoc + sudo mv protoc/bin/protoc /usr/local/bin/protoc + # sudo apt-get install -y protobuf-compiler - name: Run all tests run: tox -e py3.7-cov -- --ignore=tests/test_docs --ignore=tests/test_examples --ignore=tests/test_packages/test_contracts --ignore=tests/test_packages/test_skills_integration -m 'not unstable' continue-on-error: true diff --git a/.gitignore b/.gitignore index bd6f55c3fb..d736bd090b 100644 --- a/.gitignore +++ b/.gitignore @@ -122,6 +122,8 @@ output_file !packages/fetchai/contracts/erc1155/build !packages/fetchai/contracts/staking_erc20/build !packages/fetchai/contracts/oracle/build +!packages/fetchai/contracts/oracle_client/build +!packages/fetchai/contracts/fet_erc20/build packages/fetchai/connections/p2p_libp2p/libp2p_node !tests/data/dummy_contract/build diff --git a/.pylintrc b/.pylintrc index 4a1d62c80a..b8dae00c48 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,5 +1,5 @@ [MASTER] -ignore-patterns=serialization.py,message.py,__main__.py,.*_pb2.py +ignore-patterns=serialization.py,message.py,__main__.py,.*_pb2.py,tac.sh,tac_local.sh [MESSAGES CONTROL] disable=C0103,C0201,C0301,C0302,C0330,W0105,W0107,W0707,W1202,W1203,R0902,R0913,R0914,R0801,R0911,R0912,R0901,R0916,R1702,R0915 @@ -30,7 +30,7 @@ disable=C0103,C0201,C0301,C0302,C0330,W0105,W0107,W0707,W1202,W1203,R0902,R0913, # R0801: similar lines, # too granular [IMPORTS] -ignored-modules=aiohttp,defusedxml,gym,fetch,matplotlib,memory_profiler,numpy,oef,openapi_core,psutil,tensorflow,temper,skimage,vyper,web3 +ignored-modules=aiohttp,defusedxml,gym,fetch,matplotlib,memory_profiler,numpy,oef,openapi_core,psutil,tensorflow,temper,skimage,vyper,web3,aioprometheus [DESIGN] min-public-methods=1 diff --git a/AUTHORS.md b/AUTHORS.md index e6ed8ddf47..512d6bb8c6 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -13,3 +13,4 @@ This is the official list of Fetch.AI authors for copyright purposes. 
* Lokman Rahmani [lrahmani](https://github.com/lrahmani) * Jiří Vestfál [MissingNO57](https://github.com/MissingNO57) * Ed Fitzgerald [ejfitzgerald](https://github.com/ejfitzgerald) +* James Riehl [jrriehl](https://github.com/jrriehl) diff --git a/HISTORY.md b/HISTORY.md index 990ff7526a..05c08dc08e 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,31 @@ # Release History +## 0.8.0 (2020-12-17) + +- Adds support for protocol dialogue rules validation +- Fixes url forwarding in http server connection +- Revises protocols to correctly define terminal states +- Adds a build command +- Adds build command support for libp2p connection +- Adds multiple fixes to libp2p connection +- Adds prometheus connection and protocol +- Adds tests for confirmation AW1 skill +- Adds oracle demo docs +- Replaces pickle with protobuf in all protocols +- Refactors oef models to account for semantic irregularities +- Updates docs for demos relying on Ganache +- Adds generic storage support +- Adds configurable dialogue offloading +- Fixes transaction generation on confirmation bugs +- Fixes transaction processing order in all buyer skills +- Extends ledger api protocol to query ledger state +- Adds remove-key command in CLI +- Multiple tac stability fixes +- Adds support for configurable error handler +- Multiple additional tests to improve stability +- Multiple docs updates based on user feedback +- Multiple additional tests and test stability fixes + ## 0.7.5 (2020-11-25) - Adds AW3 AEAs diff --git a/MANIFEST.in b/MANIFEST.in index 7a267950b3..cf69a128e5 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ include README.md LICENSE HISTORY.md AUTHORS.md SECURITY.md CODE_OF_CONDUCT.md Pipfile mkdocs.yml tox.ini pytest.ini strategy.ini -recursive-include aea *.json *.yaml *.proto *.ico *png *.html *.js *.css *.md *.cfg +recursive-include aea *.json *.yaml *.proto *.ico *png *.html *.js *.css *.md *.cfg json1.dll recursive-include docs * recursive-include examples * recursive-include packages * diff --git a/Makefile b/Makefile index f5b76372f5..85b147da70 100644 --- a/Makefile +++ b/Makefile @@ -78,17 +78,17 @@ common_checks: security misc_checks lint static docs .PHONY: test test: - pytest -rfE --doctest-modules aea packages/fetchai/protocols packages/fetchai/connections packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation packages/fetchai/skills/simple_buyer packages/fetchai/skills/simple_data_request packages/fetchai/skills/simple_seller packages/fetchai/skills/simple_service_registration packages/fetchai/skills/simple_service_search tests/ --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov=aea --cov=packages/fetchai/protocols --cov=packages/fetchai/connections --cov=packages/fetchai/skills/generic_buyer --cov=packages/fetchai/skills/generic_seller --cov=packages/fetchai/skills/tac_control --cov=packages/fetchai/skills/tac_control_contract --cov=packages/fetchai/skills/tac_participation --cov=packages/fetchai/skills/tac_negotiation --cov=packages/fetchai/skills/simple_buyer --cov=packages/fetchai/skills/simple_data_request --cov=packages/fetchai/skills/simple_seller --cov=packages/fetchai/skills/simple_service_registration --cov=packages/fetchai/skills/simple_service_search --cov-config=.coveragerc + pytest -rfE --doctest-modules aea packages/fetchai/protocols 
packages/fetchai/connections packages/fetchai/skills/confirmation_aw1 packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation packages/fetchai/skills/simple_buyer packages/fetchai/skills/simple_data_request packages/fetchai/skills/simple_seller packages/fetchai/skills/simple_service_registration packages/fetchai/skills/simple_service_search tests/ --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov=aea --cov=packages/fetchai/protocols --cov=packages/fetchai/connections --cov=packages/fetchai/skills/confirmation_aw1 --cov=packages/fetchai/skills/generic_buyer --cov=packages/fetchai/skills/generic_seller --cov=packages/fetchai/skills/tac_control --cov=packages/fetchai/skills/tac_control_contract --cov=packages/fetchai/skills/tac_participation --cov=packages/fetchai/skills/tac_negotiation --cov=packages/fetchai/skills/simple_buyer --cov=packages/fetchai/skills/simple_data_request --cov=packages/fetchai/skills/simple_seller --cov=packages/fetchai/skills/simple_service_registration --cov=packages/fetchai/skills/simple_service_search --cov-config=.coveragerc find . -name ".coverage*" -not -name ".coveragerc" -exec rm -fr "{}" \; .PHONY: test-sub test-sub: - pytest -rfE --doctest-modules aea packages/fetchai/connections packages/fetchai/protocols packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation tests/test_$(tdir) --cov=aea.$(dir) --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov-config=.coveragerc + pytest -rfE --doctest-modules aea packages/fetchai/connections packages/fetchai/protocols packages/fetchai/skills/confirmation_aw1 packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation tests/test_$(tdir) --cov=aea.$(dir) --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov-config=.coveragerc find . 
-name ".coverage*" -not -name ".coveragerc" -exec rm -fr "{}" \; .PHONY: test-sub-p test-sub-p: - pytest -rfE --doctest-modules aea packages/fetchai/connections packages/fetchai/protocols packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation packages/fetchai/skills/simple_buyer packages/fetchai/skills/simple_data_request packages/fetchai/skills/simple_seller packages/fetchai/skills/simple_service_registration packages/fetchai/skills/simple_service_search tests/test_packages/test_$(tdir) --cov=packages.fetchai.$(dir) --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov-config=.coveragerc + pytest -rfE --doctest-modules aea packages/fetchai/connections packages/fetchai/protocols packages/fetchai/skills/confirmation_aw1 packages/fetchai/skills/generic_buyer packages/fetchai/skills/generic_seller packages/fetchai/skills/tac_control packages/fetchai/skills/tac_control_contract packages/fetchai/skills/tac_participation packages/fetchai/skills/tac_negotiation packages/fetchai/skills/simple_buyer packages/fetchai/skills/simple_data_request packages/fetchai/skills/simple_seller packages/fetchai/skills/simple_service_registration packages/fetchai/skills/simple_service_search tests/test_packages/test_$(tdir) --cov=packages.fetchai.$(dir) --cov-report=html --cov-report=xml --cov-report=term-missing --cov-report=term --cov-config=.coveragerc find . -name ".coverage*" -not -name ".coveragerc" -exec rm -fr "{}" \; diff --git a/Pipfile b/Pipfile index 72441c9351..f1bf26357f 100644 --- a/Pipfile +++ b/Pipfile @@ -10,6 +10,7 @@ name = "test-pypi" [dev-packages] aiohttp = "==3.6.2" +aioprometheus = "==20.0.1" bandit = "==1.6.2" black = "==19.10b0" bs4 = "==0.0.1" diff --git a/README.md b/README.md index ac1e8e630d..029f7e1453 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,8 @@ You can have more control on the installed dependencies by leveraging the setupt The following dependency is **only relevant if you intend to contribute** to the repository: +- All Pull Requests should be opened against the `develop` branch. Do **not** open a Pull Request against `master`! + - The project uses [Google Protocol Buffers](https://developers.google.com/protocol-buffers/) compiler for message serialization. A guide on how to install it is found [here](https://fetchai.github.io/oef-sdk-python/user/install.html#protobuf-compiler). The following steps are **only relevant if you intend to contribute** to the repository. They are **not required** for agent development. @@ -137,34 +139,17 @@ The following steps are **only relevant if you intend to contribute** to the rep pip install -e ".[all]" -- To run tests: - - tox -e py3.7 - -- To run linters (code style checks): - - tox -e flake8 - tox -e pylint - -- To run static type checks: - - tox -e mypy - -- To run black code formatter: - - tox -e black +- To run tests: `tox -e py3.7` or `make test`. To only test specific modules try `make dir=PATH_TO_MODULE tdir=PATH_TO_TESTS test-sub` where (e.g. `make dir=cli tdir=cli test-sub`). 
-- To run isort code formatter: +- To run linters (code style checks) and code formatters: `tox -e flake8` and `tox -e black` and ` tox -e isort` or `make lint` - tox -e isort +- To run static type checks: `tox -e mypy` or `make static` -- To run bandit security checks: +- To run pylint: `tox -e pylint` or `make pylint` - tox -e bandit +- To run security checks: `tox -e bandit` and `tox -e safety` or `make security` -- To start a live-reloading docs server on localhost - - mkdocs serve +- To start a live-reloading docs server on localhost: `mkdocs serve` - To amend the docs, create a new documentation file in `docs/` and add a reference to it in `mkdocs.yml`. @@ -179,7 +164,7 @@ consider to cite it with the following BibTex entry: ``` @misc{agents-aea, - Author = {Marco Favorito and David Minarsch and Ali Hosseini and Aristotelis Triantafyllidis and Diarmid Campbell and Oleg Panasevych and Kevin Chen and Yuri Turchenkov and Lokman Rahmani and Jiří Vestfál}, + Author = {Marco Favorito and David Minarsch and Ali Hosseini and Aristotelis Triantafyllidis and Diarmid Campbell and Oleg Panasevych and Kevin Chen and Yuri Turchenkov and Lokman Rahmani and Jiří Vestfál and James Riehl}, Title = {Autonomous Economic Agent (AEA) Framework}, Year = {2019}, } diff --git a/SECURITY.md b/SECURITY.md index 17466bb4f4..a334d82966 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -8,8 +8,8 @@ The following table shows which versions of `aea` are currently being supported | Version | Supported | | ------- | ------------------ | -| 0.3.x | :white_check_mark: | -| < 0.3.0 | :x: | +| 0.7.x | :white_check_mark: | +| < 0.7.0 | :x: | ## Reporting a Vulnerability diff --git a/aea/__version__.py b/aea/__version__.py index 768ec73fe5..c9dbe40854 100644 --- a/aea/__version__.py +++ b/aea/__version__.py @@ -22,7 +22,7 @@ __title__ = "aea" __description__ = "Autonomous Economic Agent framework" __url__ = "https://github.com/fetchai/agents-aea.git" -__version__ = "0.7.5" +__version__ = "0.8.0" __author__ = "Fetch.AI Limited" __license__ = "Apache-2.0" __copyright__ = "2019 Fetch.AI Limited" diff --git a/aea/abstract_agent.py b/aea/abstract_agent.py index cb3e617dc1..4ea38998cb 100644 --- a/aea/abstract_agent.py +++ b/aea/abstract_agent.py @@ -103,6 +103,10 @@ def get_multiplexer_setup_options(self) -> Optional[Dict]: def connections(self) -> List[Connection]: """Return list of connections.""" + @abstractproperty + def storage_uri(self) -> Optional[str]: + """Return storage uri.""" + @abstractmethod def exception_handler( self, exception: Exception, function: Callable diff --git a/aea/aea.py b/aea/aea.py index 300b643273..94457df1f2 100644 --- a/aea/aea.py +++ b/aea/aea.py @@ -16,7 +16,6 @@ # limitations under the License. 
# # ------------------------------------------------------------------------------ - """This module contains the implementation of an autonomous economic agent (AEA).""" import datetime from asyncio import AbstractEventLoop @@ -44,10 +43,11 @@ from aea.crypto.ledger_apis import DEFAULT_CURRENCY_DENOMINATIONS from aea.crypto.wallet import Wallet from aea.decision_maker.base import DecisionMakerHandler +from aea.error_handler.base import AbstractErrorHandler +from aea.error_handler.default import ErrorHandler as DefaultErrorHandler from aea.exceptions import AEAException, _StopRuntime from aea.helpers.exception_policy import ExceptionPolicyEnum from aea.helpers.logging import AgentLoggerAdapter, get_logger -from aea.helpers.temp_error_handler import ErrorHandler from aea.identity.base import Identity from aea.mail.base import Envelope from aea.protocols.base import Message, Protocol @@ -65,6 +65,8 @@ class AEA(Agent): } DEFAULT_RUN_LOOP: str = "async" + DEFAULT_BUILD_DIR_NAME = ".build" + def __init__( self, identity: Identity, @@ -74,6 +76,7 @@ def __init__( period: float = 0.05, execution_timeout: float = 0, max_reactions: int = 20, + error_handler_class: Optional[Type[AbstractErrorHandler]] = None, decision_maker_handler_class: Optional[Type[DecisionMakerHandler]] = None, skill_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, connection_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, @@ -85,6 +88,7 @@ def __init__( default_routing: Optional[Dict[PublicId, PublicId]] = None, connection_ids: Optional[Collection[PublicId]] = None, search_service_address: str = DEFAULT_SEARCH_SERVICE_ADDRESS, + storage_uri: Optional[str] = None, **kwargs, ) -> None: """ @@ -107,8 +111,8 @@ def __init__( :param default_routing: dictionary for default routing. :param connection_ids: active connection ids. Default: consider all the ones in the resources. :param search_service_address: the address of the search service used. + :param storage_uri: optional uri to set generic storage :param kwargs: keyword arguments to be attached in the agent context namespace. 
- :return: None """ @@ -126,6 +130,7 @@ def __init__( period=period, loop_mode=loop_mode, runtime_mode=runtime_mode, + storage_uri=storage_uri, logger=cast(Logger, aea_logger), ) @@ -142,6 +147,9 @@ def __init__( ) self.runtime.set_decision_maker(decision_maker_handler) + if error_handler_class is None: + error_handler_class = DefaultErrorHandler + self._error_handler_class = error_handler_class default_ledger_id = ( default_ledger if default_ledger is not None @@ -165,6 +173,8 @@ def __init__( default_routing if default_routing is not None else {}, search_service_address, decision_maker_handler.self_address, + storage_callable=lambda: self._runtime.storage, + build_dir=self.get_build_dir(), **kwargs, ) self._execution_timeout = execution_timeout @@ -176,6 +186,11 @@ def __init__( self._setup_loggers() + @classmethod + def get_build_dir(cls) -> str: + """Get agent build directory.""" + return cls.DEFAULT_BUILD_DIR_NAME + @property def context(self) -> AgentContext: """Get (agent) context.""" @@ -246,11 +261,9 @@ def get_multiplexer_setup_options(self) -> Optional[Dict]: default_connection=self.context.default_connection, ) - @staticmethod - def _get_error_handler() -> Type[ErrorHandler]: + def _get_error_handler(self) -> Type[AbstractErrorHandler]: """Get error handler.""" - handler = ErrorHandler - return handler + return self._error_handler_class def _get_msg_and_handlers_for_envelope( self, envelope: Envelope diff --git a/aea/aea_builder.py b/aea/aea_builder.py index faeb9530a5..18362a51ba 100644 --- a/aea/aea_builder.py +++ b/aea/aea_builder.py @@ -16,18 +16,17 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - - """This module contains utilities for building an AEA.""" - -import itertools +import ast import logging import logging.config import os import pprint +import sys from collections import defaultdict from copy import copy, deepcopy from pathlib import Path +from subprocess import check_call # nosec from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Type, Union, cast import jsonschema @@ -63,6 +62,7 @@ ) from aea.configurations.constants import ( DEFAULT_SKILL, + DOTTED_PATH_MODULE_ELEMENT_SEPARATOR, FETCHAI, PROTOCOLS, SIGNING_PROTOCOL, @@ -76,8 +76,14 @@ from aea.crypto.ledger_apis import DEFAULT_CURRENCY_DENOMINATIONS from aea.crypto.wallet import Wallet from aea.decision_maker.base import DecisionMakerHandler -from aea.exceptions import AEAException, AEAValidationError -from aea.helpers.base import find_topological_order, load_env_file, load_module +from aea.error_handler.base import AbstractErrorHandler +from aea.exceptions import AEAException, AEAValidationError, enforce +from aea.helpers.base import ( + ensure_dir, + find_topological_order, + load_env_file, + load_module, +) from aea.helpers.exception_policy import ExceptionPolicyEnum from aea.helpers.install_dependency import install_dependency from aea.helpers.logging import AgentLoggerAdapter, WithLogger, get_logger @@ -297,7 +303,8 @@ class AEABuilder(WithLogger): # pylint: disable=too-many-public-methods DEFAULT_LOOP_MODE = "async" DEFAULT_RUNTIME_MODE = "threaded" DEFAULT_SEARCH_SERVICE_ADDRESS = _DEFAULT_SEARCH_SERVICE_ADDRESS - + AEA_CLASS = AEA + BUILD_TIMEOUT = 120 loader = ConfigLoader.from_configuration_type(PackageType.AGENT) # pylint: disable=attribute-defined-outside-init @@ -358,6 +365,7 @@ def _reset(self, is_full_reset: bool = False) -> None: if not is_full_reset: return self._default_ledger: Optional[str] = None + 
self._build_entrypoint: Optional[str] = None self._currency_denominations: Dict[str, str] = {} self._default_connection: Optional[PublicId] = None self._context_namespace: Dict[str, Any] = {} @@ -365,12 +373,14 @@ def _reset(self, is_full_reset: bool = False) -> None: self._execution_timeout: Optional[float] = None self._max_reactions: Optional[int] = None self._decision_maker_handler_class: Optional[Type[DecisionMakerHandler]] = None + self._error_handler_class: Optional[Type[AbstractErrorHandler]] = None self._skill_exception_policy: Optional[ExceptionPolicyEnum] = None self._connection_exception_policy: Optional[ExceptionPolicyEnum] = None self._default_routing: Dict[PublicId, PublicId] = {} self._loop_mode: Optional[str] = None self._runtime_mode: Optional[str] = None self._search_service_address: Optional[str] = None + self._storage_uri: Optional[str] = None self._package_dependency_manager = _DependenciesManager() if self._with_default_packages: @@ -428,7 +438,9 @@ def set_decision_maker_handler( :return: self """ - dotted_path, class_name = decision_maker_handler_dotted_path.split(":") + dotted_path, class_name = decision_maker_handler_dotted_path.split( + DOTTED_PATH_MODULE_ELEMENT_SEPARATOR + ) module = load_module(dotted_path, file_path) try: @@ -444,6 +456,35 @@ def set_decision_maker_handler( return self + def set_error_handler( + self, error_handler_dotted_path: str, file_path: Path + ) -> "AEABuilder": + """ + Set error handler class. + + :param error_handler_dotted_path: the dotted path to the error handler + :param file_path: the file path to the file which contains the error handler + + :return: self + """ + dotted_path, class_name = error_handler_dotted_path.split( + DOTTED_PATH_MODULE_ELEMENT_SEPARATOR + ) + module = load_module(dotted_path, file_path) + + try: + _class = getattr(module, class_name) + self._error_handler_class = _class + except Exception as e: # pragma: nocover + self.logger.error( + "Could not locate error handler for dotted path '{}', class name '{}' and file path '{}'. Error message: {}".format( + dotted_path, class_name, file_path, e + ) + ) + raise # log and re-raise because we should not build an agent from an. invalid configuration + + return self + def set_skill_exception_policy( self, skill_exception_policy: Optional[ExceptionPolicyEnum] ) -> "AEABuilder": # pragma: nocover @@ -509,6 +550,18 @@ def set_runtime_mode( self._runtime_mode = runtime_mode return self + def set_storage_uri( + self, storage_uri: Optional[str] + ) -> "AEABuilder": # pragma: nocover + """ + Set the storage uri. + + :param storage uri: storage uri + :return: self + """ + self._storage_uri = storage_uri + return self + def set_search_service_address( self, search_service_address: str ) -> "AEABuilder": # pragma: nocover @@ -659,6 +712,18 @@ def set_default_ledger( self._default_ledger = identifier return self + def set_build_entrypoint( + self, build_entrypoint: Optional[str] + ) -> "AEABuilder": # pragma: nocover + """ + Set build entrypoint. + + :param build_entrypoint: path to the builder script. 
+ :return: the AEABuilder + """ + self._build_entrypoint = build_entrypoint + return self + def set_currency_denominations( self, currency_denominations: Dict[str, str] ) -> "AEABuilder": # pragma: nocover @@ -691,6 +756,7 @@ def add_component( configuration = load_component_configuration( component_type, directory, skip_consistency_check ) + self._set_component_build_directory(configuration) self._check_can_add(configuration) # update dependency graph self._package_dependency_manager.add_component(configuration) @@ -698,6 +764,24 @@ def add_component( return self + def _set_component_build_directory( + self, configuration: ComponentConfiguration + ) -> None: + """ + Set component build directory, create if not presents. + + :param configuration: component configuration + + :return: None + """ + configuration.build_directory = os.path.join( + self.AEA_CLASS.get_build_dir(), + configuration.component_type.value, + configuration.author, + configuration.name, + ) + ensure_dir(configuration.build_directory) + def add_component_instance(self, component: Component) -> "AEABuilder": """ Add already initialized component object to resources or connections. @@ -819,6 +903,75 @@ def remove_contract(self, public_id: PublicId) -> "AEABuilder": self.remove_component(ComponentId(ComponentType.CONTRACT, public_id)) return self + def call_all_build_entrypoints(self): + """Call all the build entrypoints.""" + for config in self._package_dependency_manager._dependencies.values(): # type: ignore # pylint: disable=protected-access + self.run_build_for_component_configuration(config, logger=self.logger) + + if self._build_entrypoint: + self.logger.info("Building AEA package...") + source_directory = "." + target_directory = os.path.abspath(self.AEA_CLASS.get_build_dir()) + build_entrypoint = cast(str, self._build_entrypoint) + self._run_build_entrypoint( + build_entrypoint, source_directory, target_directory, logger=self.logger + ) + + @classmethod + def run_build_for_component_configuration( + cls, config: ComponentConfiguration, logger: Optional[logging.Logger] = None + ) -> None: + """Run a build entrypoint script for component configuration.""" + if not config.build_entrypoint: + return + + enforce(bool(config.build_directory), f"{config}.build_directory is not set!") + + if not config.build_directory: # pragma: nocover + return + + if logger: + logger.info(f"Building package {config.component_id}...") + + source_directory = cast(str, config.directory) + target_directory = os.path.abspath(config.build_directory) + build_entrypoint = cast(str, config.build_entrypoint) + cls._run_build_entrypoint( + build_entrypoint, source_directory, target_directory, logger=logger + ) + + @classmethod + def _run_build_entrypoint( + cls, + build_entrypoint: str, + source_directory: str, + target_directory: str, + logger: Optional[logging.Logger] = None, + ) -> None: + """ + Run a build entrypoint script. + + :param build_entrypoint: the path to the build script relative to directory. + :param directory: the directory root for the entrypoint path. 
+ :param logger: logger + + :return: None + """ + cls._check_valid_entrypoint(build_entrypoint, source_directory) + + command = [sys.executable, build_entrypoint, target_directory] + command_str = " ".join(command) + if logger: + logger.info(f"Running command '{command_str}'") + try: + check_call( + command, cwd=source_directory, timeout=cls.BUILD_TIMEOUT + ) # nosec + except Exception as e: + raise AEAException( + f"An error occurred while running command '{command_str}': {str(e)}" + ) from e + def _build_identity_from_wallet(self, wallet: Wallet) -> Identity: """ Get the identity associated to a wallet. @@ -936,7 +1089,7 @@ def build(self, connection_ids: Optional[Collection[PublicId]] = None,) -> AEA: crypto_store=wallet.connection_cryptos, ) connection_ids = self._process_connection_ids(connection_ids) - aea = AEA( + aea = self.AEA_CLASS( identity, wallet, resources, @@ -944,6 +1097,7 @@ def build(self, connection_ids: Optional[Collection[PublicId]] = None,) -> AEA: period=self._get_agent_act_period(), execution_timeout=self._get_execution_timeout(), max_reactions=self._get_max_reactions(), + error_handler_class=self._get_error_handler_class(), decision_maker_handler_class=self._get_decision_maker_handler_class(), skill_exception_policy=self._get_skill_exception_policy(), connection_exception_policy=self._get_connection_exception_policy(), @@ -954,6 +1108,7 @@ def build(self, connection_ids: Optional[Collection[PublicId]] = None,) -> AEA: runtime_mode=self._get_runtime_mode(), connection_ids=connection_ids, search_service_address=self._get_search_service_address(), + storage_uri=self._get_storage_uri(), **deepcopy(self._context_namespace), ) self._load_and_add_components( @@ -1002,6 +1157,14 @@ def _get_max_reactions(self) -> int: else self.DEFAULT_MAX_REACTIONS ) + def _get_error_handler_class(self,) -> Optional[Type]: + """ + Return the error handler class. + + :return: error handler class + """ + return self._error_handler_class + def _get_decision_maker_handler_class( self, ) -> Optional[Type[DecisionMakerHandler]]: @@ -1086,6 +1249,14 @@ def _get_runtime_mode(self) -> str: else self.DEFAULT_RUNTIME_MODE ) + def _get_storage_uri(self) -> Optional[str]: + """ + Return the storage uri. + + :return: the storage uri + """ + return self._storage_uri + def _get_search_service_address(self) -> str: """ Return the search service address. @@ -1208,6 +1379,35 @@ def _try_to_load_agent_configuration_file(aea_project_path: Path) -> None: ) ) + @staticmethod + def _check_valid_entrypoint(build_entrypoint: str, directory: str): + """ + Check a configuration has a valid entrypoint. + + :param build_entrypoint: the build entrypoint. + :param directory: the directory from where to start reading the script. 
+ :return: None + """ + enforce( + build_entrypoint is not None, + "Package has not a build entrypoint specified.", + ) + build_entrypoint = cast(str, build_entrypoint) + script_path = Path(directory) / build_entrypoint + enforce( + script_path.exists(), f"File '{build_entrypoint}' does not exists.", + ) + enforce( + script_path.is_file(), f"'{build_entrypoint}' is not a file.", + ) + try: + ast.parse(script_path.read_text()) + except SyntaxError as e: + message = f"{str(e)}: {e.text}" + raise AEAException( + f"The Python script at '{build_entrypoint}' has a syntax error: {message}" + ) from e + def set_from_configuration( self, agent_configuration: AgentConfig, @@ -1226,6 +1426,7 @@ def set_from_configuration( # set name and other configurations self.set_name(agent_configuration.name) self.set_default_ledger(agent_configuration.default_ledger) + self.set_build_entrypoint(agent_configuration.build_entrypoint) self.set_currency_denominations(agent_configuration.currency_denominations) self.set_default_connection(agent_configuration.default_connection) self.set_period(agent_configuration.period) @@ -1235,6 +1436,10 @@ def set_from_configuration( dotted_path = agent_configuration.decision_maker_handler["dotted_path"] file_path = agent_configuration.decision_maker_handler["file_path"] self.set_decision_maker_handler(dotted_path, file_path) + if agent_configuration.error_handler != {}: + dotted_path = agent_configuration.error_handler["dotted_path"] + file_path = agent_configuration.error_handler["file_path"] + self.set_error_handler(dotted_path, file_path) if agent_configuration.skill_exception_policy is not None: self.set_skill_exception_policy( ExceptionPolicyEnum(agent_configuration.skill_exception_policy) @@ -1246,6 +1451,7 @@ def set_from_configuration( self.set_default_routing(agent_configuration.default_routing) self.set_loop_mode(agent_configuration.loop_mode) self.set_runtime_mode(agent_configuration.runtime_mode) + self.set_storage_uri(agent_configuration.storage_uri) # load private keys for ( @@ -1263,65 +1469,19 @@ def set_from_configuration( ledger_identifier, private_key_path, is_connection=True ) - component_ids = itertools.chain( - [ - ComponentId(ComponentType.PROTOCOL, p_id) - for p_id in agent_configuration.protocols - ], - [ - ComponentId(ComponentType.CONTRACT, p_id) - for p_id in agent_configuration.contracts - ], - ) - for component_id in component_ids: - component_path = self.find_component_directory_from_component_id( - aea_project_path, component_id - ) - self.add_component( - component_id.component_type, - component_path, - skip_consistency_check=skip_consistency_check, - ) - - connection_ids = [ - ComponentId(ComponentType.CONNECTION, p_id) - for p_id in agent_configuration.connections - ] - if len(connection_ids) != 0: - connection_import_order = self._find_import_order( - connection_ids, aea_project_path, skip_consistency_check + for component_type in [ + ComponentType.PROTOCOL, + ComponentType.CONTRACT, + ComponentType.CONNECTION, + ComponentType.SKILL, + ]: + self._add_components_of_type( + component_type, + agent_configuration, + aea_project_path, + skip_consistency_check, ) - for connection_id in connection_import_order: - component_path = self.find_component_directory_from_component_id( - aea_project_path, connection_id - ) - self.add_component( - connection_id.component_type, - component_path, - skip_consistency_check=skip_consistency_check, - ) - - skill_ids = [ - ComponentId(ComponentType.SKILL, p_id) - for p_id in agent_configuration.skills - ] - - if 
len(skill_ids) == 0: - return - - skill_import_order = self._find_import_order( - skill_ids, aea_project_path, skip_consistency_check - ) - for skill_id in skill_import_order: - component_path = self.find_component_directory_from_component_id( - aea_project_path, skill_id - ) - self.add_component( - skill_id.component_type, - component_path, - skip_consistency_check=skip_consistency_check, - ) self._custom_component_configurations = ( agent_configuration.component_configurations ) @@ -1496,7 +1656,6 @@ def _overwrite_custom_configuration(self, configuration: ComponentConfiguration) It deep-copies the configuration, to avoid undesired side-effects. :param configuration: the configuration object. - :param custom_config: the configurations to apply. :return: the new configuration instance. """ new_configuration = deepcopy(configuration) @@ -1506,6 +1665,43 @@ def _overwrite_custom_configuration(self, configuration: ComponentConfiguration) new_configuration.update(custom_config) return new_configuration + def _add_components_of_type( + self, + component_type: ComponentType, + agent_configuration: AgentConfig, + aea_project_path: Path, + skip_consistency_check: bool, + ): + """ + Add components of a given type. + + :param component_type: the type of components to add. + :param agent_configuration: the agent configuration from where to retrieve the components. + :param aea_project_path: path to the AEA project. + :param skip_consistency_check: if true, skip consistency checks. + :return: None + """ + public_ids = getattr(agent_configuration, component_type.to_plural()) + component_ids = [ + ComponentId(component_type, public_id) for public_id in public_ids + ] + if component_type in {ComponentType.PROTOCOL, ComponentType.CONTRACT}: + # if protocols or contracts, import order doesn't matter. + import_order = component_ids + else: + import_order = self._find_import_order( + component_ids, aea_project_path, skip_consistency_check + ) + for component_id in import_order: + component_path = self.find_component_directory_from_component_id( + aea_project_path, component_id + ) + self.add_component( + component_id.component_type, + component_path, + skip_consistency_check=skip_consistency_check, + ) + def make_component_logger( configuration: ComponentConfiguration, agent_name: str, diff --git a/aea/agent.py b/aea/agent.py index 39eaefabd5..3b051a13b8 100644 --- a/aea/agent.py +++ b/aea/agent.py @@ -55,6 +55,7 @@ def __init__( period: float = 1.0, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, + storage_uri: Optional[str] = None, logger: Logger = _default_logger, ) -> None: """ @@ -66,6 +67,7 @@ def __init__( :param period: period to call agent's act :param loop_mode: loop_mode to choose agent run loop. :param runtime_mode: runtime mode to up agent. 
+ :param storage_uri: optional uri to set generic storage :return: None """ @@ -75,6 +77,8 @@ def __init__( self._period = period self._tick = 0 self._runtime_mode = runtime_mode or self.DEFAULT_RUNTIME + self._storage_uri = storage_uri + runtime_cls = self._get_runtime_class() self._runtime: BaseRuntime = runtime_cls( agent=self, loop_mode=loop_mode, loop=loop @@ -88,6 +92,11 @@ def connections(self) -> List[Connection]: """Return list of connections.""" return self._connections + @property + def storage_uri(self) -> Optional[str]: + """Return storage uri.""" + return self._storage_uri + @property def active_connections(self) -> List[Connection]: """Return list of active connections.""" diff --git a/aea/cli/build.py b/aea/cli/build.py new file mode 100644 index 0000000000..bb06c0154c --- /dev/null +++ b/aea/cli/build.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +"""Implementation of the 'aea build' subcommand.""" +from pathlib import Path +from typing import cast + +import click + +from aea.aea_builder import AEABuilder +from aea.cli.utils.context import Context +from aea.cli.utils.decorators import check_aea_project + + +@click.command() +@click.pass_context +@check_aea_project +def build(click_context): + """Build the agent and its components.""" + ctx = cast(Context, click_context.obj) + skip_consistency_check = ctx.config.get("skip_consistency_check", False) + build_aea(skip_consistency_check) + + +def build_aea(skip_consistency_check: bool) -> None: + """ + Build an AEA. + + That is, run the 'build entrypoint' script of each AEA package of the project. + + :param skip_consistency_check: the skip consistency check boolean. 
+ :return: None + """ + try: + builder = AEABuilder.from_aea_project( + Path("."), skip_consistency_check=skip_consistency_check + ) + builder.call_all_build_entrypoints() + except Exception as e: + raise click.ClickException(str(e)) + click.echo("Build completed!") diff --git a/aea/cli/core.py b/aea/cli/core.py index 2942998eaf..901359bed3 100644 --- a/aea/cli/core.py +++ b/aea/cli/core.py @@ -25,6 +25,7 @@ import aea from aea.cli.add import add from aea.cli.add_key import add_key +from aea.cli.build import build from aea.cli.config import config from aea.cli.create import create from aea.cli.delete import delete @@ -49,6 +50,7 @@ from aea.cli.push import push from aea.cli.register import register from aea.cli.remove import remove +from aea.cli.remove_key import remove_key from aea.cli.reset_password import reset_password from aea.cli.run import run from aea.cli.scaffold import scaffold @@ -72,7 +74,7 @@ is_flag=True, required=False, default=False, - help="Skip consistency checks.", + help="Skip consistency checks of agent during command execution.", ) @click.pass_context def cli(click_context, skip_consistency_check: bool) -> None: @@ -118,6 +120,7 @@ def _init_gui() -> None: cli.add_command(_list) cli.add_command(add_key) cli.add_command(add) +cli.add_command(build) cli.add_command(create) cli.add_command(config) cli.add_command(delete) @@ -141,6 +144,7 @@ def _init_gui() -> None: cli.add_command(push) cli.add_command(register) cli.add_command(remove) +cli.add_command(remove_key) cli.add_command(reset_password) cli.add_command(run) cli.add_command(scaffold) diff --git a/aea/cli/fetch.py b/aea/cli/fetch.py index 51dc5e86b1..6cce541104 100644 --- a/aea/cli/fetch.py +++ b/aea/cli/fetch.py @@ -16,9 +16,7 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - """Implementation of the 'aea fetch' subcommand.""" - import os from distutils.dir_util import copy_tree from typing import Optional, cast @@ -43,6 +41,7 @@ PROTOCOL, SKILL, ) +from aea.exceptions import enforce @click.command(name="fetch") @@ -106,7 +105,10 @@ def fetch_agent_locally( source_path = try_get_item_source_path( packages_path, public_id.author, AGENTS, public_id.name ) - + enforce( + ctx.config.get("is_local") is True or ctx.config.get("is_mixed") is True, + "Please use `ctx.set_config('is_local', True)` or `ctx.set_config('is_mixed', True)` to fetch agent and all components locally.", + ) try_to_load_agent_config(ctx, agent_src_path=source_path) if not _is_version_correct(ctx, public_id): raise click.ClickException( @@ -136,7 +138,6 @@ def fetch_agent_locally( ctx.agent_config, open(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), "w") ) - # add dependencies _fetch_agent_deps(ctx) click.echo("Agent {} successfully fetched.".format(public_id.name)) diff --git a/aea/cli/generate.py b/aea/cli/generate.py index 77c68819c5..e0e440aaad 100644 --- a/aea/cli/generate.py +++ b/aea/cli/generate.py @@ -112,14 +112,14 @@ def _generate_item(ctx: Context, item_type: str, specification_path: str): ctx.agent_loader.dump( ctx.agent_config, open(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), "w") ) - except FileExistsError: # pragma: no cover - raise click.ClickException( + except FileExistsError: + raise click.ClickException( # pragma: no cover "A {} with this name already exists. 
Please choose a different name and try again.".format( item_type ) ) except ProtocolSpecificationParseError as e: - raise click.ClickException( + raise click.ClickException( # pragma: no cover "The following error happened while parsing the protocol specification: " + str(e) ) diff --git a/aea/cli/generate_wealth.py b/aea/cli/generate_wealth.py index b3161362bd..22896733ef 100644 --- a/aea/cli/generate_wealth.py +++ b/aea/cli/generate_wealth.py @@ -19,7 +19,7 @@ """Implementation of the 'aea generate_wealth' subcommand.""" -from typing import cast +from typing import Optional, cast import click @@ -37,23 +37,27 @@ type=click.Choice(list(faucet_apis_registry.supported_ids)), required=True, ) +@click.argument("url", metavar="URL", type=str, required=False, default=None) @click.option( "--sync", is_flag=True, help="For waiting till the faucet has released the funds." ) @click.pass_context @check_aea_project -def generate_wealth(click_context, sync, type_): +def generate_wealth(click_context, sync, url, type_): """Generate wealth for the agent on a test network.""" ctx = cast(Context, click_context.obj) - _try_generate_wealth(ctx, type_, sync) + _try_generate_wealth(ctx, type_, url, sync) -def _try_generate_wealth(ctx: Context, type_: str, sync: bool) -> None: +def _try_generate_wealth( + ctx: Context, type_: str, url: Optional[str], sync: bool +) -> None: """ Try generate wealth for the provided network identifier. - :param click_context: the click context + :param ctx: the click context :param type_: the network type + :param url: the url :param sync: whether to sync or not :return: None """ @@ -67,7 +71,7 @@ def _try_generate_wealth(ctx: Context, type_: str, sync: bool) -> None: address, testnet ) ) - try_generate_testnet_wealth(type_, address, sync) + try_generate_testnet_wealth(type_, address, url, sync) except ValueError as e: # pragma: no cover raise click.ClickException(str(e)) diff --git a/aea/cli/publish.py b/aea/cli/publish.py index 79c55a44ba..bf6c756223 100644 --- a/aea/cli/publish.py +++ b/aea/cli/publish.py @@ -93,7 +93,7 @@ def _validate_pkp(private_key_paths: CRUDCollection) -> None: """ if private_key_paths.read_all() != []: raise click.ClickException( - "You are not allowed to publish agents with non-empty private_key_paths. Change to `private_key_paths: {}` in `aea-config.yaml`" + "You are not allowed to publish agents with non-empty private_key_paths. Use the `aea remove-key` command to remove key paths from `private_key_paths: {}` in `aea-config.yaml`." ) diff --git a/aea/cli/remove_key.py b/aea/cli/remove_key.py new file mode 100644 index 0000000000..d2c6ffbd43 --- /dev/null +++ b/aea/cli/remove_key.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2020 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ------------------------------------------------------------------------------ + +"""Implementation of the 'aea remove_key' subcommand.""" + +import os +from typing import cast + +import click + +from aea.cli.utils.context import Context +from aea.cli.utils.decorators import check_aea_project +from aea.configurations.constants import DEFAULT_AEA_CONFIG_FILE +from aea.crypto.registries import crypto_registry + + +@click.command() +@click.argument( + "type_", + metavar="TYPE", + type=click.Choice(list(crypto_registry.supported_ids)), + required=True, +) +@click.option( + "--connection", is_flag=True, help="For removing a private key for connections." +) +@click.pass_context +@check_aea_project +def remove_key(click_context, type_, connection): + """Remove a private key from the wallet of the agent.""" + _remove_private_key(click_context, type_, connection) + + +def _remove_private_key( + click_context: click.core.Context, type_: str, connection: bool = False, +) -> None: + """ + Remove private key to the wallet. + + :param click_context: click context object. + :param type_: type. + :param connection: whether or not it is a private key for a connection + + :return: None + """ + ctx = cast(Context, click_context.obj) + _try_remove_key(ctx, type_, connection) + + +def _try_remove_key(ctx: Context, type_: str, connection: bool = False): + private_keys = ( + ctx.agent_config.connection_private_key_paths + if connection + else ctx.agent_config.private_key_paths + ) + existing_keys = private_keys.keys() + if type_ not in existing_keys: + raise click.ClickException( + f"There is no {'connection ' if connection else ''}key registered with id {type_}." + ) + ctx.agent_loader.dump( + ctx.agent_config, open(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), "w") + ) diff --git a/aea/cli/scaffold.py b/aea/cli/scaffold.py index 1adeafce6b..449bd675be 100644 --- a/aea/cli/scaffold.py +++ b/aea/cli/scaffold.py @@ -48,6 +48,7 @@ DEFAULT_PROTOCOL_CONFIG_FILE, DEFAULT_SKILL_CONFIG_FILE, DEFAULT_VERSION, + DOTTED_PATH_MODULE_ELEMENT_SEPARATOR, PROTOCOL, SCAFFOLD_PUBLIC_ID, SKILL, @@ -109,6 +110,13 @@ def decision_maker_handler(ctx: Context): _scaffold_dm_handler(ctx) +@scaffold.command() +@pass_ctx +def error_handler(ctx: Context): + """Add an error scaffolding to the configuration file and agent.""" + _scaffold_error_handler(ctx) + + @clean_after def scaffold_item(ctx: Context, item_type: str, item_name: str) -> None: """ @@ -198,35 +206,63 @@ def scaffold_item(ctx: Context, item_type: str, item_name: str) -> None: def _scaffold_dm_handler(ctx: Context): - """Add a scaffolded decision maker handler to the project and configuration.""" - existing_dm_handler = ctx.agent_config.decision_maker_handler + """Scaffold the decision maker handler.""" + _scaffold_non_package_item( + ctx, + "decision_maker_handler", + "decision maker handler", + "DecisionMakerHandler", + "decision_maker", + ) + + +def _scaffold_error_handler(ctx): + """Scaffold the error handler.""" + _scaffold_non_package_item( + ctx, "error_handler", "error handler", "ErrorHandler", "error_handler" + ) + + +def _scaffold_non_package_item( + ctx: Context, item_type: str, type_name: str, class_name: str, aea_dir: str +): + """ + Scaffold a non-package item (e.g. decision maker handler, or error handler). - # check if we already have a decision maker in the project - if existing_dm_handler != {}: + :param ctx: the CLI context. + :param item_type: the item type (e.g. 'decision_maker_handler') + :param type_name: the type name (e.g. 
"decision maker") + :param class_name: the class name (e.g. "DecisionMakerHandler") + :param aea_dir: the AEA directory that contains the scaffold module + :return: None + """ + existing_item = getattr(ctx.agent_config, item_type) + if existing_item != {}: raise click.ClickException( - "A decision maker handler specification already exists. Aborting..." + f"A {type_name} specification already exists. Aborting..." ) - dest = Path("decision_maker.py") + dest = Path(f"{item_type}.py") agent_name = ctx.agent_config.agent_name - click.echo("Adding decision maker scaffold to the agent '{}'...".format(agent_name)) - + click.echo(f"Adding {type_name} scaffold to the agent '{agent_name}'...") # create the file name - dotted_path = ".decision_maker::DecisionMakerHandler" + dotted_path = f".{item_type}{DOTTED_PATH_MODULE_ELEMENT_SEPARATOR}{class_name}" try: # copy the item package into the agent project. - src = Path(os.path.join(AEA_DIR, "decision_maker", "scaffold.py")) - logger.debug("Copying decision maker. src={} dst={}".format(src, dest)) + src = Path(os.path.join(AEA_DIR, aea_dir, "scaffold.py")) + logger.debug(f"Copying error handler. src={src} dst={dest}") shutil.copyfile(src, dest) # add the item to the configurations. - logger.debug( - "Registering the decision_maker into {}".format(DEFAULT_AEA_CONFIG_FILE) + logger.debug(f"Registering the {type_name} into {DEFAULT_AEA_CONFIG_FILE}") + setattr( + ctx.agent_config, + item_type, + { + "dotted_path": str(dotted_path), + "file_path": str(os.path.join(".", dest)), + }, ) - ctx.agent_config.decision_maker_handler = { - "dotted_path": str(dotted_path), - "file_path": str(os.path.join(".", dest)), - } ctx.agent_loader.dump( ctx.agent_config, open(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), "w") ) diff --git a/aea/cli/utils/config.py b/aea/cli/utils/config.py index 1b506f2705..e7fae977d9 100644 --- a/aea/cli/utils/config.py +++ b/aea/cli/utils/config.py @@ -86,6 +86,7 @@ def try_to_load_agent_config( path = Path(os.path.join(agent_src_path, DEFAULT_AEA_CONFIG_FILE)) with path.open(mode="r", encoding="utf-8") as fp: ctx.agent_config = ctx.agent_loader.load(fp) + ctx.agent_config.directory = Path(agent_src_path) logging.config.dictConfig(ctx.agent_config.logging_config) except FileNotFoundError: if is_exit_on_except: diff --git a/aea/cli/utils/generic.py b/aea/cli/utils/generic.py index 8108c68aa7..cb87d7a4ee 100644 --- a/aea/cli/utils/generic.py +++ b/aea/cli/utils/generic.py @@ -16,9 +16,7 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - """Module with generic utils of the aea cli.""" - import os from typing import Dict, List diff --git a/aea/common.py b/aea/common.py index 907be61ca3..107712bb82 100644 --- a/aea/common.py +++ b/aea/common.py @@ -18,4 +18,13 @@ # ------------------------------------------------------------------------------ """This module contains the common types and interfaces used in the aea framework.""" +from typing import Any, Dict, List, Optional, Union + + Address = str + +Primitive = Union[str, int, bool, float] +_JSONDict = Dict[Any, Any] # temporary placeholder +_JSONList = List[Any] # temporary placeholder +_JSONType = Optional[Union[Primitive, _JSONDict, _JSONList]] +JSONLike = Dict[str, _JSONType] diff --git a/aea/components/base.py b/aea/components/base.py index c6099dd34b..0044f97c90 100644 --- a/aea/components/base.py +++ b/aea/components/base.py @@ -16,7 +16,6 @@ # limitations under the License. 
# # ------------------------------------------------------------------------------ - """This module contains definitions of agent components.""" import importlib.util import logging @@ -106,6 +105,11 @@ def directory(self, path: Path) -> None: raise ValueError("Directory already set.") self._directory = path + @property + def build_directory(self) -> Optional[str]: + """Get build directory for the component.""" + return self.configuration.build_directory + def load_aea_package(configuration: ComponentConfiguration) -> None: """ diff --git a/aea/configurations/base.py b/aea/configurations/base.py index 6fcd39ed77..9b3210b6eb 100644 --- a/aea/configurations/base.py +++ b/aea/configurations/base.py @@ -18,7 +18,6 @@ # ------------------------------------------------------------------------------ """Classes to handle AEA configurations.""" - import functools import pprint import re @@ -562,6 +561,10 @@ def read_all(self) -> List[Tuple[str, T]]: (k, v) for k, v in self._items_by_id.items() ] + def keys(self) -> Set[str]: + """Get the set of keys.""" + return set(self._items_by_id.keys()) + class PublicId(JSONSerializable): """This class implement a public identifier. @@ -990,6 +993,7 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, ): """ Initialize a package configuration. @@ -1004,6 +1008,7 @@ def __init__( The fixed version is interpreted with the specifier '=='. :param fingerprint: the fingerprint. :param fingerprint_ignore_patterns: a list of file patterns to ignore files to fingerprint. + :param build_entrypoint: path to a script to execute at build time. """ super().__init__() if name is None or author is None: # pragma: nocover @@ -1018,6 +1023,7 @@ def __init__( if fingerprint_ignore_patterns is not None else [] ) + self.build_entrypoint = build_entrypoint self.aea_version = aea_version if aea_version != "" else __aea_version__ self._aea_version_specifiers = self._parse_aea_version_specifier(aea_version) @@ -1102,6 +1108,8 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, + build_directory: Optional[str] = None, dependencies: Optional[Dependencies] = None, ): """Set component configuration.""" @@ -1113,8 +1121,20 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, ) self.pypi_dependencies: Dependencies = dependencies if dependencies is not None else {} + self._build_directory = build_directory + + @property + def build_directory(self) -> Optional[str]: + """Get the component type.""" + return self._build_directory + + @build_directory.setter + def build_directory(self, value: Optional[str]) -> None: + """Get the component type.""" + self._build_directory = value @property def component_type(self) -> ComponentType: @@ -1196,6 +1216,8 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, + build_directory: Optional[str] = None, class_name: str = "", protocols: Optional[Set[PublicId]] = None, connections: Optional[Set[PublicId]] = None, @@ -1236,6 +1258,8 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, + build_directory, dependencies, ) self.class_name = class_name @@ -1273,7 +1297,7 @@ def 
is_abstract_component(self) -> bool: @property def json(self) -> Dict: """Return the JSON representation.""" - return OrderedDict( + result = OrderedDict( { "name": self.name, "author": self.author, @@ -1296,6 +1320,11 @@ def json(self) -> Dict: "is_abstract": self.is_abstract, } ) + if self.build_entrypoint: + result["build_entrypoint"] = self.build_entrypoint + if self.build_directory: + result["build_directory"] = self.build_directory + return result @classmethod def from_json(cls, obj: Dict): @@ -1319,6 +1348,8 @@ def from_json(cls, obj: Dict): fingerprint_ignore_patterns=cast( Sequence[str], obj.get("fingerprint_ignore_patterns") ), + build_entrypoint=cast(Optional[str], obj.get("build_entrypoint")), + build_directory=cast(Optional[str], obj.get("build_directory")), class_name=cast(str, obj.get("class_name")), protocols=cast(Set[PublicId], protocols), connections=cast(Set[PublicId], connections), @@ -1360,6 +1391,8 @@ def __init__( license_: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, + build_directory: Optional[str] = None, aea_version: str = "", dependencies: Optional[Dependencies] = None, description: str = "", @@ -1373,6 +1406,8 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, + build_directory, dependencies, ) self.dependencies = dependencies if dependencies is not None else {} @@ -1381,7 +1416,7 @@ def __init__( @property def json(self) -> Dict: """Return the JSON representation.""" - return OrderedDict( + result = OrderedDict( { "name": self.name, "author": self.author, @@ -1395,6 +1430,11 @@ def json(self) -> Dict: "dependencies": dependencies_to_json(self.dependencies), } ) + if self.build_entrypoint: + result["build_entrypoint"] = self.build_entrypoint + if self.build_directory: + result["build_directory"] = self.build_directory + return result @classmethod def from_json(cls, obj: Dict): @@ -1410,6 +1450,8 @@ def from_json(cls, obj: Dict): fingerprint_ignore_patterns=cast( Sequence[str], obj.get("fingerprint_ignore_patterns") ), + build_entrypoint=cast(Optional[str], obj.get("build_entrypoint")), + build_directory=cast(Optional[str], obj.get("build_directory")), dependencies=dependencies, description=cast(str, obj.get("description", "")), ) @@ -1464,6 +1506,8 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, + build_directory: Optional[str] = None, connections: Optional[Set[PublicId]] = None, protocols: Optional[Set[PublicId]] = None, contracts: Optional[Set[PublicId]] = None, @@ -1481,6 +1525,8 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, + build_directory, dependencies, ) self.connections = connections if connections is not None else set() @@ -1550,6 +1596,10 @@ def json(self) -> Dict: "is_abstract": self.is_abstract, } ) + if self.build_entrypoint: + result["build_entrypoint"] = self.build_entrypoint + if self.build_directory: + result["build_directory"] = self.build_directory return result @classmethod @@ -1564,6 +1614,7 @@ def from_json(cls, obj: Dict): fingerprint_ignore_patterns = cast( Sequence[str], obj.get("fingerprint_ignore_patterns") ) + build_entrypoint = cast(Optional[str], obj.get("build_entrypoint")) connections = {PublicId.from_str(id_) for id_ in obj.get(CONNECTIONS, set())} protocols = {PublicId.from_str(id_) for id_ 
in obj.get(PROTOCOLS, set())} contracts = {PublicId.from_str(id_) for id_ in obj.get(CONTRACTS, set())} @@ -1578,6 +1629,7 @@ def from_json(cls, obj: Dict): aea_version=aea_version_specifiers, fingerprint=fingerprint, fingerprint_ignore_patterns=fingerprint_ignore_patterns, + build_entrypoint=build_entrypoint, connections=connections, protocols=protocols, contracts=contracts, @@ -1585,6 +1637,7 @@ def from_json(cls, obj: Dict): dependencies=dependencies, description=description, is_abstract=obj.get("is_abstract", False), + build_directory=cast(Optional[str], obj.get("build_directory")), ) for behaviour_id, behaviour_data in obj.get("behaviours", {}).items(): @@ -1673,6 +1726,7 @@ class AgentConfig(PackageConfiguration): "default_connection", "default_ledger", "default_routing", + "storage_uri", ] ) @@ -1685,12 +1739,14 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, registry_path: str = DEFAULT_REGISTRY_NAME, description: str = "", logging_config: Optional[Dict] = None, period: Optional[float] = None, execution_timeout: Optional[float] = None, max_reactions: Optional[int] = None, + error_handler: Optional[Dict] = None, decision_maker_handler: Optional[Dict] = None, skill_exception_policy: Optional[str] = None, connection_exception_policy: Optional[str] = None, @@ -1700,6 +1756,7 @@ def __init__( default_routing: Optional[Dict[str, str]] = None, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, + storage_uri: Optional[str] = None, component_configurations: Optional[Dict[ComponentId, Dict]] = None, ): """Instantiate the agent configuration object.""" @@ -1711,6 +1768,7 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, ) self.agent_name = self.name self.registry_path = registry_path @@ -1744,6 +1802,7 @@ def __init__( self.skill_exception_policy: Optional[str] = skill_exception_policy self.connection_exception_policy: Optional[str] = connection_exception_policy + self.error_handler = error_handler if error_handler is not None else {} self.decision_maker_handler = ( decision_maker_handler if decision_maker_handler is not None else {} ) @@ -1758,6 +1817,7 @@ def __init__( ) # type: Dict[PublicId, PublicId] self.loop_mode = loop_mode self.runtime_mode = runtime_mode + self.storage_uri = storage_uri # this attribute will be set through the setter below self._component_configurations: Dict[ComponentId, Dict] = {} self.component_configurations = ( @@ -1874,6 +1934,9 @@ def json(self) -> Dict: } ) # type: Dict[str, Any] + if self.build_entrypoint: + config["build_entrypoint"] = self.build_entrypoint + # framework optional configs are only printed if defined. 
if self.period is not None: config["period"] = self.period @@ -1881,6 +1944,8 @@ def json(self) -> Dict: config["execution_timeout"] = self.execution_timeout if self.max_reactions is not None: config["max_reactions"] = self.max_reactions + if self.error_handler != {}: + config["error_handler"] = self.error_handler if self.decision_maker_handler != {}: config["decision_maker_handler"] = self.decision_maker_handler if self.skill_exception_policy is not None: @@ -1891,6 +1956,8 @@ def json(self) -> Dict: config["loop_mode"] = self.loop_mode if self.runtime_mode is not None: config["runtime_mode"] = self.runtime_mode + if self.storage_uri is not None: + config["storage_uri"] = self.storage_uri if self.currency_denominations != {}: config["currency_denominations"] = self.currency_denominations @@ -1911,10 +1978,12 @@ def from_json(cls, obj: Dict): fingerprint_ignore_patterns=cast( Sequence[str], obj.get("fingerprint_ignore_patterns") ), + build_entrypoint=cast(Optional[str], obj.get("build_entrypoint")), logging_config=cast(Dict, obj.get("logging_config", {})), period=cast(float, obj.get("period")), execution_timeout=cast(float, obj.get("execution_timeout")), max_reactions=cast(int, obj.get("max_reactions")), + error_handler=cast(Dict, obj.get("error_handler", {})), decision_maker_handler=cast(Dict, obj.get("decision_maker_handler", {})), skill_exception_policy=cast(str, obj.get("skill_exception_policy")), connection_exception_policy=cast( @@ -1926,6 +1995,7 @@ def from_json(cls, obj: Dict): default_routing=cast(Dict, obj.get("default_routing", {})), loop_mode=cast(str, obj.get("loop_mode")), runtime_mode=cast(str, obj.get("runtime_mode")), + storage_uri=cast(str, obj.get("storage_uri")), component_configurations=None, ) @@ -2001,20 +2071,6 @@ def __init__(self, **args): """Initialize a speech_act content configuration.""" super().__init__() self.args = args # type: Dict[str, str] - self._check_consistency() - - def _check_consistency(self): - """Check consistency of the args.""" - for content_name, content_type in self.args.items(): - if not isinstance(content_name, str) or not isinstance(content_type, str): - raise ProtocolSpecificationParseError( - "Contents' names and types must be string." - ) - # Check each content definition key/value (i.e. content name/type) is not empty - if content_name == "" or content_type == "": - raise ProtocolSpecificationParseError( - "Contents' names and types cannot be empty." - ) @property def json(self) -> Dict: @@ -2075,7 +2131,7 @@ def dialogue_config(self, dialogue_config: Dict): @property def json(self) -> Dict: """Return the JSON representation.""" - return OrderedDict( + result: Dict[str, Any] = OrderedDict( { "name": self.name, "author": self.author, @@ -2089,6 +2145,7 @@ def json(self) -> Dict: }, } ) + return result @classmethod def from_json(cls, obj: Dict): @@ -2108,35 +2165,8 @@ def from_json(cls, obj: Dict): protocol_specification.speech_acts.create( speech_act, speech_act_content_config ) - protocol_specification._check_consistency() # pylint: disable=protected-access return protocol_specification - def _check_consistency(self): - """Validate the correctness of the speech_acts.""" - if len(self.speech_acts.read_all()) == 0: - raise ProtocolSpecificationParseError( - "There should be at least one performative defined in the speech_acts." 
- ) - content_dict = {} - for performative, speech_act_content_config in self.speech_acts.read_all(): - if not isinstance(performative, str): - raise ProtocolSpecificationParseError( - "A 'performative' is not specified as a string." - ) - if performative == "": - raise ProtocolSpecificationParseError( - "A 'performative' cannot be an empty string." - ) - for content_name, content_type in speech_act_content_config.args.items(): - if content_name in content_dict.keys(): - if content_type != content_dict[content_name]: # pragma: no cover - raise ProtocolSpecificationParseError( - "The content '{}' appears more than once with different types in speech_acts.".format( - content_name - ) - ) - content_dict[content_name] = content_type - class ContractConfig(ComponentConfiguration): """Handle contract configuration.""" @@ -2155,6 +2185,8 @@ def __init__( aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, + build_entrypoint: Optional[str] = None, + build_directory: Optional[str] = None, dependencies: Optional[Dependencies] = None, description: str = "", contract_interface_paths: Optional[Dict[str, str]] = None, @@ -2169,6 +2201,8 @@ def __init__( aea_version, fingerprint, fingerprint_ignore_patterns, + build_entrypoint, + build_directory, dependencies, ) self.dependencies = dependencies if dependencies is not None else {} @@ -2181,7 +2215,7 @@ def __init__( @property def json(self) -> Dict: """Return the JSON representation.""" - return OrderedDict( + result = OrderedDict( { "name": self.name, "author": self.author, @@ -2197,6 +2231,11 @@ def json(self) -> Dict: "dependencies": dependencies_to_json(self.dependencies), } ) + if self.build_entrypoint: + result["build_entrypoint"] = self.build_entrypoint + if self.build_directory: + result["build_directory"] = self.build_directory + return result @classmethod def from_json(cls, obj: Dict): @@ -2214,6 +2253,8 @@ def from_json(cls, obj: Dict): fingerprint_ignore_patterns=cast( Sequence[str], obj.get("fingerprint_ignore_patterns") ), + build_entrypoint=cast(Optional[str], obj.get("build_entrypoint")), + build_directory=cast(Optional[str], obj.get("build_directory")), dependencies=dependencies, description=cast(str, obj.get("description", "")), contract_interface_paths=cast( diff --git a/aea/configurations/constants.py b/aea/configurations/constants.py index 2f037ff46c..d7ed3d0547 100644 --- a/aea/configurations/constants.py +++ b/aea/configurations/constants.py @@ -77,3 +77,5 @@ IMPORT_TEMPLATE_1 = "from packages.{author}.{type}.{name}" IMPORT_TEMPLATE_2 = "import packages.{author}.{type}.{name}" DEFAULT_ENV_DOTFILE = ".env" +DOTTED_PATH_MODULE_ELEMENT_SEPARATOR = ":" +LIBPROTOC_VERSION = "libprotoc 3.11.4" diff --git a/aea/configurations/project.py b/aea/configurations/project.py index 0bf21753da..87e5cfc713 100644 --- a/aea/configurations/project.py +++ b/aea/configurations/project.py @@ -60,11 +60,13 @@ def load( """ ctx = Context(cwd=working_dir, registry_path=registry_path) ctx.set_config("skip_consistency_check", skip_consistency_check) + path = os.path.join(working_dir, public_id.author, public_id.name) target_dir = os.path.join(public_id.author, public_id.name) if not is_restore and not os.path.exists(target_dir): if is_local: + ctx.set_config("is_local", True) fetch_agent_locally(ctx, public_id, target_dir=target_dir) else: fetch_agent(ctx, public_id, target_dir=target_dir) diff --git a/aea/configurations/schemas/aea-config_schema.json 
b/aea/configurations/schemas/aea-config_schema.json index 5bee353403..a8a495c46c 100644 --- a/aea/configurations/schemas/aea-config_schema.json +++ b/aea/configurations/schemas/aea-config_schema.json @@ -40,6 +40,9 @@ "fingerprint_ignore_patterns": { "$ref": "definitions.json#/definitions/fingerprint_ignore_patterns" }, + "build_entrypoint": { + "$ref": "definitions.json#/definitions/build_entrypoint" + }, "registry_path": { "type": "string" }, @@ -116,6 +119,9 @@ "decision_maker_handler": { "type": "object" }, + "error_handler": { + "type": "object" + }, "skill_exception_policy": { "$ref": "definitions.json#/definitions/skill_exception_policy" }, @@ -136,6 +142,9 @@ }, "runtime_mode": { "$ref": "definitions.json#/definitions/runtime_mode" + }, + "storage_uri": { + "$ref": "definitions.json#/definitions/storage_uri" } } } diff --git a/aea/configurations/schemas/connection-config_schema.json b/aea/configurations/schemas/connection-config_schema.json index afc9e18a3a..ee94c0eaed 100644 --- a/aea/configurations/schemas/connection-config_schema.json +++ b/aea/configurations/schemas/connection-config_schema.json @@ -40,6 +40,9 @@ "fingerprint_ignore_patterns": { "$ref": "definitions.json#/definitions/fingerprint_ignore_patterns" }, + "build_entrypoint": { + "$ref": "definitions.json#/definitions/build_entrypoint" + }, "class_name": { "type": "string" }, diff --git a/aea/configurations/schemas/contract-config_schema.json b/aea/configurations/schemas/contract-config_schema.json index cf5766141e..fa26c7bdf0 100644 --- a/aea/configurations/schemas/contract-config_schema.json +++ b/aea/configurations/schemas/contract-config_schema.json @@ -37,6 +37,9 @@ "fingerprint_ignore_patterns": { "$ref": "definitions.json#/definitions/fingerprint_ignore_patterns" }, + "build_entrypoint": { + "$ref": "definitions.json#/definitions/build_entrypoint" + }, "dependencies": { "$ref": "definitions.json#/definitions/dependencies" }, diff --git a/aea/configurations/schemas/definitions.json b/aea/configurations/schemas/definitions.json index 7285312379..b71039b5c5 100644 --- a/aea/configurations/schemas/definitions.json +++ b/aea/configurations/schemas/definitions.json @@ -106,6 +106,9 @@ "$ref": "definitions.json#/definitions/ignore_pattern" } }, + "build_entrypoint": { + "type": "string" + }, "ledger_id": { "type": "string", "pattern": "^[A-Za-z_][A-Za-z0-9_]{0,127}$" @@ -141,6 +144,12 @@ "runtime_mode": { "type": "string", "enum": ["async", "threaded"] + }, + "storage_uri": { + "type": "string" + }, + "keep_terminal_state_dialogues": { + "type": "boolean" } } } diff --git a/aea/configurations/schemas/protocol-config_schema.json b/aea/configurations/schemas/protocol-config_schema.json index a6f23f63c6..55792479d4 100644 --- a/aea/configurations/schemas/protocol-config_schema.json +++ b/aea/configurations/schemas/protocol-config_schema.json @@ -36,6 +36,9 @@ "fingerprint_ignore_patterns": { "$ref": "definitions.json#/definitions/fingerprint_ignore_patterns" }, + "build_entrypoint": { + "$ref": "definitions.json#/definitions/build_entrypoint" + }, "dependencies": { "$ref": "definitions.json#/definitions/dependencies" }, diff --git a/aea/configurations/schemas/protocol-specification_schema.json b/aea/configurations/schemas/protocol-specification_schema.json index 2a8f4d8a66..d92f30cd4a 100644 --- a/aea/configurations/schemas/protocol-specification_schema.json +++ b/aea/configurations/schemas/protocol-specification_schema.json @@ -49,4 +49,4 @@ "type": "object" } } -} \ No newline at end of file +} diff --git 
a/aea/configurations/schemas/skill-config_schema.json b/aea/configurations/schemas/skill-config_schema.json index 35ee15fa65..e5ce9dda11 100644 --- a/aea/configurations/schemas/skill-config_schema.json +++ b/aea/configurations/schemas/skill-config_schema.json @@ -39,6 +39,9 @@ "fingerprint_ignore_patterns": { "$ref": "definitions.json#/definitions/fingerprint_ignore_patterns" }, + "build_entrypoint": { + "$ref": "definitions.json#/definitions/build_entrypoint" + }, "connections": { "type": "array", "additionalProperties": false, diff --git a/aea/connections/scaffold/connection.py b/aea/connections/scaffold/connection.py index 5211f67334..6dc111e835 100644 --- a/aea/connections/scaffold/connection.py +++ b/aea/connections/scaffold/connection.py @@ -66,7 +66,7 @@ async def disconnect(self) -> None: """ raise NotImplementedError # pragma: no cover - async def send(self, envelope: "Envelope") -> None: + async def send(self, envelope: Envelope) -> None: """ Send an envelope. @@ -75,7 +75,7 @@ async def send(self, envelope: "Envelope") -> None: """ raise NotImplementedError # pragma: no cover - async def receive(self, *args, **kwargs) -> Optional["Envelope"]: + async def receive(self, *args, **kwargs) -> Optional[Envelope]: """ Receive an envelope. Blocking. diff --git a/aea/connections/scaffold/connection.yaml b/aea/connections/scaffold/connection.yaml index ea583e8e8e..acaa94642a 100644 --- a/aea/connections/scaffold/connection.yaml +++ b/aea/connections/scaffold/connection.yaml @@ -5,10 +5,10 @@ type: connection description: The scaffold connection provides a scaffold for a connection to be implemented by the developer. license: Apache-2.0 -aea_version: '>=0.7.0, <0.8.0' +aea_version: '>=0.8.0, <0.9.0' fingerprint: __init__.py: QmZvYZ5ECcWwqiNGh8qNTg735wu51HqaLxTSifUxkQ4KGj - connection.py: QmT7MNg8gkmWMzthN3k77i6UVhwXBeC2bGiNrUmXQcjWit + connection.py: QmcSunS5gm2sxatq52AEhywtr3f2LaLqnK1xmdxwM7qZ2b readme.md: Qmdt71SaCCwAG1c24VktXDm4pxgUBiPMg4bWfUTiqorypf fingerprint_ignore_patterns: [] connections: [] diff --git a/aea/context/base.py b/aea/context/base.py index 365884e796..095782646b 100644 --- a/aea/context/base.py +++ b/aea/context/base.py @@ -17,14 +17,16 @@ # # ------------------------------------------------------------------------------ + """This module contains the agent context class.""" from queue import Queue from types import SimpleNamespace -from typing import Any, Dict, Optional +from typing import Any, Callable, Dict, Optional from aea.common import Address from aea.configurations.base import PublicId +from aea.helpers.storage.generic_storage import Storage from aea.identity.base import Identity from aea.multiplexer import MultiplexerStatus, OutBox from aea.skills.tasks import TaskManager @@ -47,6 +49,7 @@ def __init__( default_routing: Dict[PublicId, PublicId], search_service_address: Address, decision_maker_address: Address, + storage_callable: Callable[[], Optional[Storage]] = lambda: None, **kwargs ): """ @@ -64,6 +67,7 @@ def __init__( :param default_routing: the default routing :param search_service_address: the address of the search service :param decision_maker_address: the address of the decision maker + :param storage_callable: function that returns optional storage attached to agent. :param kwargs: keyword arguments to be attached in the agent context namespace. 
""" self._shared_state = {} # type: Dict[str, Any] @@ -79,8 +83,14 @@ def __init__( self._currency_denominations = currency_denominations self._default_connection = default_connection self._default_routing = default_routing + self._storage_callable = storage_callable self._namespace = SimpleNamespace(**kwargs) + @property + def storage(self) -> Optional[Storage]: + """Return storage instance if enabled in AEA.""" + return self._storage_callable() + @property def shared_state(self) -> Dict[str, Any]: """ diff --git a/aea/contracts/base.py b/aea/contracts/base.py index 53b8d3e821..6f0a8c752c 100644 --- a/aea/contracts/base.py +++ b/aea/contracts/base.py @@ -24,6 +24,7 @@ from pathlib import Path from typing import Any, Dict, Optional, cast +from aea.common import JSONLike from aea.components.base import Component, load_aea_package from aea.configurations.base import ComponentType, ContractConfig, PublicId from aea.configurations.constants import CONTRACTS @@ -129,7 +130,7 @@ def from_config(cls, configuration: ContractConfig, **kwargs) -> "Contract": @classmethod def get_deploy_transaction( cls, ledger_api: LedgerApi, deployer_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Handler method for the 'GET_DEPLOY_TRANSACTION' requests. @@ -150,7 +151,7 @@ def get_deploy_transaction( @classmethod def get_raw_transaction( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Handler method for the 'GET_RAW_TRANSACTION' requests. @@ -166,7 +167,7 @@ def get_raw_transaction( @classmethod def get_raw_message( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> Optional[bytes]: """ Handler method for the 'GET_RAW_MESSAGE' requests. @@ -182,7 +183,7 @@ def get_raw_message( @classmethod def get_state( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Handler method for the 'GET_STATE' requests. diff --git a/aea/contracts/scaffold/contract.py b/aea/contracts/scaffold/contract.py index e3da5378aa..6312e385c0 100644 --- a/aea/contracts/scaffold/contract.py +++ b/aea/contracts/scaffold/contract.py @@ -19,8 +19,7 @@ """This module contains the scaffold contract definition.""" -from typing import Any, Dict - +from aea.common import JSONLike from aea.configurations.base import PublicId from aea.contracts.base import Contract from aea.crypto.base import LedgerApi @@ -34,7 +33,7 @@ class MyScaffoldContract(Contract): @classmethod def get_raw_transaction( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> JSONLike: """ Handler method for the 'GET_RAW_TRANSACTION' requests. @@ -50,7 +49,7 @@ def get_raw_transaction( @classmethod def get_raw_message( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> bytes: """ Handler method for the 'GET_RAW_MESSAGE' requests. @@ -66,7 +65,7 @@ def get_raw_message( @classmethod def get_state( cls, ledger_api: LedgerApi, contract_address: str, **kwargs - ) -> Dict[str, Any]: + ) -> JSONLike: """ Handler method for the 'GET_STATE' requests. diff --git a/aea/contracts/scaffold/contract.yaml b/aea/contracts/scaffold/contract.yaml index eb8dd39aa9..c71e435241 100644 --- a/aea/contracts/scaffold/contract.yaml +++ b/aea/contracts/scaffold/contract.yaml @@ -4,10 +4,10 @@ version: 0.1.0 type: contract description: The scaffold contract scaffolds a contract to be implemented by the developer. 
license: Apache-2.0 -aea_version: '>=0.7.0, <0.8.0' +aea_version: '>=0.8.0, <0.9.0' fingerprint: __init__.py: QmPBwWhEg3wcH1q9612srZYAYdANVdWLDFWKs7TviZmVj6 - contract.py: QmQrasJcMjhnVS26sXNxPTNH7yBZhRm3ZcNSguTFLV9xaS + contract.py: QmQ8tVoA33z1Tg6KjsGuk3KUeu21ygntr8TCTFmVc2JqwT fingerprint_ignore_patterns: [] class_name: MyScaffoldContract contract_interface_paths: {} diff --git a/aea/crypto/base.py b/aea/crypto/base.py index 0c5fdd28f0..b15313009b 100644 --- a/aea/crypto/base.py +++ b/aea/crypto/base.py @@ -23,7 +23,7 @@ from pathlib import Path from typing import Any, BinaryIO, Dict, Generic, Optional, Tuple, TypeVar -from aea.common import Address +from aea.common import Address, JSONLike EntityClass = TypeVar("EntityClass") @@ -121,9 +121,9 @@ def sign_message(self, message: bytes, is_deprecated_mode: bool = False) -> str: """ @abstractmethod - def sign_transaction(self, transaction: Any) -> Any: + def sign_transaction(self, transaction: JSONLike) -> JSONLike: """ - Sign a transaction in bytes string form. + Sign a transaction in dict form. :param transaction: the transaction to be signed :return: signed transaction @@ -144,7 +144,7 @@ class Helper(ABC): @staticmethod @abstractmethod - def is_transaction_settled(tx_receipt: Any) -> bool: + def is_transaction_settled(tx_receipt: JSONLike) -> bool: """ Check whether a transaction is settled or not. @@ -155,7 +155,7 @@ def is_transaction_settled(tx_receipt: Any) -> bool: @staticmethod @abstractmethod def is_transaction_valid( - tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int, + tx: JSONLike, seller: Address, client: Address, tx_nonce: str, amount: int, ) -> bool: """ Check whether a transaction is valid or not. @@ -259,6 +259,19 @@ def get_balance(self, address: Address) -> Optional[int]: :return: the balance. """ + @abstractmethod + def get_state(self, callable_name: str, *args, **kwargs) -> Optional[JSONLike]: + """ + Call a specified function on the underlying ledger API. + + This usually takes the form of a web request to be waited synchronously. + + :param callable_name: the name of the API function to be called. + :param args: the positional arguments for the API function. + :param kwargs: the keyword arguments for the API function. + :return: the ledger API response. + """ + @abstractmethod def get_transfer_transaction( self, @@ -268,7 +281,7 @@ def get_transfer_transaction( tx_fee: int, tx_nonce: str, **kwargs, - ) -> Optional[Any]: + ) -> Optional[JSONLike]: """ Submit a transfer transaction to the ledger. @@ -281,7 +294,7 @@ def get_transfer_transaction( """ @abstractmethod - def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: + def send_signed_transaction(self, tx_signed: JSONLike) -> Optional[str]: """ Send a signed transaction and wait for confirmation. @@ -291,7 +304,7 @@ def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: """ @abstractmethod - def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: + def get_transaction_receipt(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction receipt for a transaction digest. @@ -300,7 +313,7 @@ def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: """ @abstractmethod - def get_transaction(self, tx_digest: str) -> Optional[Any]: + def get_transaction(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction for a transaction digest. 
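The hunks above make `get_state` part of the `LedgerApi` interface. For illustration only, a minimal sketch of how the concrete implementations added later in this change are meant to be called; it assumes the `make_ledger_api` registry helper and reachable nodes at the placeholder addresses shown (adjust both to your setup):

from aea.crypto.registries import make_ledger_api

# Ethereum: callable_name is looked up on web3's `eth` module, so this proxies
# to api.eth.getBlock("latest") and flattens the AttributeDict result to JSONLike.
ethereum_api = make_ledger_api("ethereum", address="http://127.0.0.1:8545")
latest_block = ethereum_api.get_state("getBlock", "latest")

# Cosmos: callable_name is the root of the REST path and the remaining args are
# appended, i.e. this performs GET <node>/blocks/latest (1317 is the usual LCD port).
cosmos_api = make_ledger_api("cosmos", address="http://127.0.0.1:1317")
latest_cosmos_block = cosmos_api.get_state("blocks", "latest")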
@@ -323,7 +336,7 @@ def get_contract_instance( @abstractmethod def get_deploy_transaction( self, contract_interface: Dict[str, str], deployer_address: Address, **kwargs, - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Get the transaction to deploy the smart contract. @@ -332,6 +345,15 @@ def get_deploy_transaction( :returns tx: the transaction dictionary. """ + @abstractmethod + def update_with_gas_estimate(self, transaction: JSONLike) -> JSONLike: + """ + Attempts to update the transaction with a gas estimate + + :param transaction: the transaction + :return: the updated transaction + """ + class FaucetApi(ABC): """Interface for testnet faucet APIs.""" @@ -340,10 +362,11 @@ class FaucetApi(ABC): network_name = "testnet" # type: str @abstractmethod - def get_wealth(self, address: Address) -> None: + def get_wealth(self, address: Address, url: Optional[str] = None) -> None: """ Get wealth from the faucet for the provided address. :param address: the address. + :param url: the url :return: None """ diff --git a/aea/crypto/cosmos.py b/aea/crypto/cosmos.py index 54d1e76c27..b919dcdb57 100644 --- a/aea/crypto/cosmos.py +++ b/aea/crypto/cosmos.py @@ -30,14 +30,14 @@ import time from collections import namedtuple from pathlib import Path -from typing import Any, BinaryIO, Dict, List, Optional, Tuple +from typing import Any, BinaryIO, Collection, Dict, List, Optional, Tuple, cast import requests from bech32 import bech32_decode, bech32_encode, convertbits from ecdsa import SECP256k1, SigningKey, VerifyingKey from ecdsa.util import sigencode_string_canonize -from aea.common import Address +from aea.common import Address, JSONLike from aea.crypto.base import Crypto, FaucetApi, Helper, LedgerApi from aea.exceptions import AEAEnforceError from aea.helpers.base import try_decorator @@ -60,7 +60,7 @@ class CosmosHelper(Helper): address_prefix = _COSMOS @staticmethod - def is_transaction_settled(tx_receipt: Any) -> bool: + def is_transaction_settled(tx_receipt: JSONLike) -> bool: """ Check whether a transaction is settled or not. @@ -69,12 +69,17 @@ def is_transaction_settled(tx_receipt: Any) -> bool: """ is_successful = False if tx_receipt is not None: - is_successful = True + code = tx_receipt.get("code", None) + is_successful = code is None + if not is_successful: + _default_logger.warning( + f"Transaction not settled. Raw log: {tx_receipt.get('raw_log')}" + ) return is_successful @staticmethod def is_transaction_valid( - tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int, + tx: JSONLike, seller: Address, client: Address, tx_nonce: str, amount: int, ) -> bool: """ Check whether a transaction is valid or not. @@ -90,7 +95,7 @@ def is_transaction_valid( return False # pragma: no cover try: - _tx = tx.get("tx").get("value").get("msg")[0] + _tx = cast(dict, tx.get("tx", {})).get("value", {}).get("msg", [])[0] recovered_amount = int(_tx.get("value").get("amount")[0].get("amount")) sender = _tx.get("value").get("from_address") recipient = _tx.get("value").get("to_address") @@ -273,8 +278,8 @@ def sign_message( # pylint: disable=unused-argument @staticmethod def format_default_transaction( - transaction: Any, signature: str, base64_pbk: str - ) -> Any: + transaction: JSONLike, signature: str, base64_pbk: str + ) -> JSONLike: """ Format default CosmosSDK transaction and add signature. 
@@ -284,7 +289,7 @@ def format_default_transaction( :return: formatted transaction with signature """ - pushable_tx = { + pushable_tx: JSONLike = { "tx": { "msg": transaction["msgs"], "fee": transaction["fee"], @@ -307,8 +312,8 @@ def format_default_transaction( @staticmethod def format_wasm_transaction( - transaction: Any, signature: str, base64_pbk: str - ) -> Any: + transaction: JSONLike, signature: str, base64_pbk: str + ) -> JSONLike: """ Format CosmWasm transaction and add signature. @@ -318,7 +323,7 @@ def format_wasm_transaction( :return: formatted transaction with signature """ - pushable_tx = { + pushable_tx: JSONLike = { "type": "cosmos-sdk/StdTx", "value": { "msg": transaction["msgs"], @@ -337,7 +342,7 @@ def format_wasm_transaction( } return pushable_tx - def sign_transaction(self, transaction: Any) -> Any: + def sign_transaction(self, transaction: JSONLike) -> JSONLike: """ Sign a transaction in bytes string form. @@ -349,12 +354,8 @@ def sign_transaction(self, transaction: Any) -> Any: signed_transaction = self.sign_message(transaction_bytes) base64_pbk = base64.b64encode(bytes.fromhex(self.public_key)).decode("utf-8") - if ( - "msgs" in transaction - and len(transaction["msgs"]) == 1 - and "type" in transaction["msgs"][0] - and "wasm" in transaction["msgs"][0]["type"] - ): + msgs = cast(list, transaction.get("msgs", [])) + if len(msgs) == 1 and "type" in msgs[0] and "wasm" in msgs[0]["type"]: return self.format_wasm_transaction( transaction, signed_transaction, base64_pbk ) @@ -417,6 +418,34 @@ def _try_get_balance(self, address: Address) -> Optional[int]: balance = int(result[0]["amount"]) return balance + def get_state(self, callable_name: str, *args, **kwargs) -> Optional[JSONLike]: + """ + Call a specified function on the ledger API. + + Based on the cosmos REST + API specification, which takes a path (strings separated by '/'). The + convention here is to define the root of the path (txs, blocks, etc.) + as the callable_name and the rest of the path as args. + """ + response = self._try_get_state(callable_name, *args, **kwargs) + return response + + @try_decorator( + "Encountered exception when trying get state: {}", + logger_method=_default_logger.warning, + ) + def _try_get_state( # pylint: disable=unused-argument + self, callable_name: str, *args, **kwargs + ) -> Optional[JSONLike]: + """Try to call a function on the ledger API.""" + result: Optional[JSONLike] = None + query = "/".join(args) + url = self.network_address + f"/{callable_name}/{query}" + response = requests.get(url=url) + if response.status_code == 200: + result = response.json() + return result + def get_deploy_transaction( # pylint: disable=arguments-differ self, contract_interface: Dict[str, str], @@ -427,14 +456,14 @@ def get_deploy_transaction( # pylint: disable=arguments-differ memo: str = "", chain_id: Optional[str] = None, **kwargs, - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Create a CosmWasm bytecode deployment transaction. :param sender_address: the sender address of the message initiator. :param filename: the path to wasm bytecode file. :param gas: Maximum amount of gas to be used on executing command. - :param memo: Any string comment. + :param memo: any string comment. :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). 
:return: the unsigned CosmWasm contract deploy message """ @@ -443,6 +472,8 @@ def get_deploy_transaction( # pylint: disable=arguments-differ account_number, sequence = self._try_get_account_number_and_sequence( deployer_address ) + if account_number is None or sequence is None: + return None deploy_msg = { "type": "wasm/store-code", "value": { @@ -476,7 +507,7 @@ def get_init_transaction( label: str = "", memo: str = "", chain_id: Optional[str] = None, - ) -> Optional[Any]: + ) -> Optional[JSONLike]: """ Create a CosmWasm InitMsg transaction. @@ -487,7 +518,7 @@ def get_init_transaction( :param gas: Maximum amount of gas to be used on executing command. :param denom: the name of the denomination of the contract funds :param label: the label name of the contract - :param memo: Any string comment. + :param memo: any string comment. :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). :return: the unsigned CosmWasm InitMsg """ @@ -496,6 +527,8 @@ def get_init_transaction( account_number, sequence = self._try_get_account_number_and_sequence( deployer_address ) + if account_number is None or sequence is None: + return None instantiate_msg = { "type": "wasm/instantiate", "value": { @@ -529,7 +562,7 @@ def get_handle_transaction( gas: int = 80000, memo: str = "", chain_id: Optional[str] = None, - ) -> Optional[Any]: + ) -> Optional[JSONLike]: """ Create a CosmWasm HandleMsg transaction. @@ -537,7 +570,7 @@ def get_handle_transaction( :param contract_address: the address of the smart contract. :param handle_msg: HandleMsg in JSON format. :param gas: Maximum amount of gas to be used on executing command. - :param memo: Any string comment. + :param memo: any string comment. :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). :return: the unsigned CosmWasm HandleMsg """ @@ -546,6 +579,8 @@ def get_handle_transaction( account_number, sequence = self._try_get_account_number_and_sequence( sender_address ) + if account_number is None or sequence is None: + return None execute_msg = { "type": "wasm/execute", "value": { @@ -573,7 +608,7 @@ def get_handle_transaction( logger_method=_default_logger.warning, ) def try_execute_wasm_transaction( - tx_signed: Any, signed_tx_filename: str = "tx.signed" + tx_signed: JSONLike, signed_tx_filename: str = "tx.signed" ) -> Optional[str]: """ Execute a CosmWasm Transaction. QueryMsg doesn't require signing. @@ -604,7 +639,7 @@ def try_execute_wasm_transaction( logger_method=_default_logger.warning, ) def try_execute_wasm_query( - contract_address: Address, query_msg: Any + contract_address: Address, query_msg: JSONLike ) -> Optional[str]: """ Execute a CosmWasm QueryMsg. QueryMsg doesn't require signing. @@ -641,7 +676,7 @@ def get_transfer_transaction( # pylint: disable=arguments-differ memo: str = "", chain_id: Optional[str] = None, **kwargs, - ) -> Optional[Any]: + ) -> Optional[JSONLike]: """ Submit a transfer transaction to the ledger. @@ -661,6 +696,8 @@ def get_transfer_transaction( # pylint: disable=arguments-differ account_number, sequence = self._try_get_account_number_and_sequence( sender_address ) + if account_number is None or sequence is None: + return None transfer_msg = { "type": "cosmos-sdk/MsgSend", "value": { @@ -690,8 +727,8 @@ def _get_transaction( gas: int, memo: str, sequence: int, - msg: Dict[str, Any], - ) -> Dict[str, Any]: + msg: Dict[str, Collection[str]], + ) -> JSONLike: """ Get a transaction. @@ -705,7 +742,7 @@ def _get_transaction( :param sequence: the sequence. 
:return: the transaction """ - tx = { + tx: JSONLike = { "account_number": str(account_number), "chain_id": chain_id, "fee": { @@ -741,7 +778,7 @@ def _try_get_account_number_and_sequence( ) return result - def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: + def send_signed_transaction(self, tx_signed: JSONLike) -> Optional[str]: """ Send a signed transaction and wait for confirmation. @@ -762,22 +799,22 @@ def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: return tx_digest @staticmethod - def is_cosmwasm_transaction(tx_signed: Any) -> bool: + def is_cosmwasm_transaction(tx_signed: JSONLike) -> bool: """Check whether it is a cosmwasm tx.""" try: - _type = tx_signed["value"]["msg"][0]["type"] + _type = cast(dict, tx_signed.get("value", {})).get("msg", [])[0]["type"] result = _type in ["wasm/store-code", "wasm/instantiate", "wasm/execute"] - except KeyError: # pragma: nocover + except (KeyError, IndexError): # pragma: nocover result = False return result @staticmethod - def is_transfer_transaction(tx_signed: Any) -> bool: + def is_transfer_transaction(tx_signed: JSONLike) -> bool: """Check whether it is a transfer tx.""" try: - _type = tx_signed["tx"]["msg"][0]["type"] + _type = cast(dict, tx_signed.get("tx", {})).get("msg", [])[0]["type"] result = _type in ["cosmos-sdk/MsgSend"] - except KeyError: # pragma: nocover + except (KeyError, IndexError): # pragma: nocover result = False return result @@ -785,7 +822,7 @@ def is_transfer_transaction(tx_signed: Any) -> bool: "Encountered exception when trying to send tx: {}", logger_method=_default_logger.warning, ) - def _try_send_signed_transaction(self, tx_signed: Any) -> Optional[str]: + def _try_send_signed_transaction(self, tx_signed: JSONLike) -> Optional[str]: """ Try send the signed transaction. @@ -801,7 +838,7 @@ def _try_send_signed_transaction(self, tx_signed: Any) -> Optional[str]: _default_logger.error("Cannot send transaction: {}".format(response.json())) return tx_digest - def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: + def get_transaction_receipt(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction receipt for a transaction digest. @@ -815,21 +852,21 @@ def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: "Encountered exception when trying to get transaction receipt: {}", logger_method=_default_logger.warning, ) - def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: + def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[JSONLike]: """ Try get the transaction receipt for a transaction digest. :param tx_digest: the digest associated to the transaction. :return: the tx receipt, if present """ - result = None # type: Optional[Any] + result: Optional[JSONLike] = None url = self.network_address + f"/txs/{tx_digest}" response = requests.get(url=url) if response.status_code == 200: result = response.json() return result - def get_transaction(self, tx_digest: str) -> Optional[Any]: + def get_transaction(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction for a transaction digest. @@ -892,6 +929,17 @@ def get_contract_address(self, code_id: int) -> str: return res[-1]["address"] + def update_with_gas_estimate(self, transaction: JSONLike) -> JSONLike: + """ + Attempts to update the transaction with a gas estimate + + :param transaction: the transaction + :return: the updated transaction + """ + raise NotImplementedError( # pragma: nocover + "No gas estimation has been implemented." 
+ ) + class CosmosApi(_CosmosApi, CosmosHelper): """Class to interact with the Cosmos SDK via a HTTP APIs.""" @@ -929,22 +977,23 @@ def __init__(self, poll_interval=None): """Initialize CosmosFaucetApi.""" self._poll_interval = float(poll_interval or 1) - def get_wealth(self, address: Address) -> None: + def get_wealth(self, address: Address, url: Optional[str] = None) -> None: """ Get wealth from the faucet for the provided address. :param address: the address. + :param url: the url :return: None :raises: RuntimeError of explicit faucet failures """ - uid = self._try_create_faucet_claim(address) + uid = self._try_create_faucet_claim(address, url) if uid is None: # pragma: nocover raise RuntimeError("Unable to create faucet claim") while True: # lookup status form the claim uid - status = self._try_check_faucet_claim(uid) + status = self._try_check_faucet_claim(uid, url) if status is None: # pragma: nocover raise RuntimeError("Failed to check faucet claim status") @@ -964,16 +1013,18 @@ def get_wealth(self, address: Address) -> None: "An error occured while attempting to request a faucet request:\n{}", logger_method=_default_logger.error, ) - def _try_create_faucet_claim(cls, address: Address) -> Optional[str]: + def _try_create_faucet_claim( + cls, address: Address, url: Optional[str] = None + ) -> Optional[str]: """ Create a token faucet claim request :param address: the address to request funds + :param url: the url :return: None on failure, otherwise the request uid """ - response = requests.post( - url=cls._faucet_request_uri(), data={"Address": address} - ) + uri = cls._faucet_request_uri(url) + response = requests.post(url=uri, data={"Address": address}) uid = None if response.status_code == 200: @@ -993,14 +1044,17 @@ def _try_create_faucet_claim(cls, address: Address) -> Optional[str]: "An error occured while attempting to request a faucet request:\n{}", logger_method=_default_logger.error, ) - def _try_check_faucet_claim(cls, uid: str) -> Optional[CosmosFaucetStatus]: + def _try_check_faucet_claim( + cls, uid: str, url: Optional[str] = None + ) -> Optional[CosmosFaucetStatus]: """ Check the status of a faucet request :param uid: The request uid to be checked + :param url: the url :return: None on failure otherwise a CosmosFaucetStatus for the specified uid """ - response = requests.get(cls._faucet_status_uri(uid)) + response = requests.get(cls._faucet_status_uri(uid, url)) if response.status_code != 200: # pragma: nocover _default_logger.warning( "Response: {}, Text: {}".format(response.status_code, response.text) @@ -1016,13 +1070,18 @@ def _try_check_faucet_claim(cls, uid: str) -> Optional[CosmosFaucetStatus]: ) @classmethod - def _faucet_request_uri(cls) -> str: - """Generates the request URI derived from `cls.faucet_base_url`.""" + def _faucet_request_uri(cls, url: Optional[str] = None) -> str: + """ + Generates the request URI derived from `cls.faucet_base_url` or provided url. 
+ + :param url: the url + """ if cls.testnet_faucet_url is None: # pragma: nocover raise ValueError("Testnet faucet url not set.") - return f"{cls.testnet_faucet_url}/claim/requests" + url = cls.testnet_faucet_url if url is None else url + return f"{url}/claim/requests" @classmethod - def _faucet_status_uri(cls, uid: str) -> str: + def _faucet_status_uri(cls, uid: str, url: Optional[str] = None) -> str: """Generates the status URI derived from `cls.faucet_base_url`.""" - return f"{cls._faucet_request_uri()}/{uid}" + return f"{cls._faucet_request_uri(url)}/{uid}" diff --git a/aea/crypto/ethereum.py b/aea/crypto/ethereum.py index c5e7a129d3..f51c741ba7 100644 --- a/aea/crypto/ethereum.py +++ b/aea/crypto/ethereum.py @@ -24,17 +24,17 @@ import time import warnings from pathlib import Path -from typing import Any, BinaryIO, Dict, Optional, Tuple, Union, cast +from typing import Any, BinaryIO, Callable, Dict, Optional, Tuple, Union, cast import requests from eth_account import Account -from eth_account.datastructures import SignedTransaction +from eth_account.datastructures import HexBytes, SignedTransaction from eth_account.messages import encode_defunct from eth_keys import keys from web3 import HTTPProvider, Web3 -from web3.types import TxParams +from web3.datastructures import AttributeDict -from aea.common import Address +from aea.common import Address, JSONLike from aea.crypto.base import Crypto, FaucetApi, Helper, LedgerApi from aea.exceptions import enforce from aea.helpers.base import try_decorator @@ -44,7 +44,6 @@ _ETHEREUM = "ethereum" GAS_ID = "gwei" -ETHEREUM_TESTNET_FAUCET_URL = "TBD" TESTNET_NAME = "ganache" DEFAULT_ADDRESS = "http://127.0.0.1:8545" DEFAULT_CHAIN_ID = 1337 @@ -54,6 +53,116 @@ _BYTECODE = "bytecode" +class SignedTransactionTranslator: + """Translator for SignedTransaction.""" + + @staticmethod + def to_dict(signed_transaction: SignedTransaction) -> Dict[str, Union[str, int]]: + """Write SignedTransaction to dict.""" + signed_transaction_dict = { + "raw_transaction": signed_transaction.rawTransaction.hex(), + "hash": signed_transaction.hash.hex(), + "r": signed_transaction.r, + "s": signed_transaction.s, + "v": signed_transaction.v, + } + return signed_transaction_dict + + @staticmethod + def from_dict(signed_transaction_dict: JSONLike) -> SignedTransaction: + """Get SignedTransaction from dict.""" + if ( + not isinstance(signed_transaction_dict, dict) + and len(signed_transaction_dict) == 5 + ): + raise ValueError( # pragma: nocover + f"Invalid for conversion. Found object: {signed_transaction_dict}." + ) + signed_transaction = SignedTransaction( + rawTransaction=HexBytes(signed_transaction_dict["raw_transaction"]), + hash=HexBytes(signed_transaction_dict["hash"]), + r=signed_transaction_dict["r"], + s=signed_transaction_dict["s"], + v=signed_transaction_dict["v"], + ) + return signed_transaction + + +class AttributeDictTranslator: + """Translator for AttributeDict.""" + + @classmethod + def _remove_hexbytes(cls, value): + """Process value to remove hexbytes.""" + if value is None: + return value + if isinstance(value, HexBytes): + return value.hex() + if isinstance(value, list): + return cls._process_list(value, cls._remove_hexbytes) + if type(value) in (bool, int, float, str, bytes): + return value + if isinstance(value, AttributeDict): + return cls.to_dict(value) + raise NotImplementedError( # pragma: nocover + f"Unknown type conversion. 
Found type: {type(value)}" + ) + + @classmethod + def _add_hexbytes(cls, value): + """Process value to add hexbytes.""" + if value is None: + return value + if isinstance(value, str): + try: + int(value, 16) + return HexBytes(value) + except Exception: # pylint: disable=broad-except + return value + if isinstance(value, list): + return cls._process_list(value, cls._add_hexbytes) + if isinstance(value, dict): + return cls.from_dict(value) + if type(value) in (bool, int, float, bytes): + return value + raise NotImplementedError( # pragma: nocover + f"Unknown type conversion. Found type: {type(value)}" + ) + + @classmethod + def _process_list(cls, li: list, callable_name: Callable): + """Simplify a list with process value.""" + return [callable_name(el) for el in li] + + @classmethod + def _valid_key(cls, key: Any) -> str: + """Check validity of key.""" + if isinstance(key, str): + return key + raise ValueError("Key must be string.") # pragma: nocover + + @classmethod + def to_dict(cls, attr_dict: AttributeDict) -> JSONLike: + """Simplify to dict.""" + if not isinstance(attr_dict, AttributeDict): + raise ValueError("No AttributeDict provided.") # pragma: nocover + result = { + cls._valid_key(key): cls._remove_hexbytes(value) + for key, value in attr_dict.items() + } + return result + + @classmethod + def from_dict(cls, di: JSONLike) -> AttributeDict: + """Get back attribute dict.""" + if not isinstance(di, dict): + raise ValueError("No dict provided.") # pragma: nocover + processed_dict = { + cls._valid_key(key): cls._add_hexbytes(value) for key, value in di.items() + } + return AttributeDict(processed_dict) + + class EthereumCrypto(Crypto[Account]): """Class wrapping the Account Generation from Ethereum ledger.""" @@ -132,7 +241,7 @@ def sign_message(self, message: bytes, is_deprecated_mode: bool = False) -> str: signed_msg = signature["signature"].hex() return signed_msg - def sign_transaction(self, transaction: Any) -> Any: + def sign_transaction(self, transaction: JSONLike) -> JSONLike: """ Sign a transaction in bytes string form. @@ -141,7 +250,10 @@ def sign_transaction(self, transaction: Any) -> Any: """ signed_transaction = self.entity.sign_transaction(transaction_dict=transaction) # Note: self.entity.signTransaction(transaction_dict=transaction) == signed_transaction # noqa: E800 - return signed_transaction + signed_transaction_dict = SignedTransactionTranslator.to_dict( + signed_transaction + ) + return cast(JSONLike, signed_transaction_dict) @classmethod def generate_private_key(cls) -> Account: @@ -163,7 +275,7 @@ class EthereumHelper(Helper): """Helper class usable as Mixin for EthereumApi or as standalone class.""" @staticmethod - def is_transaction_settled(tx_receipt: Any) -> bool: + def is_transaction_settled(tx_receipt: JSONLike) -> bool: """ Check whether a transaction is settled or not. @@ -172,12 +284,12 @@ def is_transaction_settled(tx_receipt: Any) -> bool: """ is_successful = False if tx_receipt is not None: - is_successful = tx_receipt.status == 1 + is_successful = tx_receipt.get("status", 0) == 1 return is_successful @staticmethod def is_transaction_valid( - tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int, + tx: dict, seller: Address, client: Address, tx_nonce: str, amount: int, ) -> bool: """ Check whether a transaction is valid or not. 
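To make the new dict-based representation concrete, an illustrative round trip through `AttributeDictTranslator` with invented values; `SignedTransactionTranslator` plays the analogous role for the signer's output:

from eth_account.datastructures import HexBytes
from web3.datastructures import AttributeDict

from aea.crypto.ethereum import AttributeDictTranslator

receipt = AttributeDict(
    {"status": 1, "blockNumber": 42, "transactionHash": HexBytes("0x" + "ab" * 32)}
)

as_json = AttributeDictTranslator.to_dict(receipt)      # HexBytes -> "0x..." strings
assert as_json["status"] == 1
assert as_json["transactionHash"].startswith("0x")

restored = AttributeDictTranslator.from_dict(as_json)   # hex strings -> HexBytes again
assert restored["blockNumber"] == 42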
@@ -311,6 +423,31 @@ def _try_get_balance(self, address: Address) -> Optional[int]: """Get the balance of a given account.""" return self._api.eth.getBalance(address) # pylint: disable=no-member + def get_state(self, callable_name: str, *args, **kwargs) -> Optional[JSONLike]: + """Call a specified function on the ledger API.""" + response = self._try_get_state(callable_name, *args, **kwargs) + return response + + @try_decorator("Unable to get state: {}", logger_method="warning") + def _try_get_state( # pylint: disable=unused-argument + self, callable_name: str, *args, **kwargs + ) -> Optional[JSONLike]: + """Try to call a function on the ledger API.""" + + function = getattr(self._api.eth, callable_name) + response = function(*args, **kwargs) + + if isinstance(response, AttributeDict): + result = AttributeDictTranslator.to_dict(response) + return result + + if type(response) in (int, float, bytes, str, list, dict): # pragma: nocover + # missing full checks for nested objects + return {f"{callable_name}_result": response} + raise NotImplementedError( # pragma: nocover + f"Response must be of types=int, float, bytes, str, list, dict. Found={type(response)}." + ) + def get_transfer_transaction( # pylint: disable=arguments-differ self, sender_address: Address, @@ -321,7 +458,7 @@ def get_transfer_transaction( # pylint: disable=arguments-differ chain_id: Optional[int] = None, gas_price: Optional[str] = None, **kwargs, - ) -> Optional[Any]: + ) -> Optional[JSONLike]: """ Submit a transfer transaction to the ledger. @@ -334,10 +471,12 @@ def get_transfer_transaction( # pylint: disable=arguments-differ :param gas_price: the gas price :return: the transfer transaction """ + transaction: Optional[JSONLike] = None chain_id = chain_id if chain_id is not None else self._chain_id gas_price = gas_price if gas_price is not None else self._gas_price nonce = self._try_get_transaction_count(sender_address) - + if nonce is None: + return transaction transaction = { "nonce": nonce, "chainId": chain_id, @@ -347,16 +486,7 @@ def get_transfer_transaction( # pylint: disable=arguments-differ "gasPrice": self._api.toWei(gas_price, GAS_ID), "data": tx_nonce, } - - gas_estimate = self._try_get_gas_estimate(transaction) - if gas_estimate is not None and tx_fee <= gas_estimate: # pragma: no cover - _default_logger.warning( - "Needed to increase tx_fee to cover the gas consumption of the transaction. Estimated gas consumption is: {}.".format( - gas_estimate - ) - ) - transaction["gas"] = gas_estimate - + transaction = self.update_with_gas_estimate(transaction) return transaction @try_decorator("Unable to retrieve transaction count: {}", logger_method="warning") @@ -367,17 +497,33 @@ def _try_get_transaction_count(self, address: Address) -> Optional[int]: ) return nonce + def update_with_gas_estimate(self, transaction: JSONLike) -> JSONLike: + """ + Attempts to update the transaction with a gas estimate + + :param transaction: the transaction + :return: the updated transaction + """ + gas_estimate = self._try_get_gas_estimate(transaction) + if gas_estimate is not None: + specified_gas = transaction["gas"] + if specified_gas < gas_estimate: + # eventually; there should be some specifiable strategy + _default_logger.warning( # pragma: nocover + f"Needed to increase gas to cover the gas consumption of the transaction. Estimated gas consumption is: {gas_estimate}. Specified gas was: {specified_gas}." 
+ ) + transaction["gas"] = gas_estimate + return transaction + @try_decorator("Unable to retrieve gas estimate: {}", logger_method="warning") - def _try_get_gas_estimate( - self, transaction: Dict[str, Union[str, int, None]] - ) -> Optional[int]: + def _try_get_gas_estimate(self, transaction: JSONLike) -> Optional[int]: """Try get the gas estimate.""" gas_estimate = self._api.eth.estimateGas( # pylint: disable=no-member - transaction=transaction + transaction=AttributeDictTranslator.from_dict(transaction) ) return gas_estimate - def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: + def send_signed_transaction(self, tx_signed: JSONLike) -> Optional[str]: """ Send a signed transaction and wait for confirmation. @@ -388,16 +534,16 @@ def send_signed_transaction(self, tx_signed: Any) -> Optional[str]: return tx_digest @try_decorator("Unable to send transaction: {}", logger_method="warning") - def _try_send_signed_transaction(self, tx_signed: Any) -> Optional[str]: + def _try_send_signed_transaction(self, tx_signed: JSONLike) -> Optional[str]: """ Try send a signed transaction. :param tx_signed: the signed transaction :return: tx_digest, if present """ - tx_signed = cast(SignedTransaction, tx_signed) + signed_transaction = SignedTransactionTranslator.from_dict(tx_signed) hex_value = self._api.eth.sendRawTransaction( # pylint: disable=no-member - tx_signed.rawTransaction + signed_transaction.rawTransaction ) tx_digest = hex_value.hex() _default_logger.debug( @@ -405,7 +551,7 @@ def _try_send_signed_transaction(self, tx_signed: Any) -> Optional[str]: ) return tx_digest - def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: + def get_transaction_receipt(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction receipt for a transaction digest. @@ -418,7 +564,7 @@ def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: @try_decorator( "Error when attempting getting tx receipt: {}", logger_method="debug" ) - def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: + def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[JSONLike]: """ Try get the transaction receipt. @@ -428,9 +574,9 @@ def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[Any]: tx_receipt = self._api.eth.getTransactionReceipt( # pylint: disable=no-member tx_digest ) - return tx_receipt + return AttributeDictTranslator.to_dict(tx_receipt) - def get_transaction(self, tx_digest: str) -> Optional[Any]: + def get_transaction(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction for a transaction digest. @@ -441,7 +587,7 @@ def get_transaction(self, tx_digest: str) -> Optional[Any]: return tx @try_decorator("Error when attempting getting tx: {}", logger_method="debug") - def _try_get_transaction(self, tx_digest: str) -> Optional[Any]: + def _try_get_transaction(self, tx_digest: str) -> Optional[JSONLike]: """ Get the transaction. @@ -449,7 +595,7 @@ def _try_get_transaction(self, tx_digest: str) -> Optional[Any]: :return: the tx, if found """ tx = self._api.eth.getTransaction(tx_digest) # pylint: disable=no-member - return tx + return AttributeDictTranslator.to_dict(tx) def get_contract_instance( self, contract_interface: Dict[str, str], contract_address: Optional[str] = None @@ -481,7 +627,7 @@ def get_deploy_transaction( # pylint: disable=arguments-differ value: int = 0, gas: int = 0, **kwargs, - ) -> Dict[str, Any]: + ) -> Optional[JSONLike]: """ Get the transaction to deploy the smart contract. 
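Putting the Ethereum changes together, an illustrative end-to-end sketch of the dict-based transaction flow; it assumes a funded key in `ethereum_private_key.txt`, a node at the default local address, and that the constructor takes the node URI as `address`:

from aea.crypto.ethereum import EthereumApi, EthereumCrypto, EthereumHelper

api = EthereumApi(address="http://127.0.0.1:8545")
crypto = EthereumCrypto(private_key_path="ethereum_private_key.txt")

transfer = api.get_transfer_transaction(
    sender_address=crypto.address,
    destination_address=crypto.address,
    amount=1,
    tx_fee=21000,   # becomes the initial "gas" field
    tx_nonce="0x",  # stored in the transaction's "data" field
)
# get_transfer_transaction already routes through update_with_gas_estimate, which
# raises transaction["gas"] when the node's estimate exceeds the specified value.

signed = crypto.sign_transaction(transfer)        # now a plain JSONLike dict
tx_digest = api.send_signed_transaction(signed)   # rebuilt via SignedTransactionTranslator
receipt = api.get_transaction_receipt(tx_digest)  # AttributeDict flattened to a dict
assert EthereumHelper.is_transaction_settled(receipt)  # checks receipt.get("status") == 1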
@@ -492,37 +638,23 @@ def get_deploy_transaction( # pylint: disable=arguments-differ :returns tx: the transaction dictionary. """ # create the transaction dict + transaction: Optional[JSONLike] = None _deployer_address = self.api.toChecksumAddress(deployer_address) nonce = self.api.eth.getTransactionCount(_deployer_address) + if nonce is None: + return transaction instance = self.get_contract_instance(contract_interface) data = instance.constructor(**kwargs).buildTransaction().get("data", "0x") - tx = { - "from": deployer_address, # only 'from' address, don't insert 'to' address! + transaction = { + "from": _deployer_address, # only 'from' address, don't insert 'to' address! "value": value, # transfer as part of deployment "gas": gas, "gasPrice": self.api.eth.gasPrice, "nonce": nonce, "data": data, } - tx = self.try_estimate_gas(tx) - return tx - - def try_estimate_gas(self, tx: Dict[str, Any]) -> Dict[str, Any]: - """ - Attempts to update the transaction with a gas estimate. - - :param tx: the transaction - :return: the transaction (potentially updated) - """ - try: - # try estimate the gas and update the transaction dict - _tx = cast(TxParams, tx) - gas_estimate = self.api.eth.estimateGas(transaction=_tx) - _default_logger.debug("gas estimate: {}".format(gas_estimate)) - tx["gas"] = gas_estimate - except Exception as e: # pylint: disable=broad-except # pragma: nocover - _default_logger.debug("Error when trying to estimate gas: {}".format(e)) - return tx + transaction = self.update_with_gas_estimate(transaction) + return transaction @classmethod def is_valid_address(cls, address: Address) -> bool: @@ -540,28 +672,34 @@ class EthereumFaucetApi(FaucetApi): identifier = _ETHEREUM testnet_name = TESTNET_NAME - def get_wealth(self, address: Address) -> None: + def get_wealth(self, address: Address, url: Optional[str] = None) -> None: """ Get wealth from the faucet for the provided address. :param address: the address. + :param url: the url :return: None """ - self._try_get_wealth(address) + self._try_get_wealth(address, url) @staticmethod @try_decorator( "An error occured while attempting to generate wealth:\n{}", logger_method="error", ) - def _try_get_wealth(address: Address) -> None: + def _try_get_wealth(address: Address, url: Optional[str] = None) -> None: """ Get wealth from the faucet for the provided address. :param address: the address. + :param url: the url :return: None """ - response = requests.get(ETHEREUM_TESTNET_FAUCET_URL + address) + if url is None: + raise ValueError( + "Url is none, no default url provided. Please provide a faucet url." + ) + response = requests.get(url + address) if response.status_code // 100 == 5: _default_logger.error("Response: {}".format(response.status_code)) elif response.status_code // 100 in [3, 4]: # pragma: nocover diff --git a/aea/crypto/helpers.py b/aea/crypto/helpers.py index 2adc4d7efa..a12e038614 100644 --- a/aea/crypto/helpers.py +++ b/aea/crypto/helpers.py @@ -23,6 +23,7 @@ import os import sys from pathlib import Path +from typing import Optional from aea.configurations.base import AgentConfig, PackageType from aea.configurations.constants import ( @@ -136,16 +137,17 @@ def create_private_key(ledger_id: str, private_key_file: str) -> None: def try_generate_testnet_wealth( - identifier: str, address: str, _sync: bool = True + identifier: str, address: str, url: Optional[str] = None, _sync: bool = True ) -> None: """ Try generate wealth on a testnet. 
:param identifier: the identifier of the ledger :param address: the address to check for + :param url: the url :param _sync: whether to wait to sync or not; currently unused :return: None """ faucet_api = make_faucet_api(identifier) if faucet_api is not None: - faucet_api.get_wealth(address) + faucet_api.get_wealth(address, url) diff --git a/aea/crypto/registries/base.py b/aea/crypto/registries/base.py index 5b35539ba0..a5cbdf5548 100644 --- a/aea/crypto/registries/base.py +++ b/aea/crypto/registries/base.py @@ -24,6 +24,7 @@ from typing import Any, Dict, Generic, Optional, Set, Type, TypeVar, Union from aea.configurations.base import PublicId +from aea.configurations.constants import DOTTED_PATH_MODULE_ELEMENT_SEPARATOR from aea.exceptions import AEAException from aea.helpers.base import RegexConstrainedString @@ -65,7 +66,11 @@ class EntryPoint(Generic[ItemType], RegexConstrainedString): path.to.module:className """ - REGEX = re.compile(r"^({pyid}(?:\.{pyid})*):({pyid})$".format(pyid=PY_ID_REGEX)) + REGEX = re.compile( + r"^({pyid}(?:\.{pyid})*){sep}({pyid})$".format( + pyid=PY_ID_REGEX, sep=DOTTED_PATH_MODULE_ELEMENT_SEPARATOR + ) + ) def __init__(self, seq): """Initialize the entrypoint.""" diff --git a/aea/crypto/wallet.py b/aea/crypto/wallet.py index bfa7060a81..0a1ae77ca6 100644 --- a/aea/crypto/wallet.py +++ b/aea/crypto/wallet.py @@ -22,6 +22,7 @@ import logging from typing import Any, Dict, Optional, cast +from aea.common import JSONLike from aea.crypto.base import Crypto from aea.crypto.registries import make_crypto @@ -157,7 +158,7 @@ def sign_message( signature = crypto_object.sign_message(message, is_deprecated_mode) return signature - def sign_transaction(self, crypto_id: str, transaction: Any) -> Optional[Any]: + def sign_transaction(self, crypto_id: str, transaction: Any) -> Optional[JSONLike]: """ Sign a tx. diff --git a/aea/error_handler/__init__.py b/aea/error_handler/__init__.py new file mode 100644 index 0000000000..4c72d3b51b --- /dev/null +++ b/aea/error_handler/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ + +"""This module contains the error handler modules.""" diff --git a/aea/error_handler/base.py b/aea/error_handler/base.py new file mode 100644 index 0000000000..15ff4a67ec --- /dev/null +++ b/aea/error_handler/base.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
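A small sketch of how the new faucet url parameter threads through try_generate_testnet_wealth above (the identifier, address and faucet URL are placeholders for illustration; no default URL is provided any more):

from aea.crypto.helpers import try_generate_testnet_wealth

try_generate_testnet_wealth(
    identifier="ethereum",
    address="0x0000000000000000000000000000000000000001",   # placeholder address
    url="https://some-testnet-faucet.example.com/donate/",   # hypothetical faucet endpoint
)
# EthereumFaucetApi._try_get_wealth now raises a ValueError when url is None, so
# callers are expected to supply the faucet endpoint explicitly.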
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +"""This module contains the abstract error handler class.""" +from abc import ABC, abstractmethod +from logging import Logger + +from aea.mail.base import Envelope + + +class AbstractErrorHandler(ABC): + """Error handler class for handling problematic envelopes.""" + + @classmethod + @abstractmethod + def send_unsupported_protocol(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle the received envelope in case the protocol is not supported. + + :param envelope: the envelope + :param logger: the logger + :return: None + """ + + @classmethod + @abstractmethod + def send_decoding_error(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle a decoding error. + + :param envelope: the envelope + :return: None + """ + + @classmethod + @abstractmethod + def send_unsupported_skill(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle the received envelope in case the skill is not supported. + + :param envelope: the envelope + :return: None + """ diff --git a/aea/helpers/temp_error_handler.py b/aea/error_handler/default.py similarity index 94% rename from aea/helpers/temp_error_handler.py rename to aea/error_handler/default.py index cbca88c374..e4de36b25a 100644 --- a/aea/helpers/temp_error_handler.py +++ b/aea/error_handler/default.py @@ -16,13 +16,16 @@ # limitations under the License. # # ------------------------------------------------------------------------------ -"""Temporary error handler.""" + +"""This module contains the default error handler class.""" + from logging import Logger +from aea.error_handler.base import AbstractErrorHandler from aea.mail.base import Envelope -class ErrorHandler: +class ErrorHandler(AbstractErrorHandler): """Error handler class for handling problematic envelopes.""" unsupported_protocol_count = 0 diff --git a/aea/error_handler/scaffold.py b/aea/error_handler/scaffold.py new file mode 100644 index 0000000000..03e451dd95 --- /dev/null +++ b/aea/error_handler/scaffold.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
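To illustrate the contract of the new AbstractErrorHandler above, a hedged sketch of a concrete subclass (the logging behaviour shown is an example only, not the default handler's implementation; the scaffold module below keeps the same shape with the bodies unimplemented):

from logging import Logger

from aea.error_handler.base import AbstractErrorHandler
from aea.mail.base import Envelope


class DropAndLogErrorHandler(AbstractErrorHandler):
    """Example handler that logs and drops problematic envelopes."""

    @classmethod
    def send_unsupported_protocol(cls, envelope: Envelope, logger: Logger) -> None:
        """Log an envelope whose protocol is not supported."""
        logger.warning("Dropping envelope with unsupported protocol %s", envelope.protocol_id)

    @classmethod
    def send_decoding_error(cls, envelope: Envelope, logger: Logger) -> None:
        """Log an envelope whose message could not be decoded."""
        logger.warning("Dropping undecodable envelope from %s", envelope.sender)

    @classmethod
    def send_unsupported_skill(cls, envelope: Envelope, logger: Logger) -> None:
        """Log an envelope that no skill could handle."""
        logger.warning("Dropping envelope with no matching skill (to=%s)", envelope.to)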
+# +# ------------------------------------------------------------------------------ + +"""This module contains a scaffold of the error handler class.""" +from logging import Logger + +from aea.error_handler.base import AbstractErrorHandler +from aea.mail.base import Envelope + + +class ErrorHandler(AbstractErrorHandler): + """This class implements the error handler.""" + + @classmethod + def send_unsupported_protocol(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle the received envelope in case the protocol is not supported. + + :param envelope: the envelope + :param logger: the logger + :return: None + """ + raise NotImplementedError + + @classmethod + def send_decoding_error(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle a decoding error. + + :param envelope: the envelope + :return: None + """ + raise NotImplementedError + + @classmethod + def send_unsupported_skill(cls, envelope: Envelope, logger: Logger) -> None: + """ + Handle the received envelope in case the skill is not supported. + + :param envelope: the envelope + :return: None + """ + raise NotImplementedError diff --git a/aea/helpers/async_utils.py b/aea/helpers/async_utils.py index 168b3da694..5321d2615d 100644 --- a/aea/helpers/async_utils.py +++ b/aea/helpers/async_utils.py @@ -670,8 +670,8 @@ async def _wait(self) -> None: def stop(self, force: bool = False) -> None: """Stop runnable.""" _default_logger.debug(f"{self} is going to be stopped {self._task}") - if not self._task or not self._loop: - return # pragma: nocover + if not self._task or not self._loop: # pragma: nocover + return if self._task.done(): return diff --git a/aea/helpers/base.py b/aea/helpers/base.py index 749cfc1bdf..481673ab56 100644 --- a/aea/helpers/base.py +++ b/aea/helpers/base.py @@ -16,9 +16,7 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - """Miscellaneous helpers.""" - import builtins import contextlib import importlib.util @@ -35,6 +33,7 @@ from copy import copy from functools import wraps from pathlib import Path +from threading import RLock from typing import Any, Callable, Deque, Dict, List, Set, TypeVar, Union from dotenv import load_dotenv @@ -499,3 +498,69 @@ def reachable_nodes( queue.extendleft(successors) visited.add(current) return result + + +_NOT_FOUND = object() + + +# copied from python3.8 functools +class cached_property: # pragma: nocover + """Cached property from python3.8 functools.""" + + def __init__(self, func): + """Init cached property.""" + self.func = func + self.attrname = None + self.__doc__ = func.__doc__ + self.lock = RLock() + + def __set_name__(self, _, name): + """Set name.""" + if self.attrname is None: + self.attrname = name + elif name != self.attrname: + raise TypeError( + "Cannot assign the same cached_property to two different names " + f"({self.attrname!r} and {name!r})." + ) + + def __get__(self, instance, _=None): + """Get instance.""" + if instance is None: + return self + if self.attrname is None: + raise TypeError( + "Cannot use cached_property instance without calling __set_name__ on it." + ) + try: + cache = instance.__dict__ + except AttributeError: # not all objects have __dict__ (e.g. class defines slots) + msg = ( + f"No '__dict__' attribute on {type(instance).__name__!r} " + f"instance to cache {self.attrname!r} property." 
+ ) + raise TypeError(msg) from None + val = cache.get(self.attrname, _NOT_FOUND) + if val is _NOT_FOUND: + with self.lock: + # check if another thread filled cache while we awaited lock + val = cache.get(self.attrname, _NOT_FOUND) + if val is _NOT_FOUND: + val = self.func(instance) + try: + cache[self.attrname] = val + except TypeError: + msg = ( + f"The '__dict__' attribute on {type(instance).__name__!r} instance " + f"does not support item assignment for caching {self.attrname!r} property." + ) + raise TypeError(msg) from None + return val + + +def ensure_dir(dir_path: str) -> None: + """Check if dir_path is a directory or create it.""" + if not os.path.exists(dir_path): + os.makedirs(dir_path) + else: + enforce(os.path.isdir(dir_path), f"{dir_path} is not a directory!") diff --git a/aea/helpers/file_io.py b/aea/helpers/file_io.py index 7c34a3116b..13f57aa396 100644 --- a/aea/helpers/file_io.py +++ b/aea/helpers/file_io.py @@ -20,6 +20,7 @@ """Read to and write from file with envelopes.""" +import codecs import logging from contextlib import contextmanager from logging import Logger @@ -66,7 +67,9 @@ def _decode(e: bytes, separator: bytes = SEPARATOR): # protobuf messages cannot be delimited as they can contain an arbitrary byte sequence; however # we know everything remaining constitutes the protobuf message. message = SEPARATOR.join(split[3:-1]) - # message = codecs.decode(message, "unicode-escape").encode("utf-8") # noqa: E800 + if b"\\x" in message: # pragma: nocover + # hack to account for manual usage of `echo` + message = codecs.decode(message, "unicode-escape").encode("utf-8") return Envelope(to=to, sender=sender, protocol_id=protocol_id, message=message) diff --git a/aea/helpers/profiling.py b/aea/helpers/profiling.py index f53ad63e5e..cee4959cd1 100644 --- a/aea/helpers/profiling.py +++ b/aea/helpers/profiling.py @@ -128,7 +128,7 @@ async def run(self) -> None: while True: await asyncio.sleep(self._period) self.output_profile_data() - except CancelledError: + except CancelledError: # pragma: nocover pass except Exception: # pragma: nocover _default_logger.exception("Exception in Profiling") diff --git a/aea/helpers/search/models.proto b/aea/helpers/search/models.proto new file mode 100644 index 0000000000..556f1c3feb --- /dev/null +++ b/aea/helpers/search/models.proto @@ -0,0 +1,150 @@ +syntax = "proto3"; + +package aea.helpers.search.models; + +message Query { + message Attribute { + enum Type { + DOUBLE = 0; + INT = 1; + BOOL = 2; + STRING = 3; + LOCATION = 4; + } + string name = 1; + Type type = 2; + bool required = 3; + string description = 4; + } + message DataModel { + string name = 1; + repeated Attribute attributes = 2; + string description = 3; + } + message Location { + double lon = 1; + double lat = 2; + } + message Value { + oneof value { + string string = 1; + double double = 2; + bool boolean = 3; + int64 integer = 4; + Location location = 5; + } + } + message KeyValue { + string key = 1; + Value value = 2; + } + message Instance { + DataModel model = 1; + repeated KeyValue values = 2; + } + message StringPair { + string first = 1; + string second = 2; + } + message IntPair { + int64 first = 1; + int64 second = 2; + } + message DoublePair { + double first = 1; + double second = 2; + } + message LocationPair { + Location first = 1; + Location second = 2; + } + message Range { + oneof pair { + StringPair string_pair = 1; + IntPair integer_pair = 2; + DoublePair double_pair = 3; + LocationPair location_pair = 4; + } + } + message Distance { + Location center = 1; + 
double distance = 2; + } + message Relation { + enum Operator { + EQ = 0; // = + LT = 1; // < + LTEQ = 2; // <= + GT = 3; // > + GTEQ = 4; // >= + NOTEQ = 5; // !=, <> + } + Operator operator = 1; + Value value = 2; + } + message Set { + message Values { + message Ints { + repeated int64 values = 1; + } + message Doubles { + repeated double values = 1; + } + message Strings { + repeated string values = 1; + } + message Bools { + repeated bool values = 1; + } + message Locations { + repeated Location values = 1; + } + oneof values { + Strings string = 1; + Doubles double = 2; + Bools boolean = 3; + Ints integer = 4; + Locations location = 5; + } + } + enum Operator { + IN = 0; + NOTIN = 1; + } + Operator operator = 1; + Values values = 2; + } + message ConstraintExpr { + message Or { + repeated ConstraintExpr expression = 1; + } + message And { + repeated ConstraintExpr expression = 1; + } + message Not { + ConstraintExpr expression = 1; + } + message Constraint { + string attribute_name = 1; + oneof constraint { + Set set_ = 2; + Range range_ = 3; + Relation relation = 4; + Distance distance = 5; + } + } + oneof expression { + Or or_ = 1; + And and_ = 2; + Not not_ = 3; + Constraint constraint = 4; + } + } + message Model { + repeated ConstraintExpr constraints = 1; + DataModel model = 2; + } +} + +// option optimize_for = LITE_RUNTIME; +option optimize_for = SPEED; \ No newline at end of file diff --git a/aea/helpers/search/models.py b/aea/helpers/search/models.py index 1e615d9474..1eed886e40 100644 --- a/aea/helpers/search/models.py +++ b/aea/helpers/search/models.py @@ -20,18 +20,67 @@ """Useful classes for the OEF search.""" import logging -import pickle # nosec from abc import ABC, abstractmethod from copy import deepcopy from enum import Enum from math import asin, cos, radians, sin, sqrt from typing import Any, Dict, List, Mapping, Optional, Tuple, Type, Union, cast +import aea.helpers.search.models_pb2 as models_pb2 from aea.exceptions import enforce _default_logger = logging.getLogger(__name__) +proto_value = { + "string": "string", + "double": "double", + "boolean": "boolean", + "integer": "integer", + "location": "location", +} + +proto_range_pairs = { + "string": "string_pair", + "integer": "integer_pair", + "double": "double_pair", + "location": "location_pair", +} + +proto_set_values = { + "string": "string", + "double": "double", + "boolean": "boolean", + "integer": "integer", + "location": "location", +} + +proto_constraint = { + "set": "set_", + "range": "range_", + "relation": "relation", + "distance": "distance", +} + +proto_expression = { + "or": "or_", + "and": "and_", + "not": "not_", + "constraint": "constraint", +} + +CONSTRAINT_CATEGORY_RELATION = "relation" +CONSTRAINT_CATEGORY_RANGE = "range" +CONSTRAINT_CATEGORY_SET = "set" +CONSTRAINT_CATEGORY_DISTANCE = "distance" + +CONSTRAINT_CATEGORIES = [ + CONSTRAINT_CATEGORY_RELATION, + CONSTRAINT_CATEGORY_RANGE, + CONSTRAINT_CATEGORY_SET, + CONSTRAINT_CATEGORY_DISTANCE, +] + class Location: """Data structure to represent locations (i.e. 
a pair of latitude and longitude).""" @@ -49,7 +98,7 @@ def __init__(self, latitude: float, longitude: float): @property def tuple(self) -> Tuple[float, float]: """Get the tuple representation of a location.""" - return (self.latitude, self.longitude) + return self.latitude, self.longitude def distance(self, other: "Location") -> float: """ @@ -72,6 +121,29 @@ def __str__(self): self.latitude, self.longitude ) + def encode(self) -> models_pb2.Query.Location: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + location_pb = models_pb2.Query.Location() # type: ignore + location_pb.lat = self.latitude + location_pb.lon = self.longitude + return location_pb + + @classmethod + def decode(cls, location_pb) -> "Location": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param location_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + latitude = location_pb.lat + longitude = location_pb.lon + return cls(latitude, longitude) + """ The allowable types that an Attribute can have @@ -94,6 +166,14 @@ class AttributeInconsistencyException(Exception): class Attribute: """Implements an attribute for an OEF data model.""" + _attribute_type_to_pb = { + bool: models_pb2.Query.Attribute.BOOL, # type: ignore + int: models_pb2.Query.Attribute.INT, # type: ignore + float: models_pb2.Query.Attribute.DOUBLE, # type: ignore + str: models_pb2.Query.Attribute.STRING, # type: ignore + Location: models_pb2.Query.Attribute.LOCATION, # type: ignore + } + def __init__( self, name: str, @@ -105,7 +185,7 @@ def __init__( Initialize an attribute. :param name: the name of the attribute. - :param type: the type of the attribute. + :param type_: the type of the attribute. :param is_required: whether the attribute is required by the data model. :param description: an (optional) human-readable description for the attribute. """ @@ -129,6 +209,36 @@ def __str__(self): self.name, self.type, self.is_required ) + def encode(self) -> models_pb2.Query.Attribute: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + attribute = models_pb2.Query.Attribute() # type: ignore + attribute.name = self.name + attribute.type = self._attribute_type_to_pb[self.type] + attribute.required = self.is_required + if self.description is not None: + attribute.description = self.description + return attribute + + @classmethod + def decode(cls, attribute_pb) -> "Attribute": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param attribute_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + _pb_to_attribute_type = {v: k for k, v in cls._attribute_type_to_pb.items()} + return cls( + attribute_pb.name, + _pb_to_attribute_type[attribute_pb.type], + attribute_pb.required, + attribute_pb.description if attribute_pb.description else None, + ) + class DataModel: """Implements an OEF data model.""" @@ -138,7 +248,7 @@ def __init__(self, name: str, attributes: List[Attribute], description: str = "" Initialize a data model. :param name: the name of the data model. - :param attributes: the attributes of the data model. + :param attributes: the attributes of the data model. 
""" self.name: str = name self.attributes = sorted( @@ -172,6 +282,32 @@ def __str__(self): self.name, {a.name: str(a) for a in self.attributes}, self.description ) + def encode(self) -> models_pb2.Query.DataModel: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + model = models_pb2.Query.DataModel() # type: ignore + model.name = self.name + model.attributes.extend([attr.encode() for attr in self.attributes]) + if self.description is not None: + model.description = self.description + return model + + @classmethod + def decode(cls, data_model_pb) -> "DataModel": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param data_model_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + name = data_model_pb.name + attributes = [Attribute.decode(attr_pb) for attr_pb in data_model_pb.attributes] + description = data_model_pb.description + return cls(name, attributes, description) + def generate_data_model( model_name: str, attribute_values: Mapping[str, ATTRIBUTE_TYPES] @@ -292,36 +428,118 @@ def __str__(self): self._values, self.data_model ) + @staticmethod + def _to_key_value_pb(key: str, value: ATTRIBUTE_TYPES) -> models_pb2.Query.KeyValue: # type: ignore + """ + From a (key, attribute value) pair to the associated Protobuf object. + + :param key: the key of the attribute. + :param value: the value of the attribute. + + :return: the associated Protobuf object. + """ + + kv = models_pb2.Query.KeyValue() # type: ignore + kv.key = key + if type(value) == bool: # pylint: disable=unidiomatic-typecheck + kv.value.boolean = value + elif type(value) == int: # pylint: disable=unidiomatic-typecheck + kv.value.integer = value + elif type(value) == float: # pylint: disable=unidiomatic-typecheck + kv.value.double = value + elif type(value) == str: # pylint: disable=unidiomatic-typecheck + kv.value.string = value + elif type(value) == Location: # pylint: disable=unidiomatic-typecheck + kv.value.location.CopyFrom(value.encode()) # type: ignore + + return kv + + def _encode(self) -> models_pb2.Query.Instance: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + instance = models_pb2.Query.Instance() # type: ignore + instance.model.CopyFrom(self.data_model.encode()) + instance.values.extend( + [self._to_key_value_pb(key, value) for key, value in self.values.items()] + ) + return instance + @classmethod - def encode( - cls, description_protobuf_object, description_object: "Description" - ) -> None: + def encode(cls, description_pb, description: "Description") -> None: """ Encode an instance of this class into the protocol buffer object. - The protocol buffer object in the description_protobuf_object argument must be matched with the instance of this class in the 'description_object' argument. + The protocol buffer object in the description_protobuf_object argument must be matched + with the instance of this class in the 'description_object' argument. + + :param description_pb: the protocol buffer object whose type corresponds with this class. + :param description: an instance of this class to be encoded in the protocol buffer object. - :param description_protobuf_object: the protocol buffer object whose type corresponds with this class. 
- :param description_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - description_from_message_bytes = pickle.dumps(description_object) # nosec - description_protobuf_object.description = description_from_message_bytes + description_bytes_pb = description._encode() # pylint: disable=protected-access + description_bytes_bytes = description_bytes_pb.SerializeToString() + description_pb.description_bytes = description_bytes_bytes + + @staticmethod + def _extract_value(value: models_pb2.Query.Value) -> ATTRIBUTE_TYPES: # type: ignore + """ + From a Protobuf query value object to attribute type. + + :param value: an instance of models_pb2.Query.Value. + :return: the associated attribute type. + """ + value_case = value.WhichOneof("value") + + if value_case == proto_value["string"]: + result = value.string + elif value_case == proto_value["boolean"]: + result = bool(value.boolean) + elif value_case == proto_value["integer"]: + result = value.integer + elif value_case == proto_value["double"]: + result = value.double + elif value_case == proto_value["location"]: + result = Location.decode(value.location) + else: + raise ValueError( # pragma: nocover + f"Incorrect value. Expected either of {list(proto_value.values())}. Found {value_case}." + ) + + return result @classmethod - def decode(cls, description_protobuf_object) -> "Description": + def _decode(cls, description_pb) -> "Description": """ Decode a protocol buffer object that corresponds with this class into an instance of this class. - A new instance of this class must be created that matches the protocol buffer object in the 'description_protobuf_object' argument. + :param description_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + model = DataModel.decode(description_pb.model) + values = { + attr.key: cls._extract_value(attr.value) for attr in description_pb.values + } + return cls(values, model) - :param description_protobuf_object: the protocol buffer object whose type corresponds with this class. + @classmethod + def decode(cls, description_pb) -> "Description": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + A new instance of this class must be created that matches the protocol + buffer object in the 'description_protobuf_object' argument. + + :param description_pb: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'description_protobuf_object' argument. 
""" - service_description = pickle.loads( # nosec - description_protobuf_object.description - ) - return service_description + description_bytes_pb = models_pb2.Query.Instance() # type: ignore + description_bytes_pb.ParseFromString(description_pb.description_bytes) + description = cls._decode(description_bytes_pb) + return description class ConstraintTypes(Enum): @@ -358,8 +576,8 @@ class ConstraintType: >>> not_equal_london = ConstraintType("!=", "London") >>> less_than_pi = ConstraintType("<", 3.14) >>> within_range = ConstraintType("within", (-10.0, 10.0)) - >>> in_a_set = ConstraintType("in", [1, 2, 3]) - >>> not_in_a_set = ConstraintType("not_in", {"C", "Java", "Python"}) + >>> in_a_set = ConstraintType("in", (1, 2, 3)) + >>> not_in_a_set = ConstraintType("not_in", ("C", "Java", "Python")) """ @@ -367,7 +585,7 @@ def __init__(self, type_: Union[ConstraintTypes, str], value: Any): """ Initialize a constraint type. - :param type: the type of the constraint. + :param type_: the type of the constraint. | Either an instance of the ConstraintTypes enum, | or a string representation associated with the type. :param value: the value that defines the constraint. @@ -416,9 +634,10 @@ def check_validity(self): f"Expected one of type in (int, float, str), got {self.value}", ) elif self.type == ConstraintTypes.WITHIN: + allowed_sub_types = (int, float, str) enforce( - isinstance(self.value, (list, tuple)), - f"Expected one of type in (list, tuple), got {self.value}", + isinstance(self.value, tuple), + f"Expected tuple, got {type(self.value)}", ) enforce( len(self.value) == 2, f"Expected length=2, got {len(self.value)}" @@ -429,10 +648,18 @@ def check_validity(self): enforce( isinstance(self.value[1], type(self.value[0])), "Invalid types." ) + enforce( + isinstance(self.value[0], allowed_sub_types), + f"Invalid type for first element. Expected either of {allowed_sub_types}. Found {type(self.value[0])}.", + ) + enforce( + isinstance(self.value[1], allowed_sub_types), + f"Invalid type for second element. Expected either of {allowed_sub_types}. Found {type(self.value[1])}.", + ) elif self.type == ConstraintTypes.IN: enforce( - isinstance(self.value, (list, tuple, set)), - f"Expected one of type in (list, tuple, set), got {self.value}", + isinstance(self.value, tuple), + f"Expected tuple, got {type(self.value)}", ) if len(self.value) > 0: _type = type(next(iter(self.value))) @@ -442,8 +669,8 @@ def check_validity(self): ) elif self.type == ConstraintTypes.NOT_IN: enforce( - isinstance(self.value, (list, tuple, set)), - f"Expected one of type in (list, tuple, set), got {self.value}", + isinstance(self.value, tuple), + f"Expected tuple, got {type(self.value)}", ) if len(self.value) > 0: _type = type(next(iter(self.value))) @@ -453,8 +680,8 @@ def check_validity(self): ) elif self.type == ConstraintTypes.DISTANCE: enforce( - isinstance(self.value, (list, tuple)), - f"Expected one of type in (list, tuple), got {self.value}", + isinstance(self.value, tuple), + f"Expected tuple, got {type(self.value)}", ) enforce( len(self.value) == 2, f"Expected length=2, got {len(self.value)}" @@ -529,17 +756,17 @@ def check(self, value: ATTRIBUTE_TYPES) -> bool: :raises ValueError: if the constraint type is not recognized. 
""" if self.type == ConstraintTypes.EQUAL: - return self.value == value + return value == self.value if self.type == ConstraintTypes.NOT_EQUAL: - return self.value != value + return value != self.value if self.type == ConstraintTypes.LESS_THAN: - return self.value < value + return value < self.value if self.type == ConstraintTypes.LESS_THAN_EQ: - return self.value <= value + return value <= self.value if self.type == ConstraintTypes.GREATER_THAN: - return self.value > value + return value > self.value if self.type == ConstraintTypes.GREATER_THAN_EQ: - return self.value >= value + return value >= self.value if self.type == ConstraintTypes.WITHIN: low = self.value[0] high = self.value[1] @@ -568,6 +795,221 @@ def __str__(self): """Get the string representation of the constraint type.""" return "ConstraintType(value={},type={})".format(self.value, self.type) + def encode(self): + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + encoding = None + + if ( + self.type == ConstraintTypes.EQUAL + or self.type == ConstraintTypes.NOT_EQUAL + or self.type == ConstraintTypes.LESS_THAN + or self.type == ConstraintTypes.LESS_THAN_EQ + or self.type == ConstraintTypes.GREATER_THAN + or self.type == ConstraintTypes.GREATER_THAN_EQ + ): + relation = models_pb2.Query.Relation() + + if self.type == ConstraintTypes.EQUAL: + relation.operator = models_pb2.Query.Relation.EQ + elif self.type == ConstraintTypes.NOT_EQUAL: + relation.operator = models_pb2.Query.Relation.NOTEQ + elif self.type == ConstraintTypes.LESS_THAN: + relation.operator = models_pb2.Query.Relation.LT + elif self.type == ConstraintTypes.LESS_THAN_EQ: + relation.operator = models_pb2.Query.Relation.LTEQ + elif self.type == ConstraintTypes.GREATER_THAN: + relation.operator = models_pb2.Query.Relation.GT + elif self.type == ConstraintTypes.GREATER_THAN_EQ: + relation.operator = models_pb2.Query.Relation.GTEQ + + query_value = models_pb2.Query.Value() + + if isinstance(self.value, bool): + query_value.boolean = self.value + elif isinstance(self.value, int): + query_value.integer = self.value + elif isinstance(self.value, float): + query_value.double = self.value + elif isinstance(self.value, str): + query_value.string = self.value + relation.value.CopyFrom(query_value) + + encoding = relation + + elif self.type == ConstraintTypes.WITHIN: + range_ = models_pb2.Query.Range() + + if type(self.value[0]) == str: # pylint: disable=unidiomatic-typecheck + values = models_pb2.Query.StringPair() + values.first = self.value[0] + values.second = self.value[1] + range_.string_pair.CopyFrom(values) + elif type(self.value[0]) == int: # pylint: disable=unidiomatic-typecheck + values = models_pb2.Query.IntPair() + values.first = self.value[0] + values.second = self.value[1] + range_.integer_pair.CopyFrom(values) + elif type(self.value[0]) == float: # pylint: disable=unidiomatic-typecheck + values = models_pb2.Query.DoublePair() + values.first = self.value[0] + values.second = self.value[1] + range_.double_pair.CopyFrom(values) + encoding = range_ + + elif self.type == ConstraintTypes.IN or self.type == ConstraintTypes.NOT_IN: + set_ = models_pb2.Query.Set() + + if self.type == ConstraintTypes.IN: + set_.operator = models_pb2.Query.Set.IN + elif self.type == ConstraintTypes.NOT_IN: + set_.operator = models_pb2.Query.Set.NOTIN + + value_type = type(self.value[0]) if len(self.value) > 0 else str + + if value_type == str: + values = models_pb2.Query.Set.Values.Strings() + 
values.values.extend(self.value) + set_.values.string.CopyFrom(values) + elif value_type == bool: + values = models_pb2.Query.Set.Values.Bools() + values.values.extend(self.value) + set_.values.boolean.CopyFrom(values) + elif value_type == int: + values = models_pb2.Query.Set.Values.Ints() + values.values.extend(self.value) + set_.values.integer.CopyFrom(values) + elif value_type == float: + values = models_pb2.Query.Set.Values.Doubles() + values.values.extend(self.value) + set_.values.double.CopyFrom(values) + elif value_type == Location: + values = models_pb2.Query.Set.Values.Locations() + values.values.extend([value.encode() for value in self.value]) + set_.values.location.CopyFrom(values) + + encoding = set_ + + elif self.type == ConstraintTypes.DISTANCE: + distance_pb = models_pb2.Query.Distance() + distance_pb.distance = self.value[1] + distance_pb.center.CopyFrom(self.value[0].encode()) + + encoding = distance_pb + + return encoding + + @classmethod + def decode(cls, constraint_type_pb, category: str) -> "ConstraintType": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param constraint_type_pb: the protocol buffer object corresponding with this class. + :param category: the category of the constraint ('relation', 'set', 'range', 'distance). + + :return: A new instance of this class matching the protocol buffer object + """ + decoding: ConstraintType + + relation_type_from_pb = { + models_pb2.Query.Relation.Operator.GTEQ: ConstraintTypes.GREATER_THAN_EQ, # type: ignore + models_pb2.Query.Relation.Operator.GT: ConstraintTypes.GREATER_THAN, # type: ignore + models_pb2.Query.Relation.Operator.LTEQ: ConstraintTypes.LESS_THAN_EQ, # type: ignore + models_pb2.Query.Relation.Operator.LT: ConstraintTypes.LESS_THAN, # type: ignore + models_pb2.Query.Relation.Operator.NOTEQ: ConstraintTypes.NOT_EQUAL, # type: ignore + models_pb2.Query.Relation.Operator.EQ: ConstraintTypes.EQUAL, # type: ignore + } + set_type_from_pb = { + models_pb2.Query.Set.Operator.IN: ConstraintTypes.IN, # type: ignore + models_pb2.Query.Set.Operator.NOTIN: ConstraintTypes.NOT_IN, # type: ignore + } + + if category == CONSTRAINT_CATEGORY_RELATION: + relation_enum = relation_type_from_pb[constraint_type_pb.operator] + value_case = constraint_type_pb.value.WhichOneof("value") + if value_case == proto_value["string"]: + decoding = ConstraintType( + relation_enum, constraint_type_pb.value.string + ) + elif value_case == proto_value["boolean"]: + decoding = ConstraintType( + relation_enum, constraint_type_pb.value.boolean + ) + elif value_case == proto_value["integer"]: + decoding = ConstraintType( + relation_enum, constraint_type_pb.value.integer + ) + elif value_case == proto_value["double"]: + decoding = ConstraintType( + relation_enum, constraint_type_pb.value.double + ) + elif category == CONSTRAINT_CATEGORY_RANGE: + range_enum = ConstraintTypes.WITHIN + range_case = constraint_type_pb.WhichOneof("pair") + if range_case == proto_range_pairs["string"]: + decoding = ConstraintType( + range_enum, + ( + constraint_type_pb.string_pair.first, + constraint_type_pb.string_pair.second, + ), + ) + elif range_case == proto_range_pairs["integer"]: + decoding = ConstraintType( + range_enum, + ( + constraint_type_pb.integer_pair.first, + constraint_type_pb.integer_pair.second, + ), + ) + elif range_case == proto_range_pairs["double"]: + decoding = ConstraintType( + range_enum, + ( + constraint_type_pb.double_pair.first, + constraint_type_pb.double_pair.second, + ), + ) + elif 
category == CONSTRAINT_CATEGORY_SET: + set_enum = set_type_from_pb[constraint_type_pb.operator] + value_case = constraint_type_pb.values.WhichOneof("values") + if value_case == proto_set_values["string"]: + decoding = ConstraintType( + set_enum, tuple(constraint_type_pb.values.string.values), + ) + elif value_case == proto_set_values["boolean"]: + decoding = ConstraintType( + set_enum, tuple(constraint_type_pb.values.boolean.values), + ) + elif value_case == proto_set_values["integer"]: + decoding = ConstraintType( + set_enum, tuple(constraint_type_pb.values.integer.values), + ) + elif value_case == proto_set_values["double"]: + decoding = ConstraintType( + set_enum, tuple(constraint_type_pb.values.double.values), + ) + elif value_case == proto_set_values["location"]: + locations = [ + Location.decode(loc) + for loc in constraint_type_pb.values.location.values + ] + location_tuple = tuple(locations) + decoding = ConstraintType(set_enum, location_tuple) + elif category == CONSTRAINT_CATEGORY_DISTANCE: + distance_enum = ConstraintTypes.DISTANCE + center = Location.decode(constraint_type_pb.center) + distance = constraint_type_pb.distance + decoding = ConstraintType(distance_enum, (center, distance)) + else: + raise ValueError( + f"Incorrect category. Expected either of {CONSTRAINT_CATEGORIES}. Found {category}." + ) + return decoding + class ConstraintExpr(ABC): """Implementation of the constraint language to query the OEF node.""" @@ -602,6 +1044,57 @@ def check_validity(self) -> None: # pylint: disable=no-self-use # pragma: noco """ return None + @staticmethod + def _encode(expression) -> models_pb2.Query.ConstraintExpr: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + constraint_expression_pb = models_pb2.Query.ConstraintExpr() # type: ignore + expression_pb = expression.encode() + if isinstance(expression, And): + constraint_expression_pb.and_.CopyFrom(expression_pb) + elif isinstance(expression, Or): + constraint_expression_pb.or_.CopyFrom(expression_pb) + elif isinstance(expression, Not): + constraint_expression_pb.not_.CopyFrom(expression_pb) + elif isinstance(expression, Constraint): + constraint_expression_pb.constraint.CopyFrom(expression_pb) + else: + raise ValueError( + f"Invalid expression type. Expected either of 'And', 'Or', 'Not', 'Constraint'. Found {type(expression)}." + ) + + return constraint_expression_pb + + @staticmethod + def _decode(constraint_expression_pb) -> "ConstraintExpr": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param constraint_expression_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + expression = constraint_expression_pb.WhichOneof("expression") + + result: Optional[Union[And, Or, Not, Constraint]] = None + + if expression == proto_expression["and"]: + result = And.decode(constraint_expression_pb.and_) + elif expression == proto_expression["or"]: + result = Or.decode(constraint_expression_pb.or_) + elif expression == proto_expression["not"]: + result = Not.decode(constraint_expression_pb.not_) + elif expression == proto_expression["constraint"]: + result = Constraint.decode(constraint_expression_pb.constraint) + else: # pragma: nocover + raise ValueError( + f"Incorrect argument. Expected either of {list(proto_expression.keys())}. Found {expression}." 
+ ) + + return result + class And(ConstraintExpr): """Implementation of the 'And' constraint expression.""" @@ -613,6 +1106,7 @@ def __init__(self, constraints: List[ConstraintExpr]): :param constraints: the list of constraints expression (in conjunction). """ self.constraints = constraints + self.check_validity() def check(self, description: Description) -> bool: """ @@ -621,7 +1115,7 @@ def check(self, description: Description) -> bool: :param description: the description to check. :return: True if the description satisfy the constraint expression, False otherwise. """ - return all(expr.check(description) for expr in self.constraints) + return all(expression.check(description) for expression in self.constraints) def is_valid(self, data_model: DataModel) -> bool: """ @@ -651,6 +1145,30 @@ def __eq__(self, other): # pragma: nocover """Compare with another object.""" return isinstance(other, And) and self.constraints == other.constraints + def encode(self) -> models_pb2.Query.ConstraintExpr.And: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + and_pb = models_pb2.Query.ConstraintExpr.And() # type: ignore + constraint_expression_pbs = [ + ConstraintExpr._encode(constraint) for constraint in self.constraints + ] + and_pb.expression.extend(constraint_expression_pbs) + return and_pb + + @classmethod + def decode(cls, and_pb) -> "And": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param and_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + expression = [cls._decode(c) for c in and_pb.expression] + return cls(expression) + class Or(ConstraintExpr): """Implementation of the 'Or' constraint expression.""" @@ -662,6 +1180,7 @@ def __init__(self, constraints: List[ConstraintExpr]): :param constraints: the list of constraints expressions (in disjunction). """ self.constraints = constraints + self.check_validity() def check(self, description: Description) -> bool: """ @@ -670,7 +1189,7 @@ def check(self, description: Description) -> bool: :param description: the description to check. :return: True if the description satisfy the constraint expression, False otherwise. """ - return any(expr.check(description) for expr in self.constraints) + return any(expression.check(description) for expression in self.constraints) def is_valid(self, data_model: DataModel) -> bool: """ @@ -700,6 +1219,30 @@ def __eq__(self, other): # pragma: nocover """Compare with another object.""" return isinstance(other, Or) and self.constraints == other.constraints + def encode(self) -> models_pb2.Query.ConstraintExpr.Or: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + or_pb = models_pb2.Query.ConstraintExpr.Or() # type: ignore + constraint_expression_pbs = [ + ConstraintExpr._encode(constraint) for constraint in self.constraints + ] + or_pb.expression.extend(constraint_expression_pbs) + return or_pb + + @classmethod + def decode(cls, or_pb) -> "Or": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param or_pb: the protocol buffer object corresponding with this class. 
+ :return: A new instance of this class matching the protocol buffer object + """ + expression = [ConstraintExpr._decode(c) for c in or_pb.expression] + return cls(expression) + class Not(ConstraintExpr): """Implementation of the 'Not' constraint expression.""" @@ -734,6 +1277,28 @@ def __eq__(self, other): # pragma: nocover """Compare with another object.""" return isinstance(other, Not) and self.constraint == other.constraint + def encode(self) -> models_pb2.Query.ConstraintExpr.Not: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + not_pb = models_pb2.Query.ConstraintExpr.Not() # type: ignore + constraint_expression_pb = ConstraintExpr._encode(self.constraint) + not_pb.expression.CopyFrom(constraint_expression_pb) + return not_pb + + @classmethod + def decode(cls, not_pb) -> "Not": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param not_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + expression = ConstraintExpr._decode(not_pb.expression) + return cls(expression) + class Constraint(ConstraintExpr): """The atomic component of a constraint expression.""" @@ -761,7 +1326,7 @@ def check(self, description: Description) -> bool: >>> attr_genre = Attribute("genre", str, True, "The genre of the book.") >>> c1 = Constraint("author", ConstraintType("==", "Stephen King")) >>> c2 = Constraint("year", ConstraintType(">", 1990)) - >>> c3 = Constraint("genre", ConstraintType("in", {"horror", "science_fiction"})) + >>> c3 = Constraint("genre", ConstraintType("in", ("horror", "science_fiction"))) >>> book_1 = Description({"author": "Stephen King", "year": 1991, "genre": "horror"}) >>> book_2 = Description({"author": "George Orwell", "year": 1948, "genre": "horror"}) @@ -840,6 +1405,63 @@ def __str__(self): self.attribute_name, self.constraint_type ) + def encode(self) -> models_pb2.Query.ConstraintExpr.Constraint: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + constraint = models_pb2.Query.ConstraintExpr.Constraint() # type: ignore + constraint.attribute_name = self.attribute_name + + if ( + self.constraint_type.type == ConstraintTypes.EQUAL + or self.constraint_type.type == ConstraintTypes.NOT_EQUAL + or self.constraint_type.type == ConstraintTypes.LESS_THAN + or self.constraint_type.type == ConstraintTypes.LESS_THAN_EQ + or self.constraint_type.type == ConstraintTypes.GREATER_THAN + or self.constraint_type.type == ConstraintTypes.GREATER_THAN_EQ + ): + constraint.relation.CopyFrom(self.constraint_type.encode()) + elif self.constraint_type.type == ConstraintTypes.WITHIN: + constraint.range_.CopyFrom(self.constraint_type.encode()) + elif ( + self.constraint_type.type == ConstraintTypes.IN + or self.constraint_type.type == ConstraintTypes.NOT_IN + ): + constraint.set_.CopyFrom(self.constraint_type.encode()) + elif self.constraint_type.type == ConstraintTypes.DISTANCE: + constraint.distance.CopyFrom(self.constraint_type.encode()) + else: # pragma: nocover + raise ValueError( + f"Incorrect constraint type. Expected a ConstraintTypes. Found {self.constraint_type.type}." + ) + return constraint + + @classmethod + def decode(cls, constraint_pb) -> "Constraint": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. 
+ + :param constraint_pb: the protocol buffer object corresponding with this class. + :return: A new instance of this class matching the protocol buffer object + """ + constraint_case = constraint_pb.WhichOneof("constraint") + if constraint_case == proto_constraint["relation"]: + constraint_type = ConstraintType.decode(constraint_pb.relation, "relation") + elif constraint_case == proto_constraint["set"]: + constraint_type = ConstraintType.decode(constraint_pb.set_, "set") + elif constraint_case == proto_constraint["range"]: + constraint_type = ConstraintType.decode(constraint_pb.range_, "range") + elif constraint_case == proto_constraint["distance"]: + constraint_type = ConstraintType.decode(constraint_pb.distance, "distance") + else: + raise ValueError( # pragma: nocover + f"Incorrect argument. Expected either of ['relation', 'set_', 'range_', 'distance']. Found {constraint_case}." + ) + + return cls(constraint_pb.attribute_name, constraint_type) + class Query: """This class lets you build a query for the OEF.""" @@ -918,31 +1540,70 @@ def __str__(self): [str(c) for c in self.constraints], self.model ) + def _encode(self) -> models_pb2.Query.Model: # type: ignore + """ + Encode an instance of this class into a protocol buffer object. + + :return: the matching protocol buffer object + """ + query = models_pb2.Query.Model() # type: ignore + constraint_expression_pbs = [ + ConstraintExpr._encode(constraint) # pylint: disable=protected-access + for constraint in self.constraints + ] + query.constraints.extend(constraint_expression_pbs) + + if self.model is not None: + query.model.CopyFrom(self.model.encode()) + return query + @classmethod - def encode(cls, query_protobuf_object, query_object: "Query") -> None: + def encode(cls, query_pb, query: "Query") -> None: """ Encode an instance of this class into the protocol buffer object. - The protocol buffer object in the query_protobuf_object argument must be matched with the instance of this class in the 'query_object' argument. + The protocol buffer object in the query_protobuf_object argument must be matched + with the instance of this class in the 'query_object' argument. + + :param query_pb: the protocol buffer object wrapping an object that corresponds with this class. + :param query: an instance of this class to be encoded in the protocol buffer object. - :param query_protobuf_object: the protocol buffer object whose type corresponds with this class. - :param query_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - query_bytes = pickle.dumps(query_object) # nosec - query_protobuf_object.query_bytes = query_bytes + query_bytes_pb = query._encode() # pylint: disable=protected-access + query_bytes_bytes = query_bytes_pb.SerializeToString() + query_pb.query_bytes = query_bytes_bytes + + @classmethod + def _decode(cls, query_pb) -> "Query": + """ + Decode a protocol buffer object that corresponds with this class into an instance of this class. + + :param query_pb: the protocol buffer object corresponding with this class. 
+ :return: A new instance of this class matching the protocol buffer object + """ + constraints = [ + ConstraintExpr._decode(c) # pylint: disable=protected-access + for c in query_pb.constraints + ] + data_model = DataModel.decode(query_pb.model) + + return cls(constraints, data_model if query_pb.HasField("model") else None,) @classmethod - def decode(cls, query_protobuf_object) -> "Query": + def decode(cls, query_pb) -> "Query": """ Decode a protocol buffer object that corresponds with this class into an instance of this class. - A new instance of this class must be created that matches the protocol buffer object in the 'query_protobuf_object' argument. + A new instance of this class must be created that matches the protocol + buffer object in the 'query_protobuf_object' argument. - :param query_protobuf_object: the protocol buffer object whose type corresponds with this class. + :param query_pb: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'query_protobuf_object' argument. """ - query = pickle.loads(query_protobuf_object.query_bytes) # nosec + query_bytes_pb = models_pb2.Query.Model() # type: ignore + query_bytes_pb.ParseFromString(query_pb.query_bytes) + query = cls._decode(query_bytes_pb) return query @@ -957,12 +1618,11 @@ def haversine(lat1: float, lon1: float, lat2: float, lon2: float) -> float: :return: the Haversine distance. """ lat1, lon1, lat2, lon2, = map(radians, [lat1, lon1, lat2, lon2]) - # average earth radius - R = 6372.8 + earth_radius = 6372.8 # average earth radius dlat = lat2 - lat1 dlon = lon2 - lon1 sin_lat_squared = sin(dlat * 0.5) * sin(dlat * 0.5) sin_lon_squared = sin(dlon * 0.5) * sin(dlon * 0.5) computation = asin(sqrt(sin_lat_squared + sin_lon_squared * cos(lat1) * cos(lat2))) - d = 2 * R * computation - return d + distance = 2 * earth_radius * computation + return distance diff --git a/aea/helpers/search/models_pb2.py b/aea/helpers/search/models_pb2.py new file mode 100644 index 0000000000..c66e9aadf1 --- /dev/null +++ b/aea/helpers/search/models_pb2.py @@ -0,0 +1,2278 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
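One practical consequence of replacing the pickled representation with models.proto above is that IN/NOT_IN/WITHIN/DISTANCE constraint values must now be plain tuples; lists and sets are rejected by check_validity. A hedged sketch with made-up attribute values:

from aea.helpers.search.models import Constraint, ConstraintType, Description, Query

query = Query(
    [
        Constraint("genre", ConstraintType("in", ("horror", "science_fiction"))),
        Constraint("year", ConstraintType("within", (1950, 2000))),
    ]
)
book = Description({"genre": "horror", "year": 1984})
assert query.check(book)

# Lists are no longer accepted for set-style constraints.
try:
    ConstraintType("in", ["horror"])
except Exception as error:  # enforce() raises on non-tuple values
    print(f"rejected as expected: {error}")

The public Query.encode/Query.decode signatures are unchanged; only the serialised form moves from pickled bytes to a models_pb2.Query.Model message serialised to bytes.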
+# source: models.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="models.proto", + package="aea.helpers.search.models", + syntax="proto3", + serialized_options=b"H\001", + serialized_pb=b'\n\x0cmodels.proto\x12\x19\x61\x65\x61.helpers.search.models"\xc1\x19\n\x05Query\x1a\xc0\x01\n\tAttribute\x12\x0c\n\x04name\x18\x01 \x01(\t\x12=\n\x04type\x18\x02 \x01(\x0e\x32/.aea.helpers.search.models.Query.Attribute.Type\x12\x10\n\x08required\x18\x03 \x01(\x08\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t"?\n\x04Type\x12\n\n\x06\x44OUBLE\x10\x00\x12\x07\n\x03INT\x10\x01\x12\x08\n\x04\x42OOL\x10\x02\x12\n\n\x06STRING\x10\x03\x12\x0c\n\x08LOCATION\x10\x04\x1an\n\tDataModel\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\nattributes\x18\x02 \x03(\x0b\x32*.aea.helpers.search.models.Query.Attribute\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x1a$\n\x08Location\x12\x0b\n\x03lon\x18\x01 \x01(\x01\x12\x0b\n\x03lat\x18\x02 \x01(\x01\x1a\x99\x01\n\x05Value\x12\x10\n\x06string\x18\x01 \x01(\tH\x00\x12\x10\n\x06\x64ouble\x18\x02 \x01(\x01H\x00\x12\x11\n\x07\x62oolean\x18\x03 \x01(\x08H\x00\x12\x11\n\x07integer\x18\x04 \x01(\x03H\x00\x12=\n\x08location\x18\x05 \x01(\x0b\x32).aea.helpers.search.models.Query.LocationH\x00\x42\x07\n\x05value\x1aN\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.aea.helpers.search.models.Query.Value\x1a\x80\x01\n\x08Instance\x12\x39\n\x05model\x18\x01 \x01(\x0b\x32*.aea.helpers.search.models.Query.DataModel\x12\x39\n\x06values\x18\x02 \x03(\x0b\x32).aea.helpers.search.models.Query.KeyValue\x1a+\n\nStringPair\x12\r\n\x05\x66irst\x18\x01 \x01(\t\x12\x0e\n\x06second\x18\x02 \x01(\t\x1a(\n\x07IntPair\x12\r\n\x05\x66irst\x18\x01 \x01(\x03\x12\x0e\n\x06second\x18\x02 \x01(\x03\x1a+\n\nDoublePair\x12\r\n\x05\x66irst\x18\x01 \x01(\x01\x12\x0e\n\x06second\x18\x02 \x01(\x01\x1a\x83\x01\n\x0cLocationPair\x12\x38\n\x05\x66irst\x18\x01 \x01(\x0b\x32).aea.helpers.search.models.Query.Location\x12\x39\n\x06second\x18\x02 \x01(\x0b\x32).aea.helpers.search.models.Query.Location\x1a\xa1\x02\n\x05Range\x12\x42\n\x0bstring_pair\x18\x01 \x01(\x0b\x32+.aea.helpers.search.models.Query.StringPairH\x00\x12@\n\x0cinteger_pair\x18\x02 \x01(\x0b\x32(.aea.helpers.search.models.Query.IntPairH\x00\x12\x42\n\x0b\x64ouble_pair\x18\x03 \x01(\x0b\x32+.aea.helpers.search.models.Query.DoublePairH\x00\x12\x46\n\rlocation_pair\x18\x04 \x01(\x0b\x32-.aea.helpers.search.models.Query.LocationPairH\x00\x42\x06\n\x04pair\x1aW\n\x08\x44istance\x12\x39\n\x06\x63\x65nter\x18\x01 \x01(\x0b\x32).aea.helpers.search.models.Query.Location\x12\x10\n\x08\x64istance\x18\x02 \x01(\x01\x1a\xca\x01\n\x08Relation\x12\x44\n\x08operator\x18\x01 \x01(\x0e\x32\x32.aea.helpers.search.models.Query.Relation.Operator\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.aea.helpers.search.models.Query.Value"A\n\x08Operator\x12\x06\n\x02\x45Q\x10\x00\x12\x06\n\x02LT\x10\x01\x12\x08\n\x04LTEQ\x10\x02\x12\x06\n\x02GT\x10\x03\x12\x08\n\x04GTEQ\x10\x04\x12\t\n\x05NOTEQ\x10\x05\x1a\xca\x05\n\x03Set\x12?\n\x08operator\x18\x01 \x01(\x0e\x32-.aea.helpers.search.models.Query.Set.Operator\x12;\n\x06values\x18\x02 \x01(\x0b\x32+.aea.helpers.search.models.Query.Set.Values\x1a\xa5\x04\n\x06Values\x12\x45\n\x06string\x18\x01 
\x01(\x0b\x32\x33.aea.helpers.search.models.Query.Set.Values.StringsH\x00\x12\x45\n\x06\x64ouble\x18\x02 \x01(\x0b\x32\x33.aea.helpers.search.models.Query.Set.Values.DoublesH\x00\x12\x44\n\x07\x62oolean\x18\x03 \x01(\x0b\x32\x31.aea.helpers.search.models.Query.Set.Values.BoolsH\x00\x12\x43\n\x07integer\x18\x04 \x01(\x0b\x32\x30.aea.helpers.search.models.Query.Set.Values.IntsH\x00\x12I\n\x08location\x18\x05 \x01(\x0b\x32\x35.aea.helpers.search.models.Query.Set.Values.LocationsH\x00\x1a\x16\n\x04Ints\x12\x0e\n\x06values\x18\x01 \x03(\x03\x1a\x19\n\x07\x44oubles\x12\x0e\n\x06values\x18\x01 \x03(\x01\x1a\x19\n\x07Strings\x12\x0e\n\x06values\x18\x01 \x03(\t\x1a\x17\n\x05\x42ools\x12\x0e\n\x06values\x18\x01 \x03(\x08\x1a\x46\n\tLocations\x12\x39\n\x06values\x18\x01 \x03(\x0b\x32).aea.helpers.search.models.Query.LocationB\x08\n\x06values"\x1d\n\x08Operator\x12\x06\n\x02IN\x10\x00\x12\t\n\x05NOTIN\x10\x01\x1a\xc3\x06\n\x0e\x43onstraintExpr\x12\x41\n\x03or_\x18\x01 \x01(\x0b\x32\x32.aea.helpers.search.models.Query.ConstraintExpr.OrH\x00\x12\x43\n\x04\x61nd_\x18\x02 \x01(\x0b\x32\x33.aea.helpers.search.models.Query.ConstraintExpr.AndH\x00\x12\x43\n\x04not_\x18\x03 \x01(\x0b\x32\x33.aea.helpers.search.models.Query.ConstraintExpr.NotH\x00\x12P\n\nconstraint\x18\x04 \x01(\x0b\x32:.aea.helpers.search.models.Query.ConstraintExpr.ConstraintH\x00\x1aI\n\x02Or\x12\x43\n\nexpression\x18\x01 \x03(\x0b\x32/.aea.helpers.search.models.Query.ConstraintExpr\x1aJ\n\x03\x41nd\x12\x43\n\nexpression\x18\x01 \x03(\x0b\x32/.aea.helpers.search.models.Query.ConstraintExpr\x1aJ\n\x03Not\x12\x43\n\nexpression\x18\x01 \x01(\x0b\x32/.aea.helpers.search.models.Query.ConstraintExpr\x1a\xa0\x02\n\nConstraint\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x34\n\x04set_\x18\x02 \x01(\x0b\x32$.aea.helpers.search.models.Query.SetH\x00\x12\x38\n\x06range_\x18\x03 \x01(\x0b\x32&.aea.helpers.search.models.Query.RangeH\x00\x12=\n\x08relation\x18\x04 \x01(\x0b\x32).aea.helpers.search.models.Query.RelationH\x00\x12=\n\x08\x64istance\x18\x05 \x01(\x0b\x32).aea.helpers.search.models.Query.DistanceH\x00\x42\x0c\n\nconstraintB\x0c\n\nexpression\x1a\x88\x01\n\x05Model\x12\x44\n\x0b\x63onstraints\x18\x01 \x03(\x0b\x32/.aea.helpers.search.models.Query.ConstraintExpr\x12\x39\n\x05model\x18\x02 \x01(\x0b\x32*.aea.helpers.search.models.Query.DataModelB\x02H\x01\x62\x06proto3', +) + + +_QUERY_ATTRIBUTE_TYPE = _descriptor.EnumDescriptor( + name="Type", + full_name="aea.helpers.search.models.Query.Attribute.Type", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="DOUBLE", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="INT", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="BOOL", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="STRING", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="LOCATION", index=4, number=4, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=183, + serialized_end=246, +) +_sym_db.RegisterEnumDescriptor(_QUERY_ATTRIBUTE_TYPE) + +_QUERY_RELATION_OPERATOR = _descriptor.EnumDescriptor( + name="Operator", + full_name="aea.helpers.search.models.Query.Relation.Operator", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="EQ", index=0, number=0, serialized_options=None, type=None + 
), + _descriptor.EnumValueDescriptor( + name="LT", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="LTEQ", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="GT", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="GTEQ", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="NOTEQ", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=1550, + serialized_end=1615, +) +_sym_db.RegisterEnumDescriptor(_QUERY_RELATION_OPERATOR) + +_QUERY_SET_OPERATOR = _descriptor.EnumDescriptor( + name="Operator", + full_name="aea.helpers.search.models.Query.Set.Operator", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="IN", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="NOTIN", index=1, number=1, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2303, + serialized_end=2332, +) +_sym_db.RegisterEnumDescriptor(_QUERY_SET_OPERATOR) + + +_QUERY_ATTRIBUTE = _descriptor.Descriptor( + name="Attribute", + full_name="aea.helpers.search.models.Query.Attribute", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="aea.helpers.search.models.Query.Attribute.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="type", + full_name="aea.helpers.search.models.Query.Attribute.type", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="required", + full_name="aea.helpers.search.models.Query.Attribute.required", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="aea.helpers.search.models.Query.Attribute.description", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_QUERY_ATTRIBUTE_TYPE,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=54, + serialized_end=246, +) + +_QUERY_DATAMODEL = _descriptor.Descriptor( + name="DataModel", + full_name="aea.helpers.search.models.Query.DataModel", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="aea.helpers.search.models.Query.DataModel.name", + 
index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="attributes", + full_name="aea.helpers.search.models.Query.DataModel.attributes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="aea.helpers.search.models.Query.DataModel.description", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=248, + serialized_end=358, +) + +_QUERY_LOCATION = _descriptor.Descriptor( + name="Location", + full_name="aea.helpers.search.models.Query.Location", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="lon", + full_name="aea.helpers.search.models.Query.Location.lon", + index=0, + number=1, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="lat", + full_name="aea.helpers.search.models.Query.Location.lat", + index=1, + number=2, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=360, + serialized_end=396, +) + +_QUERY_VALUE = _descriptor.Descriptor( + name="Value", + full_name="aea.helpers.search.models.Query.Value", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="string", + full_name="aea.helpers.search.models.Query.Value.string", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="double", + full_name="aea.helpers.search.models.Query.Value.double", + index=1, + number=2, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="boolean", + full_name="aea.helpers.search.models.Query.Value.boolean", + index=2, + number=3, + type=8, + cpp_type=7, + 
label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="integer", + full_name="aea.helpers.search.models.Query.Value.integer", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="location", + full_name="aea.helpers.search.models.Query.Value.location", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="value", + full_name="aea.helpers.search.models.Query.Value.value", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=399, + serialized_end=552, +) + +_QUERY_KEYVALUE = _descriptor.Descriptor( + name="KeyValue", + full_name="aea.helpers.search.models.Query.KeyValue", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="aea.helpers.search.models.Query.KeyValue.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="aea.helpers.search.models.Query.KeyValue.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=554, + serialized_end=632, +) + +_QUERY_INSTANCE = _descriptor.Descriptor( + name="Instance", + full_name="aea.helpers.search.models.Query.Instance", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="model", + full_name="aea.helpers.search.models.Query.Instance.model", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Instance.values", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + 
is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=635, + serialized_end=763, +) + +_QUERY_STRINGPAIR = _descriptor.Descriptor( + name="StringPair", + full_name="aea.helpers.search.models.Query.StringPair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="first", + full_name="aea.helpers.search.models.Query.StringPair.first", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="second", + full_name="aea.helpers.search.models.Query.StringPair.second", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=765, + serialized_end=808, +) + +_QUERY_INTPAIR = _descriptor.Descriptor( + name="IntPair", + full_name="aea.helpers.search.models.Query.IntPair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="first", + full_name="aea.helpers.search.models.Query.IntPair.first", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="second", + full_name="aea.helpers.search.models.Query.IntPair.second", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=810, + serialized_end=850, +) + +_QUERY_DOUBLEPAIR = _descriptor.Descriptor( + name="DoublePair", + full_name="aea.helpers.search.models.Query.DoublePair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="first", + full_name="aea.helpers.search.models.Query.DoublePair.first", + index=0, + number=1, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="second", + full_name="aea.helpers.search.models.Query.DoublePair.second", + index=1, + number=2, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=852, + serialized_end=895, +) + +_QUERY_LOCATIONPAIR = _descriptor.Descriptor( + name="LocationPair", + full_name="aea.helpers.search.models.Query.LocationPair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="first", + full_name="aea.helpers.search.models.Query.LocationPair.first", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="second", + full_name="aea.helpers.search.models.Query.LocationPair.second", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=898, + serialized_end=1029, +) + +_QUERY_RANGE = _descriptor.Descriptor( + name="Range", + full_name="aea.helpers.search.models.Query.Range", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="string_pair", + full_name="aea.helpers.search.models.Query.Range.string_pair", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="integer_pair", + full_name="aea.helpers.search.models.Query.Range.integer_pair", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="double_pair", + full_name="aea.helpers.search.models.Query.Range.double_pair", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="location_pair", + full_name="aea.helpers.search.models.Query.Range.location_pair", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="pair", + full_name="aea.helpers.search.models.Query.Range.pair", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=1032, + serialized_end=1321, +) + +_QUERY_DISTANCE = _descriptor.Descriptor( + name="Distance", + full_name="aea.helpers.search.models.Query.Distance", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="center", + full_name="aea.helpers.search.models.Query.Distance.center", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="distance", + full_name="aea.helpers.search.models.Query.Distance.distance", + index=1, + number=2, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1323, + serialized_end=1410, +) + +_QUERY_RELATION = _descriptor.Descriptor( + name="Relation", + full_name="aea.helpers.search.models.Query.Relation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operator", + full_name="aea.helpers.search.models.Query.Relation.operator", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="aea.helpers.search.models.Query.Relation.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_QUERY_RELATION_OPERATOR,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1413, + serialized_end=1615, +) + +_QUERY_SET_VALUES_INTS = _descriptor.Descriptor( + name="Ints", + full_name="aea.helpers.search.models.Query.Set.Values.Ints", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.Ints.values", + index=0, + number=1, + type=3, + cpp_type=2, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2118, + serialized_end=2140, +) + +_QUERY_SET_VALUES_DOUBLES = _descriptor.Descriptor( + name="Doubles", + full_name="aea.helpers.search.models.Query.Set.Values.Doubles", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.Doubles.values", + index=0, + number=1, + type=1, + cpp_type=5, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2142, + serialized_end=2167, +) + +_QUERY_SET_VALUES_STRINGS = _descriptor.Descriptor( + name="Strings", + full_name="aea.helpers.search.models.Query.Set.Values.Strings", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.Strings.values", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2169, + serialized_end=2194, +) + +_QUERY_SET_VALUES_BOOLS = _descriptor.Descriptor( + name="Bools", + full_name="aea.helpers.search.models.Query.Set.Values.Bools", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.Bools.values", + index=0, + number=1, + type=8, + cpp_type=7, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2196, + serialized_end=2219, +) + +_QUERY_SET_VALUES_LOCATIONS = _descriptor.Descriptor( + name="Locations", + full_name="aea.helpers.search.models.Query.Set.Values.Locations", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.Locations.values", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2221, + serialized_end=2291, +) + +_QUERY_SET_VALUES = _descriptor.Descriptor( + name="Values", + full_name="aea.helpers.search.models.Query.Set.Values", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="string", + full_name="aea.helpers.search.models.Query.Set.Values.string", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="double", + full_name="aea.helpers.search.models.Query.Set.Values.double", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, 
+ is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="boolean", + full_name="aea.helpers.search.models.Query.Set.Values.boolean", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="integer", + full_name="aea.helpers.search.models.Query.Set.Values.integer", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="location", + full_name="aea.helpers.search.models.Query.Set.Values.location", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _QUERY_SET_VALUES_INTS, + _QUERY_SET_VALUES_DOUBLES, + _QUERY_SET_VALUES_STRINGS, + _QUERY_SET_VALUES_BOOLS, + _QUERY_SET_VALUES_LOCATIONS, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.Values.values", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=1752, + serialized_end=2301, +) + +_QUERY_SET = _descriptor.Descriptor( + name="Set", + full_name="aea.helpers.search.models.Query.Set", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operator", + full_name="aea.helpers.search.models.Query.Set.operator", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="values", + full_name="aea.helpers.search.models.Query.Set.values", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_QUERY_SET_VALUES,], + enum_types=[_QUERY_SET_OPERATOR,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1618, + serialized_end=2332, +) + +_QUERY_CONSTRAINTEXPR_OR = _descriptor.Descriptor( + name="Or", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Or", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="expression", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Or.expression", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), 
+ ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2640, + serialized_end=2713, +) + +_QUERY_CONSTRAINTEXPR_AND = _descriptor.Descriptor( + name="And", + full_name="aea.helpers.search.models.Query.ConstraintExpr.And", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="expression", + full_name="aea.helpers.search.models.Query.ConstraintExpr.And.expression", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2715, + serialized_end=2789, +) + +_QUERY_CONSTRAINTEXPR_NOT = _descriptor.Descriptor( + name="Not", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Not", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="expression", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Not.expression", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2791, + serialized_end=2865, +) + +_QUERY_CONSTRAINTEXPR_CONSTRAINT = _descriptor.Descriptor( + name="Constraint", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="attribute_name", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.attribute_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="set_", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.set_", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="range_", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.range_", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="relation", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.relation", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="distance", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.distance", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="constraint", + full_name="aea.helpers.search.models.Query.ConstraintExpr.Constraint.constraint", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=2868, + serialized_end=3156, +) + +_QUERY_CONSTRAINTEXPR = _descriptor.Descriptor( + name="ConstraintExpr", + full_name="aea.helpers.search.models.Query.ConstraintExpr", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="or_", + full_name="aea.helpers.search.models.Query.ConstraintExpr.or_", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="and_", + full_name="aea.helpers.search.models.Query.ConstraintExpr.and_", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="not_", + full_name="aea.helpers.search.models.Query.ConstraintExpr.not_", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="constraint", + full_name="aea.helpers.search.models.Query.ConstraintExpr.constraint", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _QUERY_CONSTRAINTEXPR_OR, + _QUERY_CONSTRAINTEXPR_AND, + _QUERY_CONSTRAINTEXPR_NOT, + _QUERY_CONSTRAINTEXPR_CONSTRAINT, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="expression", + full_name="aea.helpers.search.models.Query.ConstraintExpr.expression", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=2335, + serialized_end=3170, +) + +_QUERY_MODEL = _descriptor.Descriptor( + name="Model", + full_name="aea.helpers.search.models.Query.Model", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="constraints", + full_name="aea.helpers.search.models.Query.Model.constraints", + index=0, + number=1, + type=11, + 
cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="aea.helpers.search.models.Query.Model.model", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3173, + serialized_end=3309, +) + +_QUERY = _descriptor.Descriptor( + name="Query", + full_name="aea.helpers.search.models.Query", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[ + _QUERY_ATTRIBUTE, + _QUERY_DATAMODEL, + _QUERY_LOCATION, + _QUERY_VALUE, + _QUERY_KEYVALUE, + _QUERY_INSTANCE, + _QUERY_STRINGPAIR, + _QUERY_INTPAIR, + _QUERY_DOUBLEPAIR, + _QUERY_LOCATIONPAIR, + _QUERY_RANGE, + _QUERY_DISTANCE, + _QUERY_RELATION, + _QUERY_SET, + _QUERY_CONSTRAINTEXPR, + _QUERY_MODEL, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=44, + serialized_end=3309, +) + +_QUERY_ATTRIBUTE.fields_by_name["type"].enum_type = _QUERY_ATTRIBUTE_TYPE +_QUERY_ATTRIBUTE.containing_type = _QUERY +_QUERY_ATTRIBUTE_TYPE.containing_type = _QUERY_ATTRIBUTE +_QUERY_DATAMODEL.fields_by_name["attributes"].message_type = _QUERY_ATTRIBUTE +_QUERY_DATAMODEL.containing_type = _QUERY +_QUERY_LOCATION.containing_type = _QUERY +_QUERY_VALUE.fields_by_name["location"].message_type = _QUERY_LOCATION +_QUERY_VALUE.containing_type = _QUERY +_QUERY_VALUE.oneofs_by_name["value"].fields.append( + _QUERY_VALUE.fields_by_name["string"] +) +_QUERY_VALUE.fields_by_name["string"].containing_oneof = _QUERY_VALUE.oneofs_by_name[ + "value" +] +_QUERY_VALUE.oneofs_by_name["value"].fields.append( + _QUERY_VALUE.fields_by_name["double"] +) +_QUERY_VALUE.fields_by_name["double"].containing_oneof = _QUERY_VALUE.oneofs_by_name[ + "value" +] +_QUERY_VALUE.oneofs_by_name["value"].fields.append( + _QUERY_VALUE.fields_by_name["boolean"] +) +_QUERY_VALUE.fields_by_name["boolean"].containing_oneof = _QUERY_VALUE.oneofs_by_name[ + "value" +] +_QUERY_VALUE.oneofs_by_name["value"].fields.append( + _QUERY_VALUE.fields_by_name["integer"] +) +_QUERY_VALUE.fields_by_name["integer"].containing_oneof = _QUERY_VALUE.oneofs_by_name[ + "value" +] +_QUERY_VALUE.oneofs_by_name["value"].fields.append( + _QUERY_VALUE.fields_by_name["location"] +) +_QUERY_VALUE.fields_by_name["location"].containing_oneof = _QUERY_VALUE.oneofs_by_name[ + "value" +] +_QUERY_KEYVALUE.fields_by_name["value"].message_type = _QUERY_VALUE +_QUERY_KEYVALUE.containing_type = _QUERY +_QUERY_INSTANCE.fields_by_name["model"].message_type = _QUERY_DATAMODEL +_QUERY_INSTANCE.fields_by_name["values"].message_type = _QUERY_KEYVALUE +_QUERY_INSTANCE.containing_type = _QUERY +_QUERY_STRINGPAIR.containing_type = _QUERY +_QUERY_INTPAIR.containing_type = _QUERY +_QUERY_DOUBLEPAIR.containing_type = _QUERY +_QUERY_LOCATIONPAIR.fields_by_name["first"].message_type = _QUERY_LOCATION +_QUERY_LOCATIONPAIR.fields_by_name["second"].message_type = _QUERY_LOCATION 
+_QUERY_LOCATIONPAIR.containing_type = _QUERY +_QUERY_RANGE.fields_by_name["string_pair"].message_type = _QUERY_STRINGPAIR +_QUERY_RANGE.fields_by_name["integer_pair"].message_type = _QUERY_INTPAIR +_QUERY_RANGE.fields_by_name["double_pair"].message_type = _QUERY_DOUBLEPAIR +_QUERY_RANGE.fields_by_name["location_pair"].message_type = _QUERY_LOCATIONPAIR +_QUERY_RANGE.containing_type = _QUERY +_QUERY_RANGE.oneofs_by_name["pair"].fields.append( + _QUERY_RANGE.fields_by_name["string_pair"] +) +_QUERY_RANGE.fields_by_name[ + "string_pair" +].containing_oneof = _QUERY_RANGE.oneofs_by_name["pair"] +_QUERY_RANGE.oneofs_by_name["pair"].fields.append( + _QUERY_RANGE.fields_by_name["integer_pair"] +) +_QUERY_RANGE.fields_by_name[ + "integer_pair" +].containing_oneof = _QUERY_RANGE.oneofs_by_name["pair"] +_QUERY_RANGE.oneofs_by_name["pair"].fields.append( + _QUERY_RANGE.fields_by_name["double_pair"] +) +_QUERY_RANGE.fields_by_name[ + "double_pair" +].containing_oneof = _QUERY_RANGE.oneofs_by_name["pair"] +_QUERY_RANGE.oneofs_by_name["pair"].fields.append( + _QUERY_RANGE.fields_by_name["location_pair"] +) +_QUERY_RANGE.fields_by_name[ + "location_pair" +].containing_oneof = _QUERY_RANGE.oneofs_by_name["pair"] +_QUERY_DISTANCE.fields_by_name["center"].message_type = _QUERY_LOCATION +_QUERY_DISTANCE.containing_type = _QUERY +_QUERY_RELATION.fields_by_name["operator"].enum_type = _QUERY_RELATION_OPERATOR +_QUERY_RELATION.fields_by_name["value"].message_type = _QUERY_VALUE +_QUERY_RELATION.containing_type = _QUERY +_QUERY_RELATION_OPERATOR.containing_type = _QUERY_RELATION +_QUERY_SET_VALUES_INTS.containing_type = _QUERY_SET_VALUES +_QUERY_SET_VALUES_DOUBLES.containing_type = _QUERY_SET_VALUES +_QUERY_SET_VALUES_STRINGS.containing_type = _QUERY_SET_VALUES +_QUERY_SET_VALUES_BOOLS.containing_type = _QUERY_SET_VALUES +_QUERY_SET_VALUES_LOCATIONS.fields_by_name["values"].message_type = _QUERY_LOCATION +_QUERY_SET_VALUES_LOCATIONS.containing_type = _QUERY_SET_VALUES +_QUERY_SET_VALUES.fields_by_name["string"].message_type = _QUERY_SET_VALUES_STRINGS +_QUERY_SET_VALUES.fields_by_name["double"].message_type = _QUERY_SET_VALUES_DOUBLES +_QUERY_SET_VALUES.fields_by_name["boolean"].message_type = _QUERY_SET_VALUES_BOOLS +_QUERY_SET_VALUES.fields_by_name["integer"].message_type = _QUERY_SET_VALUES_INTS +_QUERY_SET_VALUES.fields_by_name["location"].message_type = _QUERY_SET_VALUES_LOCATIONS +_QUERY_SET_VALUES.containing_type = _QUERY_SET +_QUERY_SET_VALUES.oneofs_by_name["values"].fields.append( + _QUERY_SET_VALUES.fields_by_name["string"] +) +_QUERY_SET_VALUES.fields_by_name[ + "string" +].containing_oneof = _QUERY_SET_VALUES.oneofs_by_name["values"] +_QUERY_SET_VALUES.oneofs_by_name["values"].fields.append( + _QUERY_SET_VALUES.fields_by_name["double"] +) +_QUERY_SET_VALUES.fields_by_name[ + "double" +].containing_oneof = _QUERY_SET_VALUES.oneofs_by_name["values"] +_QUERY_SET_VALUES.oneofs_by_name["values"].fields.append( + _QUERY_SET_VALUES.fields_by_name["boolean"] +) +_QUERY_SET_VALUES.fields_by_name[ + "boolean" +].containing_oneof = _QUERY_SET_VALUES.oneofs_by_name["values"] +_QUERY_SET_VALUES.oneofs_by_name["values"].fields.append( + _QUERY_SET_VALUES.fields_by_name["integer"] +) +_QUERY_SET_VALUES.fields_by_name[ + "integer" +].containing_oneof = _QUERY_SET_VALUES.oneofs_by_name["values"] +_QUERY_SET_VALUES.oneofs_by_name["values"].fields.append( + _QUERY_SET_VALUES.fields_by_name["location"] +) +_QUERY_SET_VALUES.fields_by_name[ + "location" +].containing_oneof = _QUERY_SET_VALUES.oneofs_by_name["values"] 
+_QUERY_SET.fields_by_name["operator"].enum_type = _QUERY_SET_OPERATOR +_QUERY_SET.fields_by_name["values"].message_type = _QUERY_SET_VALUES +_QUERY_SET.containing_type = _QUERY +_QUERY_SET_OPERATOR.containing_type = _QUERY_SET +_QUERY_CONSTRAINTEXPR_OR.fields_by_name[ + "expression" +].message_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_OR.containing_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_AND.fields_by_name[ + "expression" +].message_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_AND.containing_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_NOT.fields_by_name[ + "expression" +].message_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_NOT.containing_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["set_"].message_type = _QUERY_SET +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["range_"].message_type = _QUERY_RANGE +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "relation" +].message_type = _QUERY_RELATION +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "distance" +].message_type = _QUERY_DISTANCE +_QUERY_CONSTRAINTEXPR_CONSTRAINT.containing_type = _QUERY_CONSTRAINTEXPR +_QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"].fields.append( + _QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["set_"] +) +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "set_" +].containing_oneof = _QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"] +_QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"].fields.append( + _QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["range_"] +) +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "range_" +].containing_oneof = _QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"] +_QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"].fields.append( + _QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["relation"] +) +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "relation" +].containing_oneof = _QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"] +_QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"].fields.append( + _QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name["distance"] +) +_QUERY_CONSTRAINTEXPR_CONSTRAINT.fields_by_name[ + "distance" +].containing_oneof = _QUERY_CONSTRAINTEXPR_CONSTRAINT.oneofs_by_name["constraint"] +_QUERY_CONSTRAINTEXPR.fields_by_name["or_"].message_type = _QUERY_CONSTRAINTEXPR_OR +_QUERY_CONSTRAINTEXPR.fields_by_name["and_"].message_type = _QUERY_CONSTRAINTEXPR_AND +_QUERY_CONSTRAINTEXPR.fields_by_name["not_"].message_type = _QUERY_CONSTRAINTEXPR_NOT +_QUERY_CONSTRAINTEXPR.fields_by_name[ + "constraint" +].message_type = _QUERY_CONSTRAINTEXPR_CONSTRAINT +_QUERY_CONSTRAINTEXPR.containing_type = _QUERY +_QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"].fields.append( + _QUERY_CONSTRAINTEXPR.fields_by_name["or_"] +) +_QUERY_CONSTRAINTEXPR.fields_by_name[ + "or_" +].containing_oneof = _QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"] +_QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"].fields.append( + _QUERY_CONSTRAINTEXPR.fields_by_name["and_"] +) +_QUERY_CONSTRAINTEXPR.fields_by_name[ + "and_" +].containing_oneof = _QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"] +_QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"].fields.append( + _QUERY_CONSTRAINTEXPR.fields_by_name["not_"] +) +_QUERY_CONSTRAINTEXPR.fields_by_name[ + "not_" +].containing_oneof = _QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"] +_QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"].fields.append( + _QUERY_CONSTRAINTEXPR.fields_by_name["constraint"] +) 
+_QUERY_CONSTRAINTEXPR.fields_by_name[ + "constraint" +].containing_oneof = _QUERY_CONSTRAINTEXPR.oneofs_by_name["expression"] +_QUERY_MODEL.fields_by_name["constraints"].message_type = _QUERY_CONSTRAINTEXPR +_QUERY_MODEL.fields_by_name["model"].message_type = _QUERY_DATAMODEL +_QUERY_MODEL.containing_type = _QUERY +DESCRIPTOR.message_types_by_name["Query"] = _QUERY +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Query = _reflection.GeneratedProtocolMessageType( + "Query", + (_message.Message,), + { + "Attribute": _reflection.GeneratedProtocolMessageType( + "Attribute", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_ATTRIBUTE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Attribute) + }, + ), + "DataModel": _reflection.GeneratedProtocolMessageType( + "DataModel", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_DATAMODEL, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.DataModel) + }, + ), + "Location": _reflection.GeneratedProtocolMessageType( + "Location", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_LOCATION, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Location) + }, + ), + "Value": _reflection.GeneratedProtocolMessageType( + "Value", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_VALUE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Value) + }, + ), + "KeyValue": _reflection.GeneratedProtocolMessageType( + "KeyValue", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_KEYVALUE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.KeyValue) + }, + ), + "Instance": _reflection.GeneratedProtocolMessageType( + "Instance", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_INSTANCE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Instance) + }, + ), + "StringPair": _reflection.GeneratedProtocolMessageType( + "StringPair", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_STRINGPAIR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.StringPair) + }, + ), + "IntPair": _reflection.GeneratedProtocolMessageType( + "IntPair", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_INTPAIR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.IntPair) + }, + ), + "DoublePair": _reflection.GeneratedProtocolMessageType( + "DoublePair", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_DOUBLEPAIR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.DoublePair) + }, + ), + "LocationPair": _reflection.GeneratedProtocolMessageType( + "LocationPair", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_LOCATIONPAIR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.LocationPair) + }, + ), + "Range": _reflection.GeneratedProtocolMessageType( + "Range", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_RANGE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Range) + }, + ), + "Distance": _reflection.GeneratedProtocolMessageType( + "Distance", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_DISTANCE, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Distance) + }, + ), + "Relation": 
_reflection.GeneratedProtocolMessageType( + "Relation", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_RELATION, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Relation) + }, + ), + "Set": _reflection.GeneratedProtocolMessageType( + "Set", + (_message.Message,), + { + "Values": _reflection.GeneratedProtocolMessageType( + "Values", + (_message.Message,), + { + "Ints": _reflection.GeneratedProtocolMessageType( + "Ints", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_SET_VALUES_INTS, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values.Ints) + }, + ), + "Doubles": _reflection.GeneratedProtocolMessageType( + "Doubles", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_SET_VALUES_DOUBLES, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values.Doubles) + }, + ), + "Strings": _reflection.GeneratedProtocolMessageType( + "Strings", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_SET_VALUES_STRINGS, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values.Strings) + }, + ), + "Bools": _reflection.GeneratedProtocolMessageType( + "Bools", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_SET_VALUES_BOOLS, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values.Bools) + }, + ), + "Locations": _reflection.GeneratedProtocolMessageType( + "Locations", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_SET_VALUES_LOCATIONS, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values.Locations) + }, + ), + "DESCRIPTOR": _QUERY_SET_VALUES, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set.Values) + }, + ), + "DESCRIPTOR": _QUERY_SET, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Set) + }, + ), + "ConstraintExpr": _reflection.GeneratedProtocolMessageType( + "ConstraintExpr", + (_message.Message,), + { + "Or": _reflection.GeneratedProtocolMessageType( + "Or", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_CONSTRAINTEXPR_OR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.ConstraintExpr.Or) + }, + ), + "And": _reflection.GeneratedProtocolMessageType( + "And", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_CONSTRAINTEXPR_AND, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.ConstraintExpr.And) + }, + ), + "Not": _reflection.GeneratedProtocolMessageType( + "Not", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_CONSTRAINTEXPR_NOT, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.ConstraintExpr.Not) + }, + ), + "Constraint": _reflection.GeneratedProtocolMessageType( + "Constraint", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_CONSTRAINTEXPR_CONSTRAINT, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.ConstraintExpr.Constraint) + }, + ), + "DESCRIPTOR": _QUERY_CONSTRAINTEXPR, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.ConstraintExpr) + }, + ), + "Model": _reflection.GeneratedProtocolMessageType( + "Model", + (_message.Message,), + { + "DESCRIPTOR": _QUERY_MODEL, + "__module__": 
"models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query.Model) + }, + ), + "DESCRIPTOR": _QUERY, + "__module__": "models_pb2" + # @@protoc_insertion_point(class_scope:aea.helpers.search.models.Query) + }, +) +_sym_db.RegisterMessage(Query) +_sym_db.RegisterMessage(Query.Attribute) +_sym_db.RegisterMessage(Query.DataModel) +_sym_db.RegisterMessage(Query.Location) +_sym_db.RegisterMessage(Query.Value) +_sym_db.RegisterMessage(Query.KeyValue) +_sym_db.RegisterMessage(Query.Instance) +_sym_db.RegisterMessage(Query.StringPair) +_sym_db.RegisterMessage(Query.IntPair) +_sym_db.RegisterMessage(Query.DoublePair) +_sym_db.RegisterMessage(Query.LocationPair) +_sym_db.RegisterMessage(Query.Range) +_sym_db.RegisterMessage(Query.Distance) +_sym_db.RegisterMessage(Query.Relation) +_sym_db.RegisterMessage(Query.Set) +_sym_db.RegisterMessage(Query.Set.Values) +_sym_db.RegisterMessage(Query.Set.Values.Ints) +_sym_db.RegisterMessage(Query.Set.Values.Doubles) +_sym_db.RegisterMessage(Query.Set.Values.Strings) +_sym_db.RegisterMessage(Query.Set.Values.Bools) +_sym_db.RegisterMessage(Query.Set.Values.Locations) +_sym_db.RegisterMessage(Query.ConstraintExpr) +_sym_db.RegisterMessage(Query.ConstraintExpr.Or) +_sym_db.RegisterMessage(Query.ConstraintExpr.And) +_sym_db.RegisterMessage(Query.ConstraintExpr.Not) +_sym_db.RegisterMessage(Query.ConstraintExpr.Constraint) +_sym_db.RegisterMessage(Query.Model) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/aea/helpers/serializers.py b/aea/helpers/serializers.py new file mode 100644 index 0000000000..a9e7ac77af --- /dev/null +++ b/aea/helpers/serializers.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2020 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ------------------------------------------------------------------------------ + +"""This module contains Serializers that can be used for custom types.""" + +import copy +from typing import Any, Dict + +from google.protobuf.struct_pb2 import Struct + + +class DictProtobufStructSerializer: + """ + Serialize python dictionaries of type DictType = Dict[str, ValueType] recursively conserving their dynamic type, using google.protobuf.Struct + + ValueType = PrimitiveType | DictType | List[ValueType]] + PrimitiveType = bool | int | float | str | bytes + """ + + NEED_PATCH = "_need_patch" + + @classmethod + def encode(cls, dictionary: Dict[str, Any]) -> bytes: + """Serialize compatible dictionary to bytes""" + if not isinstance(dictionary, dict): + raise TypeError( # pragma: nocover + "dictionary must be of dict type, got type {}".format(type(dictionary)) + ) + # TOFIX(LR) problematic as it'll copy every message + patched_dict = copy.deepcopy(dictionary) + cls._patch_dict(patched_dict) + pstruct = Struct() + pstruct.update(patched_dict) # pylint: disable=no-member + return pstruct.SerializeToString() + + @classmethod + def decode(cls, buffer: bytes) -> Dict[str, Any]: + """Deserialize a compatible dictionary""" + pstruct = Struct() + pstruct.ParseFromString(buffer) + dictionary = dict(pstruct) + cls._patch_dict_restore(dictionary) + return dictionary + + @classmethod + def _bytes_to_str(cls, value: bytes) -> str: + return value.decode("utf-8") + + @classmethod + def _str_to_bytes(cls, value: str) -> bytes: + return value.encode("utf-8") + + @classmethod + def _patch_dict(cls, dictionnary: Dict[str, Any]) -> None: + need_patch: Dict[str, bool] = dict() + for key, value in dictionnary.items(): + if isinstance(value, bytes): + # convert bytes values to string, as protobuf.Struct does support byte fields + dictionnary[key] = cls._bytes_to_str(value) + if cls.NEED_PATCH in dictionnary: + dictionnary[cls.NEED_PATCH][key] = True + else: + need_patch[key] = True + elif isinstance(value, int) and not isinstance(value, bool): + # protobuf Struct store int as float under numeric_value type + if cls.NEED_PATCH in dictionnary: + dictionnary[cls.NEED_PATCH][key] = True + else: + need_patch[key] = True + elif isinstance(value, dict): + cls._patch_dict(value) # pylint: disable=protected-access + elif ( + not isinstance(value, bool) + and not isinstance(value, float) + and not isinstance(value, str) + and not isinstance(value, Struct) + ): # pragma: nocover + raise NotImplementedError( + "DictProtobufStructSerializer doesn't support dict value type {}".format( + type(value) + ) + ) + if len(need_patch) > 0: + dictionnary[cls.NEED_PATCH] = need_patch + + @classmethod + def _patch_dict_restore(cls, dictionary: Dict[str, Any]) -> None: + # protobuf Struct doesn't recursively convert Struct to dict + need_patch = dictionary.get(cls.NEED_PATCH, {}) + if len(need_patch) > 0: + dictionary[cls.NEED_PATCH] = dict(need_patch) + + for key, value in dictionary.items(): + if key == cls.NEED_PATCH: + continue + + # protobuf struct doesn't recursively convert Struct to dict + if isinstance(value, Struct): + if value != Struct(): + value = dict(value) + dictionary[key] = value + + if isinstance(value, dict): + cls._patch_dict_restore(value) + elif isinstance(value, str) and dictionary.get(cls.NEED_PATCH, dict()).get( + key, False + ): + dictionary[key] = cls._str_to_bytes(value) + elif isinstance(value, float) and dictionary.get( + cls.NEED_PATCH, dict() + ).get(key, False): + dictionary[key] = int(value) + + 
dictionary.pop(cls.NEED_PATCH, None) diff --git a/aea/helpers/storage/__init__.py b/aea/helpers/storage/__init__.py new file mode 100644 index 0000000000..3de6e123d2 --- /dev/null +++ b/aea/helpers/storage/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ + +"""This module contains generic storage code.""" diff --git a/aea/helpers/storage/backends/__init__.py b/aea/helpers/storage/backends/__init__.py new file mode 100644 index 0000000000..44571706f0 --- /dev/null +++ b/aea/helpers/storage/backends/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +"""This module contains storage abstract backend and implementations.""" diff --git a/aea/helpers/storage/backends/base.py b/aea/helpers/storage/backends/base.py new file mode 100644 index 0000000000..5df33f47aa --- /dev/null +++ b/aea/helpers/storage/backends/base.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
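For reference, a minimal round-trip sketch of the new DictProtobufStructSerializer (the dictionary contents are illustrative): bytes values are carried inside the Struct as UTF-8 strings and ints as doubles, and both are restored on decode via the internal "_need_patch" bookkeeping.

from aea.helpers.serializers import DictProtobufStructSerializer

original = {
    "ledger_id": "fetchai",                 # plain strings pass through unchanged
    "nonce": 1,                             # ints are stored as Struct doubles, restored on decode
    "payload": b"\x01\x02",                 # bytes are stored as UTF-8 strings, restored on decode
    "nested": {"flag": True, "rate": 0.5},  # nested dicts are patched recursively
}

encoded = DictProtobufStructSerializer.encode(original)   # -> bytes (serialized Struct)
assert DictProtobufStructSerializer.decode(encoded) == original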
+# +# ------------------------------------------------------------------------------ +"""This module contains storage abstract backend class.""" + +import re +from abc import ABC, abstractmethod +from typing import Dict, List, Optional, Tuple, Union + + +EQUALS_TYPE = Union[int, float, str, bool] +JSON_TYPES = Union[Dict, str, List, None, int, float] +OBJECT_ID_AND_BODY = Tuple[str, JSON_TYPES] + + +class AbstractStorageBackend(ABC): + """Abstract base class for storage backend.""" + + VALID_COL_NAME = re.compile("^[a-zA-Z0-9_]+$") + + def __init__(self, uri: str) -> None: + """Init backend.""" + self._uri = uri + + def _check_collection_name(self, collection_name: str) -> None: + """ + Check collection name is valid. + + raises ValueError if bad collection name provided. + """ + if not self.VALID_COL_NAME.match(collection_name): + raise ValueError( + f"Invalid collection name: {collection_name}, should contain only alpha-numeric characters and _" + ) + + @abstractmethod + async def connect(self) -> None: + """Connect to backend.""" + + @abstractmethod + async def disconnect(self) -> None: + """Disconnect the backend.""" + + @abstractmethod + async def ensure_collection(self, collection_name: str) -> None: + """ + Create collection if not exits. + + :param collection_name: str. + :return: None + """ + + @abstractmethod + async def put( + self, collection_name: str, object_id: str, object_body: JSON_TYPES + ) -> None: + """ + Put object into collection. + + :param collection_name: str. + :param object_id: str object id + :param object_body: python dict, json compatible. + :return: None + """ + + @abstractmethod + async def get(self, collection_name: str, object_id: str) -> Optional[JSON_TYPES]: + """ + Get object from the collection. + + :param collection_name: str. + :param object_id: str object id + + :return: dict if object exists in collection otherwise None + """ + + @abstractmethod + async def remove(self, collection_name: str, object_id: str) -> None: + """ + Remove object from the collection. + + :param collection_name: str. + :param object_id: str object id + + :return: None + """ + + @abstractmethod + async def find( + self, collection_name: str, field: str, equals: EQUALS_TYPE + ) -> List[OBJECT_ID_AND_BODY]: + """ + Get objects from the collection by filtering by field value. + + :param collection_name: str. + :param field: field name to search: example "parent.field" + :param equals: value field should be equal to + + :return: list of objects bodies + """ + + @abstractmethod + async def list(self, collection_name: str) -> List[OBJECT_ID_AND_BODY]: + """ + List all objects with keys from the collection. + + :param collection_name: str. + :return: Tuple of objects keys, bodies. 
+ """ diff --git a/aea/helpers/storage/backends/binaries/README.txt b/aea/helpers/storage/backends/binaries/README.txt new file mode 100644 index 0000000000..fb314ff062 --- /dev/null +++ b/aea/helpers/storage/backends/binaries/README.txt @@ -0,0 +1 @@ +json1.dll - is sqlite extension for windows python<3.9 diff --git a/aea/helpers/storage/backends/binaries/json1.dll b/aea/helpers/storage/backends/binaries/json1.dll new file mode 100644 index 0000000000..a78cf01ccc Binary files /dev/null and b/aea/helpers/storage/backends/binaries/json1.dll differ diff --git a/aea/helpers/storage/backends/sqlite.py b/aea/helpers/storage/backends/sqlite.py new file mode 100644 index 0000000000..7402f7db62 --- /dev/null +++ b/aea/helpers/storage/backends/sqlite.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2019 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +"""This module contains sqlite storage backend implementation.""" +import asyncio +import json +import os +import platform +import sqlite3 +import sys +import threading +from concurrent.futures.thread import ThreadPoolExecutor +from pathlib import Path +from typing import List, Optional, Tuple +from urllib.parse import urlparse + +from aea.helpers.storage.backends.base import ( + AbstractStorageBackend, + EQUALS_TYPE, + JSON_TYPES, + OBJECT_ID_AND_BODY, +) + + +class SqliteStorageBackend(AbstractStorageBackend): + """Sqlite storage backend.""" + + def __init__(self, uri: str) -> None: + """Init backend.""" + super().__init__(uri) + parsed = urlparse(self._uri) + self._fname = parsed.netloc or parsed.path + self._connection: Optional[sqlite3.Connection] = None + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._lock = threading.Lock() + self._executor = ThreadPoolExecutor(max_workers=1) + + def _execute_sql_sync(self, query: str, args: Optional[List] = None) -> List[Tuple]: + """ + Execute sql command and return results. + + :param query: sql query string + :param args: optional argumets to set into sql query. + + :return: List of tuples with sql records + """ + if not self._connection: # pragma: nocover + raise ValueError("Not connected") + with self._lock: + result = self._connection.execute(query, args or []).fetchall() + self._connection.commit() + return result + + async def _executute_sql(self, query: str, args: Optional[List] = None): + """ + Execute sql command and return results in async executor. + + :param query: sql query string + :param args: optional argumets to set into sql query. 
+ + :return: List of tuples with sql records + """ + if not self._loop: # pragma: nocover + raise ValueError("Not connected") + return await self._loop.run_in_executor( + self._executor, self._execute_sql_sync, query, args + ) + + async def connect(self) -> None: + """Connect to backend.""" + self._loop = asyncio.get_event_loop() + self._connection = await self._loop.run_in_executor( + self._executor, self._do_connect, self._fname + ) + + @staticmethod + def _do_connect(fname: str) -> sqlite3.Connection: + con = sqlite3.connect(fname) + if ( + platform.system() == "Windows" + and sys.version_info.major == 3 + and sys.version_info.minor < 9 + ): # pragma: nocover + con.enable_load_extension(True) + path_ext = Path( + os.path.join(os.path.dirname(__file__), "binaries", "json1.dll") + ).as_posix() + con.load_extension(path_ext) + return con + + async def disconnect(self) -> None: + """Disconnect the backend.""" + if not self._loop or not self._connection: # pragma: nocover + raise ValueError("Not connected") + await self._loop.run_in_executor(self._executor, self._connection.close) + self._connection = None + self._loop = None + + async def ensure_collection(self, collection_name: str) -> None: + """ + Create collection if not exits. + + :param collection_name: str. + :return: None + """ + self._check_collection_name(collection_name) + sql = f"""CREATE TABLE IF NOT EXISTS {collection_name} ( + object_id TEXT PRIMARY KEY, + object_body JSON1 NOT NULL) + """ # nosec + await self._executute_sql(sql) + + async def put( + self, collection_name: str, object_id: str, object_body: JSON_TYPES + ) -> None: + """ + Put object into collection. + + :param collection_name: str. + :param object_id: str object id + :param object_body: python dict, json compatible. + :return: None + """ + self._check_collection_name(collection_name) + sql = f"""INSERT OR REPLACE INTO {collection_name} (object_id, object_body) + VALUES (?, ?); + """ # nosec + await self._executute_sql(sql, [object_id, json.dumps(object_body)]) + + async def get(self, collection_name: str, object_id: str) -> Optional[JSON_TYPES]: + """ + Get object from the collection. + + :param collection_name: str. + :param object_id: str object id + + :return: dict if object exists in collection otherwise None + """ + self._check_collection_name(collection_name) + sql = f"""SELECT object_body FROM {collection_name} WHERE object_id = ? LIMIT 1;""" # nosec + result = await self._executute_sql(sql, [object_id]) + if result: + return json.loads(result[0][0]) + return None + + async def remove(self, collection_name: str, object_id: str) -> None: + """ + Remove object from the collection. + + :param collection_name: str. + :param object_id: str object id + + :return: None + """ + self._check_collection_name(collection_name) + sql = f"""DELETE FROM {collection_name} WHERE object_id = ?;""" # nosec + await self._executute_sql(sql, [object_id]) + + async def find( + self, collection_name: str, field: str, equals: EQUALS_TYPE + ) -> List[OBJECT_ID_AND_BODY]: + """ + Get objects from the collection by filtering by field value. + + :param collection_name: str. + :param field: field name to search: example "parent.field" + :param equals: value field should be equal to + + :return: None + """ + self._check_collection_name(collection_name) + sql = f"""SELECT object_id, object_body FROM {collection_name} WHERE json_extract(object_body, ?) 
= ?;""" # nosec + if not field.startswith("$."): + field = f"$.{field}" + return [ + (i[0], json.loads(i[1])) + for i in await self._executute_sql(sql, [field, equals]) + ] + + async def list(self, collection_name: str) -> List[OBJECT_ID_AND_BODY]: + """ + List all objects with keys from the collection. + + :param collection_name: str. + :return: Tuple of objects keys, bodies. + """ + self._check_collection_name(collection_name) + sql = f"""SELECT object_id, object_body FROM {collection_name};""" # nosec + return [(i[0], json.loads(i[1])) for i in await self._executute_sql(sql)] diff --git a/aea/helpers/storage/generic_storage.py b/aea/helpers/storage/generic_storage.py new file mode 100644 index 0000000000..f9684c7e3f --- /dev/null +++ b/aea/helpers/storage/generic_storage.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2018-2020 Fetch.AI Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +"""This module contains the storage implementation.""" +import asyncio +from typing import List, Optional +from urllib.parse import urlparse + +from aea.helpers.async_utils import AsyncState, Runnable +from aea.helpers.storage.backends.base import ( + AbstractStorageBackend, + EQUALS_TYPE, + JSON_TYPES, + OBJECT_ID_AND_BODY, +) +from aea.helpers.storage.backends.sqlite import SqliteStorageBackend + + +BACKENDS = {"sqlite": SqliteStorageBackend} + + +class AsyncCollection: + """Async collection.""" + + def __init__(self, storage_backend: AbstractStorageBackend, collection_name: str): + """ + Init collection object. + + :param storage_backend: storage backed to use. + :param collection_name: srt + """ + self._storage_backend = storage_backend + self._collection_name = collection_name + + async def put(self, object_id: str, object_body: JSON_TYPES) -> None: + """ + Put object into collection. + + :param object_id: str object id + :param object_body: python dict, json compatible. + :return: None + """ + + return await self._storage_backend.put( + self._collection_name, object_id, object_body + ) + + async def get(self, object_id: str) -> Optional[JSON_TYPES]: + """ + Get object from the collection. + + :param object_id: str object id + + :return: dict if object exists in collection otherwise None + """ + return await self._storage_backend.get(self._collection_name, object_id) + + async def remove(self, object_id: str) -> None: + """ + Remove object from the collection. + + :param object_id: str object id + + :return: None + """ + return await self._storage_backend.remove(self._collection_name, object_id) + + async def find(self, field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY]: + """ + Get objects from the collection by filtering by field value. 
+ + :param field: field name to search: example "parent.field" + :param equals: value field should be equal to + + :return: None + """ + return await self._storage_backend.find(self._collection_name, field, equals) + + async def list(self) -> List[OBJECT_ID_AND_BODY]: + """ + List all objects with keys from the collection. + + :return: Tuple of objects keys, bodies. + """ + return await self._storage_backend.list(self._collection_name) + + +class SyncCollection: + """Async collection.""" + + def __init__(self, async_collection_coro, loop: asyncio.AbstractEventLoop): + """ + Init collection object. + + :param async_collection_coro: coroutine returns async collection. + :param loop: abstract event loop where storage is running. + """ + self._loop = loop + self._async_collection = self._run_sync(async_collection_coro) + + def _run_sync(self, coro): + return asyncio.run_coroutine_threadsafe(coro, self._loop).result() + + def put(self, object_id: str, object_body: JSON_TYPES) -> None: + """ + Put object into collection. + + :param object_id: str object id + :param object_body: python dict, json compatible. + :return: None + """ + return self._run_sync(self._async_collection.put(object_id, object_body)) + + def get(self, object_id: str) -> Optional[JSON_TYPES]: + """ + Get object from the collection. + + :param object_id: str object id + + :return: dict if object exists in collection otherwise None + """ + return self._run_sync(self._async_collection.get(object_id)) + + def remove(self, object_id: str) -> None: + """ + Remove object from the collection. + + :param object_id: str object id + + :return: None + """ + return self._run_sync(self._async_collection.remove(object_id)) + + def find(self, field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY]: + """ + Get objects from the collection by filtering by field value. + + :param field: field name to search: example "parent.field" + :param equals: value field should be equal to + + :return: List of object bodies + """ + return self._run_sync(self._async_collection.find(field, equals)) + + def list(self) -> List[OBJECT_ID_AND_BODY]: + """ + List all objects with keys from the collection. + + :return: Tuple of objects keys, bodies. + """ + return self._run_sync(self._async_collection.list()) + + +class Storage(Runnable): + """Generic storage.""" + + def __init__( + self, + storage_uri: str, + loop: asyncio.AbstractEventLoop = None, + threaded: bool = False, + ) -> None: + """ + Init stortage. + + :param storage_uri: configuration string for storage. + :param loop: asyncio event loop to use. + :param threaded: bool. start in thread if True. 
+ + :return: None + """ + super().__init__(loop=loop, threaded=threaded) + self._storage_uri = storage_uri + self._backend: AbstractStorageBackend = self._get_backend_instance(storage_uri) + self._is_connected = False + self._connected_state = AsyncState(False) + + async def wait_connected(self) -> None: + """Wait generic storage is connected.""" + await self._connected_state.wait(True) + + @property + def is_connected(self) -> bool: + """Get running state of the storage.""" + return self._is_connected + + async def run(self): + """Connect storage.""" + await self._backend.connect() + self._is_connected = True + self._connected_state.set(True) + try: + while True: + await asyncio.sleep(1) + finally: + await self._backend.disconnect() + self._is_connected = False + + @classmethod + def _get_backend_instance(cls, uri: str) -> AbstractStorageBackend: + """Construct backend instance.""" + backend_name = urlparse(uri).scheme + backend_class = BACKENDS.get(backend_name, None) + if backend_class is None: + raise ValueError( + f"Backend `{backend_name}` is not supported. Supported are {', '.join(BACKENDS.keys())} " + ) + return backend_class(uri) + + async def get_collection(self, collection_name: str) -> AsyncCollection: + """Get async collection.""" + await self._backend.ensure_collection(collection_name) + return AsyncCollection( + collection_name=collection_name, storage_backend=self._backend + ) + + def get_sync_collection(self, collection_name: str) -> SyncCollection: + """Get sync collection.""" + if not self._loop: # pragma: nocover + raise ValueError("Storage not started!") + return SyncCollection(self.get_collection(collection_name), self._loop) + + def __repr__(self) -> str: + """Get string representation of the storage.""" + return f"[GenericStorage({self._storage_uri}){'Connected' if self.is_connected else 'Not connected'}]" diff --git a/aea/helpers/transaction/base.py b/aea/helpers/transaction/base.py index f281627435..faabcff4ae 100644 --- a/aea/helpers/transaction/base.py +++ b/aea/helpers/transaction/base.py @@ -21,11 +21,12 @@ import collections import copy -import pickle # nosec -from typing import Any, Dict, List, Optional, Tuple +from typing import Dict, List, Optional, Tuple +from aea.common import JSONLike from aea.crypto.ledger_apis import LedgerApis from aea.exceptions import enforce +from aea.helpers.serializers import DictProtobufStructSerializer Address = str @@ -35,7 +36,7 @@ class RawTransaction: """This class represents an instance of RawTransaction.""" def __init__( - self, ledger_id: str, body: Any, + self, ledger_id: str, body: JSONLike, ): """Initialise an instance of RawTransaction.""" self._ledger_id = ledger_id @@ -45,7 +46,7 @@ def __init__( def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._body is not None, "body must not be None") + enforce(isinstance(self._body, dict), "body must not be JSONLike") @property def ledger_id(self) -> str: @@ -70,8 +71,15 @@ def encode( :param raw_transaction_object: an instance of this class to be encoded in the protocol buffer object. 
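The Storage facade can also be driven with plain asyncio, which avoids assuming anything about the Runnable base class beyond what is shown above: run() connects the backend and then idles, so in this sketch it is scheduled as a background task and cancelled at the end, which triggers the backend disconnect in its finally block (URI and names are illustrative).

import asyncio
from contextlib import suppress

from aea.helpers.storage.generic_storage import Storage


async def demo() -> None:
    storage = Storage("sqlite://demo_storage.db")
    runner = asyncio.ensure_future(storage.run())    # connect and keep the storage alive
    await storage.wait_connected()
    try:
        collection = await storage.get_collection("agent_state")
        await collection.put("obj_1", {"owner": "agent_1", "count": 2})
        print(await collection.get("obj_1"))
        print(await collection.find("owner", "agent_1"))
    finally:
        runner.cancel()                              # stops run() and disconnects the backend
        with suppress(asyncio.CancelledError):
            await runner


asyncio.run(demo())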
:return: None """ - raw_transaction_bytes = pickle.dumps(raw_transaction_object) # nosec - raw_transaction_protobuf_object.raw_transaction = raw_transaction_bytes + + raw_transaction_dict = { + "ledger_id": raw_transaction_object.ledger_id, + "body": raw_transaction_object.body, + } + + raw_transaction_protobuf_object.raw_transaction = DictProtobufStructSerializer.encode( + raw_transaction_dict + ) @classmethod def decode(cls, raw_transaction_protobuf_object) -> "RawTransaction": @@ -83,10 +91,10 @@ def decode(cls, raw_transaction_protobuf_object) -> "RawTransaction": :param raw_transaction_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'raw_transaction_protobuf_object' argument. """ - raw_transaction = pickle.loads( # nosec + raw_transaction_dict = DictProtobufStructSerializer.decode( raw_transaction_protobuf_object.raw_transaction ) - return raw_transaction + return cls(raw_transaction_dict["ledger_id"], raw_transaction_dict["body"]) def __eq__(self, other): """Check equality.""" @@ -118,7 +126,7 @@ def __init__( def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._body is not None, "body must not be None") + enforce(isinstance(self._body, bytes), "body must not be bytes") enforce( isinstance(self._is_deprecated_mode, bool), "is_deprecated_mode must be bool", @@ -150,8 +158,15 @@ def encode(raw_message_protobuf_object, raw_message_object: "RawMessage") -> Non :param raw_message_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - raw_message_bytes = pickle.dumps(raw_message_object) # nosec - raw_message_protobuf_object.raw_message = raw_message_bytes + raw_message_dict = { + "ledger_id": raw_message_object.ledger_id, + "body": raw_message_object.body, + "is_deprecated_mode": raw_message_object.is_deprecated_mode, + } + + raw_message_protobuf_object.raw_message = DictProtobufStructSerializer.encode( + raw_message_dict + ) @classmethod def decode(cls, raw_message_protobuf_object) -> "RawMessage": @@ -163,8 +178,14 @@ def decode(cls, raw_message_protobuf_object) -> "RawMessage": :param raw_message_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'raw_message_protobuf_object' argument. 
""" - raw_message = pickle.loads(raw_message_protobuf_object.raw_message) # nosec - return raw_message + raw_message_dict = DictProtobufStructSerializer.decode( + raw_message_protobuf_object.raw_message + ) + return cls( + raw_message_dict["ledger_id"], + raw_message_dict["body"], + raw_message_dict["is_deprecated_mode"], + ) def __eq__(self, other): """Check equality.""" @@ -186,7 +207,7 @@ class SignedTransaction: """This class represents an instance of SignedTransaction.""" def __init__( - self, ledger_id: str, body: Any, + self, ledger_id: str, body: JSONLike, ): """Initialise an instance of SignedTransaction.""" self._ledger_id = ledger_id @@ -196,7 +217,7 @@ def __init__( def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._body is not None, "body must not be None") + enforce(isinstance(self._body, dict), "body must not JSONLike") @property def ledger_id(self) -> str: @@ -222,8 +243,14 @@ def encode( :param signed_transaction_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - signed_transaction_bytes = pickle.dumps(signed_transaction_object) # nosec - signed_transaction_protobuf_object.signed_transaction = signed_transaction_bytes + signed_transaction_dict = { + "ledger_id": signed_transaction_object.ledger_id, + "body": signed_transaction_object.body, + } + + signed_transaction_protobuf_object.signed_transaction = DictProtobufStructSerializer.encode( + signed_transaction_dict + ) @classmethod def decode(cls, signed_transaction_protobuf_object) -> "SignedTransaction": @@ -235,10 +262,12 @@ def decode(cls, signed_transaction_protobuf_object) -> "SignedTransaction": :param signed_transaction_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'signed_transaction_protobuf_object' argument. """ - signed_transaction = pickle.loads( # nosec + signed_transaction_dict = DictProtobufStructSerializer.decode( signed_transaction_protobuf_object.signed_transaction ) - return signed_transaction + return cls( + signed_transaction_dict["ledger_id"], signed_transaction_dict["body"] + ) def __eq__(self, other): """Check equality.""" @@ -304,8 +333,15 @@ def encode( :param signed_message_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - signed_message_bytes = pickle.dumps(signed_message_object) # nosec - signed_message_protobuf_object.signed_message = signed_message_bytes + signed_message_dict = { + "ledger_id": signed_message_object.ledger_id, + "body": signed_message_object.body, + "is_deprecated_mode": signed_message_object.is_deprecated_mode, + } + + signed_message_protobuf_object.signed_message = DictProtobufStructSerializer.encode( + signed_message_dict + ) @classmethod def decode(cls, signed_message_protobuf_object) -> "SignedMessage": @@ -317,10 +353,14 @@ def decode(cls, signed_message_protobuf_object) -> "SignedMessage": :param signed_message_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'signed_message_protobuf_object' argument. 
""" - signed_message = pickle.loads( # nosec + signed_message_dict = DictProtobufStructSerializer.decode( signed_message_protobuf_object.signed_message ) - return signed_message + return cls( + signed_message_dict["ledger_id"], + signed_message_dict["body"], + signed_message_dict["is_deprecated_mode"], + ) def __eq__(self, other): """Check equality.""" @@ -341,7 +381,7 @@ def __str__(self): class State: """This class represents an instance of State.""" - def __init__(self, ledger_id: str, body: bytes): + def __init__(self, ledger_id: str, body: JSONLike): """Initialise an instance of State.""" self._ledger_id = ledger_id self._body = body @@ -350,7 +390,7 @@ def __init__(self, ledger_id: str, body: bytes): def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._body is not None, "body must not be None") + enforce(isinstance(self._body, dict), "body must be dict") @property def ledger_id(self) -> str: @@ -358,7 +398,7 @@ def ledger_id(self) -> str: return self._ledger_id @property - def body(self): + def body(self) -> JSONLike: """Get the body.""" return self._body @@ -373,8 +413,12 @@ def encode(state_protobuf_object, state_object: "State") -> None: :param state_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - state_bytes = pickle.dumps(state_object) # nosec - state_protobuf_object.state = state_bytes + state_dict = { + "ledger_id": state_object.ledger_id, + "body": state_object.body, + } + + state_protobuf_object.state = DictProtobufStructSerializer.encode(state_dict) @classmethod def decode(cls, state_protobuf_object) -> "State": @@ -386,8 +430,8 @@ def decode(cls, state_protobuf_object) -> "State": :param state_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'state_protobuf_object' argument. """ - state = pickle.loads(state_protobuf_object.state) # nosec - return state + state_dict = DictProtobufStructSerializer.decode(state_protobuf_object.state) + return cls(state_dict["ledger_id"], state_dict["body"]) def __eq__(self, other): """Check equality.""" @@ -728,10 +772,15 @@ def fee_by_currency_id(self) -> Dict[str, int]: return copy.copy(self._fee_by_currency_id) @property - def kwargs(self) -> Dict[str, Any]: + def kwargs(self) -> JSONLike: """Get the kwargs.""" return self._kwargs + @property + def is_strict(self) -> bool: + """Get is_strict.""" + return self._is_strict + def _get_lists(self) -> Tuple[List[str], List[int], List[int]]: ordered = collections.OrderedDict(sorted(self.quantities_by_good_id.items())) good_ids = [] # type: List[str] @@ -821,8 +870,19 @@ def encode(terms_protobuf_object, terms_object: "Terms") -> None: :param terms_object: an instance of this class to be encoded in the protocol buffer object. 
:return: None """ - terms_bytes = pickle.dumps(terms_object) # nosec - terms_protobuf_object.terms = terms_bytes + terms_dict = { + "ledger_id": terms_object.ledger_id, + "sender_address": terms_object.sender_address, + "counterparty_address": terms_object.counterparty_address, + "amount_by_currency_id": terms_object.amount_by_currency_id, + "quantities_by_good_id": terms_object.quantities_by_good_id, + "nonce": terms_object.nonce, + "is_sender_payable_tx_fee": terms_object.is_sender_payable_tx_fee, + "fee_by_currency_id": terms_object.fee_by_currency_id, + "is_strict": terms_object.is_strict, + "kwargs": terms_object.kwargs, + } + terms_protobuf_object.terms = DictProtobufStructSerializer.encode(terms_dict) @classmethod def decode(cls, terms_protobuf_object) -> "Terms": @@ -834,8 +894,20 @@ def decode(cls, terms_protobuf_object) -> "Terms": :param terms_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'terms_protobuf_object' argument. """ - terms = pickle.loads(terms_protobuf_object.terms) # nosec - return terms + terms_dict = DictProtobufStructSerializer.decode(terms_protobuf_object.terms) + + return cls( + terms_dict["ledger_id"], + terms_dict["sender_address"], + terms_dict["counterparty_address"], + terms_dict["amount_by_currency_id"], + terms_dict["quantities_by_good_id"], + terms_dict["nonce"], + terms_dict["is_sender_payable_tx_fee"], + dict(terms_dict["fee_by_currency_id"]), + terms_dict["is_strict"], + **dict(terms_dict["kwargs"]), + ) def __eq__(self, other): """Check equality.""" @@ -872,7 +944,7 @@ def __str__(self): class TransactionDigest: """This class represents an instance of TransactionDigest.""" - def __init__(self, ledger_id: str, body: Any): + def __init__(self, ledger_id: str, body: str): """Initialise an instance of TransactionDigest.""" self._ledger_id = ledger_id self._body = body @@ -881,7 +953,7 @@ def __init__(self, ledger_id: str, body: Any): def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._body is not None, "body must not be None") + enforce(isinstance(self._body, str), "body must not be None") @property def ledger_id(self) -> str: @@ -889,7 +961,7 @@ def ledger_id(self) -> str: return self._ledger_id @property - def body(self) -> Any: + def body(self) -> str: """Get the receipt.""" return self._body @@ -907,8 +979,14 @@ def encode( :param transaction_digest_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - transaction_digest_bytes = pickle.dumps(transaction_digest_object) # nosec - transaction_digest_protobuf_object.transaction_digest = transaction_digest_bytes + transaction_digest_dict = { + "ledger_id": transaction_digest_object.ledger_id, + "body": transaction_digest_object.body, + } + + transaction_digest_protobuf_object.transaction_digest = DictProtobufStructSerializer.encode( + transaction_digest_dict + ) @classmethod def decode(cls, transaction_digest_protobuf_object) -> "TransactionDigest": @@ -920,10 +998,13 @@ def decode(cls, transaction_digest_protobuf_object) -> "TransactionDigest": :param transaction_digest_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'transaction_digest_protobuf_object' argument. 
""" - transaction_digest = pickle.loads( # nosec + transaction_digest_dict = DictProtobufStructSerializer.decode( transaction_digest_protobuf_object.transaction_digest ) - return transaction_digest + + return cls( + transaction_digest_dict["ledger_id"], transaction_digest_dict["body"] + ) def __eq__(self, other): """Check equality.""" @@ -943,7 +1024,7 @@ def __str__(self): class TransactionReceipt: """This class represents an instance of TransactionReceipt.""" - def __init__(self, ledger_id: str, receipt: Any, transaction: Any): + def __init__(self, ledger_id: str, receipt: JSONLike, transaction: JSONLike): """Initialise an instance of TransactionReceipt.""" self._ledger_id = ledger_id self._receipt = receipt @@ -953,8 +1034,8 @@ def __init__(self, ledger_id: str, receipt: Any, transaction: Any): def _check_consistency(self) -> None: """Check consistency of the object.""" enforce(isinstance(self._ledger_id, str), "ledger_id must be str") - enforce(self._receipt is not None, "receipt must not be None") - enforce(self._transaction is not None, "transaction must not be None") + enforce(isinstance(self._receipt, dict), "receipt must be dict") + enforce(isinstance(self._transaction, dict), "transaction must be dict") @property def ledger_id(self) -> str: @@ -962,12 +1043,12 @@ def ledger_id(self) -> str: return self._ledger_id @property - def receipt(self) -> Any: + def receipt(self) -> JSONLike: """Get the receipt.""" return self._receipt @property - def transaction(self) -> Any: + def transaction(self) -> JSONLike: """Get the transaction.""" return self._transaction @@ -985,9 +1066,14 @@ def encode( :param transaction_receipt_object: an instance of this class to be encoded in the protocol buffer object. :return: None """ - transaction_receipt_bytes = pickle.dumps(transaction_receipt_object) # nosec - transaction_receipt_protobuf_object.transaction_receipt = ( - transaction_receipt_bytes + transaction_receipt_dict = { + "ledger_id": transaction_receipt_object.ledger_id, + "receipt": transaction_receipt_object.receipt, + "transaction": transaction_receipt_object.transaction, + } + + transaction_receipt_protobuf_object.transaction_receipt = DictProtobufStructSerializer.encode( + transaction_receipt_dict ) @classmethod @@ -1000,10 +1086,14 @@ def decode(cls, transaction_receipt_protobuf_object) -> "TransactionReceipt": :param transaction_receipt_protobuf_object: the protocol buffer object whose type corresponds with this class. :return: A new instance of this class that matches the protocol buffer object in the 'transaction_receipt_protobuf_object' argument. 
""" - transaction_receipt = pickle.loads( # nosec + transaction_receipt_dict = DictProtobufStructSerializer.decode( transaction_receipt_protobuf_object.transaction_receipt ) - return transaction_receipt + return cls( + transaction_receipt_dict["ledger_id"], + transaction_receipt_dict["receipt"], + transaction_receipt_dict["transaction"], + ) def __eq__(self, other): """Check equality.""" diff --git a/aea/manager.py b/aea/manager.py index f3a42b19f5..8b4e896180 100644 --- a/aea/manager.py +++ b/aea/manager.py @@ -58,7 +58,6 @@ def __init__(self, agent: AEA, loop: asyncio.AbstractEventLoop) -> None: def create_run_loop(self) -> None: """Create run loop.""" - pass def start(self) -> None: """Start task.""" @@ -585,7 +584,8 @@ def _ensure_working_dir(self) -> None: if not os.path.isdir(self.working_dir): # pragma: nocover raise ValueError(f"{self.working_dir} is not a directory!") - os.makedirs(self._keys_dir) + if not os.path.exists(self._keys_dir): + os.makedirs(self._keys_dir) def _build_agent_alias( self, diff --git a/aea/protocols/base.py b/aea/protocols/base.py index 43e63747a4..b847377c3b 100644 --- a/aea/protocols/base.py +++ b/aea/protocols/base.py @@ -22,6 +22,7 @@ import logging import re from abc import ABC, abstractmethod +from base64 import b64decode, b64encode from copy import copy from enum import Enum from pathlib import Path @@ -35,7 +36,7 @@ _default_logger = logging.getLogger(__name__) -MAX_PRINT_INNER = 200 +MAX_PRINT_INNER = 600 MAX_PRINT_OUTER = 2000 Address = str @@ -46,6 +47,8 @@ class Message: protocol_id = None # type: PublicId serializer = None # type: Type["Serializer"] + __slots__ = ("_slots", "_to", "_sender") + class Performative(Enum): """Performatives for the base message.""" @@ -83,6 +86,29 @@ def __init__(self, _body: Optional[Dict] = None, **kwargs): except Exception as e: # pylint: disable=broad-except _default_logger.error(e) + def json(self) -> dict: + """Get json friendly str representation of the message.""" + return { + "to": self._to, + "sender": self._sender, + "body": b64encode(self.encode()).decode("utf-8"), + } + + @classmethod + def from_json(cls, data: dict) -> "Message": + """Construct message instance from json data.""" + try: + instance = cls.decode(b64decode(data["body"])) + sender = data["sender"] + if sender: + instance.sender = sender + to = data["to"] + if to: + instance.to = to + return instance + except KeyError: # pragma: nocover + raise ValueError(f"Message representation is invalid: {data}") + @property def valid_performatives(self) -> Set[str]: """Get valid performatives.""" @@ -246,6 +272,11 @@ def encode(self) -> bytes: """Encode the message.""" return self.serializer.encode(self) + @classmethod + def decode(cls, data: bytes) -> "Message": + """Decode the message.""" + return cls.serializer.decode(data) + @property def has_dialogue_info(self) -> bool: """ diff --git a/aea/protocols/dialogue/base.py b/aea/protocols/dialogue/base.py index cd5ac6dd5f..81239e0462 100644 --- a/aea/protocols/dialogue/base.py +++ b/aea/protocols/dialogue/base.py @@ -16,7 +16,6 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - """ This module contains the classes required for dialogue management. @@ -24,18 +23,32 @@ - Dialogue: The dialogue class maintains state of a dialogue and manages it. - Dialogues: The dialogues class keeps track of all dialogues. 
""" - +import inspect import itertools import secrets import sys -from collections import namedtuple +from collections import defaultdict, namedtuple from enum import Enum from inspect import signature -from typing import Callable, Dict, FrozenSet, List, Optional, Set, Tuple, Type, cast +from typing import ( + Callable, + Dict, + FrozenSet, + Iterable, + List, + Optional, + Set, + Tuple, + Type, + cast, +) from aea.common import Address from aea.exceptions import AEAEnforceError, enforce +from aea.helpers.base import cached_property +from aea.helpers.storage.generic_storage import SyncCollection from aea.protocols.base import Message +from aea.skills.base import SkillComponent if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] < 7): @@ -337,6 +350,66 @@ def __init__( "Message class provided not a subclass of `Message`.", ) self._message_class = message_class + self._terminal_state_callbacks: Set[Callable[["Dialogue"], None]] = set() + + def add_terminal_state_callback(self, fn: Callable[["Dialogue"], None]) -> None: + """ + Add callback to be called on dialogue reach terminal state. + + :param fn: callable to be called with one argument: Dialogue + :return: None + """ + self._terminal_state_callbacks.add(fn) + + def __eq__(self, other) -> bool: + """Compare two dialogues.""" + return ( + type(self) == type(other) # pylint: disable=unidiomatic-typecheck + and self.dialogue_label == other.dialogue_label + and self.message_class == other.message_class + and self._incoming_messages == other._incoming_messages + and self._outgoing_messages == other._outgoing_messages + and self.role == other.role + and self.self_address == other.self_address + ) + + def json(self) -> dict: + """Get json representation of the dialogue.""" + data = { + "dialogue_label": self._dialogue_label.json, + "self_address": self.self_address, + "role": self._role.value, + "incoming_messages": [i.json() for i in self._incoming_messages], + "outgoing_messages": [i.json() for i in self._outgoing_messages], + } + return data + + @classmethod + def from_json(cls, message_class: Type[Message], data: dict) -> "Dialogue": + """ + Create a dialogue instance with all messages from json data. + + :param message_class: type of message used with this dialogue + :param data: dict with data exported with Dialogue.to_json() method + + :return: Dialogue instance + """ + try: + obj = cls( + dialogue_label=DialogueLabel.from_json(data["dialogue_label"]), + message_class=message_class, + self_address=Address(data["self_address"]), + role=cls.Role(data["role"]), + ) + obj._incoming_messages = [ # pylint: disable=protected-access + message_class.from_json(i) for i in data["incoming_messages"] + ] + obj._outgoing_messages = [ # pylint: disable=protected-access + message_class.from_json(i) for i in data["outgoing_messages"] + ] + return obj + except KeyError: # pragma: nocover + raise ValueError(f"Dialogue representation is invalid: {data}") @property def dialogue_label(self) -> DialogueLabel: @@ -578,6 +651,10 @@ def _update(self, message: Message) -> None: else: self._incoming_messages.append(message) + if message.performative in self.rules.terminal_performatives: + for fn in self._terminal_state_callbacks: + fn(self) + def _is_belonging_to_dialogue(self, message: Message) -> bool: """ Check if the message is belonging to the dialogue. 
@@ -948,9 +1025,453 @@ def add_dialogue_endstate( self._other_initiated[end_state] += 1 +def find_caller_object(object_type: Type): + """Find caller object of certain type in the call stack.""" + caller_object = None + for frame_info in inspect.stack(): + frame_self = frame_info.frame.f_locals.get("self", None) + if not frame_self: + continue + + if not isinstance(frame_self, object_type): + continue + caller_object = frame_self + return caller_object + + +class BasicDialoguesStorage: + """Dialogues state storage.""" + + def __init__(self, dialogues: "Dialogues") -> None: + """Init dialogues storage.""" + self._dialogues_by_dialogue_label = {} # type: Dict[DialogueLabel, Dialogue] + self._dialogue_by_address = defaultdict( + list + ) # type: Dict[Address, List[Dialogue]] + self._incomplete_to_complete_dialogue_labels = ( + {} + ) # type: Dict[DialogueLabel, DialogueLabel] + self._dialogues = dialogues + self._terminal_state_dialogues_labels: Set[DialogueLabel] = set() + + @property + def dialogues_in_terminal_state(self) -> List["Dialogue"]: + """Get all dialogues in terminal state.""" + return list( + filter( + None, + [ + self._dialogues_by_dialogue_label.get(i) + for i in self._terminal_state_dialogues_labels + ], + ) + ) + + @property + def dialogues_in_active_state(self) -> List["Dialogue"]: + """Get all dialogues in active state.""" + active_dialogues = ( + set(self._dialogues_by_dialogue_label.keys()) + - self._terminal_state_dialogues_labels + ) + return list( + filter( + None, + [self._dialogues_by_dialogue_label.get(i) for i in active_dialogues], + ) + ) + + @property + def is_terminal_dialogues_kept(self) -> bool: + """Return True if dialogues should stay after terminal state.""" + return self._dialogues.is_keep_dialogues_in_terminal_state + + def dialogue_terminal_state_callback(self, dialogue: "Dialogue") -> None: + """Method to be called on dialogue terminal state reached.""" + if self.is_terminal_dialogues_kept: + self._terminal_state_dialogues_labels.add(dialogue.dialogue_label) + else: + self.remove(dialogue.dialogue_label) + + def setup(self) -> None: + """Set up dialogue storage.""" + + def teardown(self) -> None: + """Tear down dialogue storage.""" + + def add(self, dialogue: Dialogue) -> None: + """ + Add dialogue to storage. + + :param dialogue: dialogue to add. + :return: None + """ + dialogue.add_terminal_state_callback(self.dialogue_terminal_state_callback) + self._dialogues_by_dialogue_label[dialogue.dialogue_label] = dialogue + self._dialogue_by_address[ + dialogue.dialogue_label.dialogue_opponent_addr + ].append(dialogue) + + def _add_terminal_state_dialogue(self, dialogue: Dialogue) -> None: + """ + Add terminal state dialogue to storage. + + :param dialogue: dialogue to add. + :return: None + """ + self.add(dialogue) + self._terminal_state_dialogues_labels.add(dialogue.dialogue_label) + + def remove(self, dialogue_label: DialogueLabel) -> None: + """ + Remove dialogue from storage by it's label. 
+ + :param dialogue_label: label of the dialogue to remove + :return: None + """ + dialogue = self._dialogues_by_dialogue_label.pop(dialogue_label, None) + + self._incomplete_to_complete_dialogue_labels.pop(dialogue_label, None) + + if dialogue_label in self._terminal_state_dialogues_labels: + self._terminal_state_dialogues_labels.remove(dialogue_label) + + if dialogue: + self._dialogue_by_address[dialogue_label.dialogue_opponent_addr].remove( + dialogue + ) + + def get(self, dialogue_label: DialogueLabel) -> Optional[Dialogue]: + """ + Get dialogue stored by it's label. + + :param dialogue_label: label of the dialogue + :return: dialogue if presents or None + """ + return self._dialogues_by_dialogue_label.get(dialogue_label, None) + + def get_dialogues_with_counterparty(self, counterparty: Address) -> List[Dialogue]: + """ + Get the dialogues by address. + + :param counterparty: the counterparty + :return: The dialogues with the counterparty. + """ + return self._dialogue_by_address.get(counterparty, []) + + def is_in_incomplete(self, dialogue_label: DialogueLabel) -> bool: + """Check dialogue label presents in list of incomplete.""" + return dialogue_label in self._incomplete_to_complete_dialogue_labels + + def set_incomplete_dialogue( + self, incomplete_dialogue_label, complete_dialogue_label + ) -> None: + """Set incomplete dialogue label.""" + self._incomplete_to_complete_dialogue_labels[ + incomplete_dialogue_label + ] = complete_dialogue_label + + def is_dialogue_present(self, dialogue_label: DialogueLabel) -> bool: + """Check dialogue with label specified presents in storage.""" + return dialogue_label in self._dialogues_by_dialogue_label + + def get_latest_label(self, dialogue_label: DialogueLabel) -> DialogueLabel: + """Get latest label for dialogue.""" + return self._incomplete_to_complete_dialogue_labels.get( + dialogue_label, dialogue_label + ) + + +class PersistDialoguesStorage(BasicDialoguesStorage): + """ + Persist dialogues storage. + + Uses generic storage to load/save dialogues data on setup/teardown. 
+ """ + + INCOMPLETE_DIALOGUES_OBJECT_NAME = "incomplete_dialogues" + TERMINAL_STATE_DIALOGUES_COLLECTTION_SUFFIX = "_terminal" + + def __init__(self, dialogues: "Dialogues") -> None: + """Init dialogues storage.""" + super().__init__(dialogues) + + self._skill_component: Optional[SkillComponent] = self.get_skill_component() + + @staticmethod + def get_skill_component() -> Optional[SkillComponent]: + """Get skill component dialogues storage constructed for.""" + caller_object = find_caller_object(SkillComponent) + if not caller_object: # pragma: nocover + return None + return caller_object + + def _get_collection_name(self) -> Optional[str]: + """Generate collection name based on the dialogues class name and skill component.""" + if not self._skill_component: # pragma: nocover + return None + return "_".join( + [ + self._skill_component.skill_id.author, + self._skill_component.skill_id.name, + self._skill_component.name, + self._skill_component.__class__.__name__, + self._dialogues.__class__.__name__, + ] + ) + + def _get_collection_instance(self, col_name: str) -> Optional[SyncCollection]: + """Get sync collection if generic storage available.""" + if ( + not self._skill_component or not self._skill_component.context.storage + ): # pragma: nocover + return None + return self._skill_component.context.storage.get_sync_collection(col_name) + + @cached_property + def _terminal_dialogues_collection(self) -> Optional[SyncCollection]: + col_name = self._get_collection_name() + if not col_name: + return None + col_name = f"{col_name}{self.TERMINAL_STATE_DIALOGUES_COLLECTTION_SUFFIX}" + return self._get_collection_instance(col_name) + + @cached_property + def _active_dialogues_collection(self) -> Optional[SyncCollection]: + col_name = self._get_collection_name() + if not col_name: + return None + return self._get_collection_instance(col_name) + + def _dump(self) -> None: + """Dump dialogues storage to the generic storage.""" + if ( + not self._active_dialogues_collection + or not self._terminal_dialogues_collection + ): + return # pragma: nocover + + self._dump_incomplete_dialogues_labels(self._active_dialogues_collection) + self._dump_dialogues( + self.dialogues_in_active_state, self._active_dialogues_collection + ) + self._dump_dialogues( + self.dialogues_in_terminal_state, self._terminal_dialogues_collection + ) + + def _dump_incomplete_dialogues_labels(self, collection: SyncCollection) -> None: + """Dump incomplete labels.""" + collection.put( + self.INCOMPLETE_DIALOGUES_OBJECT_NAME, + self._incomplete_dialogues_labels_to_json(), + ) + + def _load_incomplete_dialogues_labels(self, collection: SyncCollection) -> None: + """Load and set incomplete dialogue labels.""" + incomplete_dialogues_data = collection.get( + self.INCOMPLETE_DIALOGUES_OBJECT_NAME + ) + if incomplete_dialogues_data is not None: + incomplete_dialogues_data = cast(List, incomplete_dialogues_data) + self._set_incomplete_dialogues_labels_from_json(incomplete_dialogues_data) + + def _load_dialogues(self, collection: SyncCollection) -> Iterable[Dialogue]: + """Load dialogues from collection.""" + if not collection: # pragma: nocover + return + for label, dialogue_data in collection.list(): + if label == self.INCOMPLETE_DIALOGUES_OBJECT_NAME: + continue + dialogue_data = cast(Dict, dialogue_data) + yield self._dialogue_from_json(dialogue_data) + + def _dialogue_from_json(self, dialogue_data: dict) -> "Dialogue": + return self._dialogues.dialogue_class.from_json( + self._dialogues.message_class, dialogue_data + ) + + @staticmethod + 
def _dump_dialogues( + dialogues: Iterable[Dialogue], collection: SyncCollection + ) -> None: + """Dump dialogues to collection.""" + for dialogue in dialogues: + collection.put(str(dialogue.dialogue_label), dialogue.json()) + + def _load(self) -> None: + """Dump dialogues and incomplete dialogues labels from the generic storage.""" + if ( + not self._active_dialogues_collection + or not self._terminal_dialogues_collection + ): + return # pragma: nocover + + self._load_incomplete_dialogues_labels(self._active_dialogues_collection) + self._load_active_dialogues() + self._load_terminated_dialogues() + + def _load_active_dialogues(self) -> None: + """Load active dialogues from storage.""" + for dialogue in self._load_dialogues(self._active_dialogues_collection): + self.add(dialogue) + + def _load_terminated_dialogues(self) -> None: + """Load terminated dialogues from storage.""" + for dialogue in self._load_dialogues(self._terminal_dialogues_collection): + self._add_terminal_state_dialogue(dialogue) + + def _incomplete_dialogues_labels_to_json(self) -> List: + """Dump incomplete_to_complete_dialogue_labels to json friendly dict.""" + return [ + [k.json, v.json] + for k, v in self._incomplete_to_complete_dialogue_labels.items() + ] + + def _set_incomplete_dialogues_labels_from_json(self, data: List) -> None: + """Set incomplete_to_complete_dialogue_labels from json friendly dict.""" + self._incomplete_to_complete_dialogue_labels = { + DialogueLabel.from_json(k): DialogueLabel.from_json(v) for k, v in data + } + + def setup(self) -> None: + """Set up dialogue storage.""" + if not self._skill_component: # pragma: nocover + return + self._load() + + def teardown(self) -> None: + """Tear down dialogue storage.""" + if not self._skill_component: # pragma: nocover + return + self._dump() + + def remove(self, dialogue_label: DialogueLabel) -> None: + """Remove dialogue from memory and persistent storage.""" + if dialogue_label in self._terminal_state_dialogues_labels: + collection = self._terminal_dialogues_collection + else: + collection = self._active_dialogues_collection + + super().remove(dialogue_label) + + if collection: + collection.remove(str(dialogue_label)) + + +class PersistDialoguesStorageWithOffloading(PersistDialoguesStorage): + """Dialogue Storage with dialogues offloading.""" + + def dialogue_terminal_state_callback(self, dialogue: "Dialogue") -> None: + """Call on dialogue reaches terminal staste.""" + if ( + not self.is_terminal_dialogues_kept + or not self._terminal_dialogues_collection + ): # pragma: nocover + super().dialogue_terminal_state_callback(dialogue) + return + + # do offloading + # push to storage + self._terminal_dialogues_collection.put( + str(dialogue.dialogue_label), dialogue.json() + ) + # remove from memory + self.remove(dialogue.dialogue_label) + + def get(self, dialogue_label: DialogueLabel) -> Optional[Dialogue]: + """Try to get dialogue by label from memory or persists storage.""" + dialogue = super().get(dialogue_label) + if dialogue: + return dialogue + + dialogue = self._get_dialogue_from_collection( + dialogue_label, self._terminal_dialogues_collection + ) + if dialogue: + # get dialogue from terminal state collection and cache it + self._add_terminal_state_dialogue(dialogue) + return dialogue + return None + + def _get_dialogue_from_collection( + self, dialogue_label: "DialogueLabel", collection: SyncCollection + ) -> Optional[Dialogue]: + """ + Get dialogue by label from collection. 
+ + :param dialogue_label: label for lookup + :param collection: collection with dialogues + :return: dialogue if exists + """ + if not collection: + return None + dialogue_data = collection.get(str(dialogue_label)) + if not dialogue_data: + return None + dialogue_data = cast(Dict, dialogue_data) + return self._dialogue_from_json(dialogue_data) + + def _load_terminated_dialogues(self) -> None: + """Skip loading terminated dialogues, as they are offloaded to storage.""" + + def _get_dialogues_by_address_from_collection( + self, address: Address, collection: SyncCollection + ) -> List["Dialogue"]: + """ + Get all dialogues with opponent address from specified collection. + + :param address: address for lookup. + :param collection: collection to get dialogues from. + + :return: list of dialogues + """ + if not collection: + return [] + + return [ + self._dialogue_from_json(cast(Dict, i[1])) + for i in collection.find("dialogue_label.dialogue_opponent_addr", address) + ] + + def get_dialogues_with_counterparty(self, counterparty: Address) -> List[Dialogue]: + """ + Get the dialogues by address. + + :param counterparty: the counterparty + :return: The dialogues with the counterparty. + """ + dialogues = ( + self._get_dialogues_by_address_from_collection( + counterparty, self._active_dialogues_collection + ) + + self._get_dialogues_by_address_from_collection( + counterparty, self._terminal_dialogues_collection + ) + + super().get_dialogues_with_counterparty(counterparty) + ) + return self._unique_dialogues_by_label(dialogues) + + @staticmethod + def _unique_dialogues_by_label(dialogues: List[Dialogue]) -> List[Dialogue]: + """Filter list of dialogues by unique dialogue label.""" + return list( + {dialogue.dialogue_label: dialogue for dialogue in dialogues}.values() + ) + + @property + def dialogues_in_terminal_state(self) -> List["Dialogue"]: + """Get all dialogues in terminal state.""" + dialogues = super().dialogues_in_terminal_state + list( + self._load_dialogues(self._terminal_dialogues_collection) + ) + return self._unique_dialogues_by_label(dialogues) + + class Dialogues: """The dialogues class keeps track of all dialogues for an agent.""" + _keep_terminal_state_dialogues = False + def __init__( self, self_address: Address, @@ -958,22 +1479,25 @@ def __init__( message_class: Type[Message], dialogue_class: Type[Dialogue], role_from_first_message: Callable[[Message, Address], Dialogue.Role], + keep_terminal_state_dialogues: Optional[bool] = None, ) -> None: """ Initialize dialogues.
:param self_address: the address of the entity for whom dialogues are maintained :param end_states: the list of dialogue endstates + :param keep_terminal_state_dialogues: specify whether dialogues in terminal state should be kept or not + :return: None """ - self._dialogues_by_dialogue_label = {} # type: Dict[DialogueLabel, Dialogue] - self._dialogue_by_address = {} # type: Dict[Address, List[Dialogue]] - self._incomplete_to_complete_dialogue_labels = ( - {} - ) # type: Dict[DialogueLabel, DialogueLabel] + + self._dialogues_storage = PersistDialoguesStorageWithOffloading(self) self._self_address = self_address self._dialogue_stats = DialogueStats(end_states) + if keep_terminal_state_dialogues is not None: + self._keep_terminal_state_dialogues = keep_terminal_state_dialogues + enforce( issubclass(message_class, Message), "message_class is not a subclass of Message.", @@ -1021,9 +1545,9 @@ def __init__( self._role_from_first_message = role_from_first_message @property - def dialogues(self) -> Dict[DialogueLabel, Dialogue]: - """Get dictionary of dialogues in which the agent engages.""" - return self._dialogues_by_dialogue_label + def is_keep_dialogues_in_terminal_state(self) -> bool: + """Check whether dialogues in terminal state are required to be kept.""" + return self._keep_terminal_state_dialogues @property def self_address(self) -> Address: @@ -1065,7 +1589,7 @@ def get_dialogues_with_counterparty(self, counterparty: Address) -> List[Dialogu :param counterparty: the counterparty :return: The dialogues with the counterparty. """ - return self._dialogue_by_address.get(counterparty, []) + return self._dialogues_storage.get_dialogues_with_counterparty(counterparty) def _is_message_by_self(self, message: Message) -> bool: """ @@ -1178,7 +1702,7 @@ def _create_dialogue( try: dialogue._update(initial_message) # pylint: disable=protected-access except InvalidDialogueMessage as e: - self._dialogues_by_dialogue_label.pop(dialogue.dialogue_label) + self._dialogues_storage.remove(dialogue.dialogue_label) raise SyntaxError( "Cannot create a dialogue with the specified performative and contents."
) from e @@ -1250,7 +1774,7 @@ def update(self, message: Message) -> Optional[Dialogue]: if ( is_new_dialogue ): # remove the newly created dialogue if the initial message is invalid - self._dialogues_by_dialogue_label.pop(dialogue.dialogue_label) + self._dialogues_storage.remove(dialogue.dialogue_label) else: # couldn't find the dialogue referenced by the message result = None @@ -1280,12 +1804,13 @@ def _complete_dialogue_reference(self, message: Message) -> None: incomplete_dialogue_reference, message.sender, self.self_address, ) - if ( - incomplete_dialogue_label in self.dialogues - and incomplete_dialogue_label - not in self._incomplete_to_complete_dialogue_labels - ): - dialogue = self.dialogues.pop(incomplete_dialogue_label) + if self._dialogues_storage.is_dialogue_present( + incomplete_dialogue_label + ) and not self._dialogues_storage.is_in_incomplete(incomplete_dialogue_label): + dialogue = self._dialogues_storage.get(incomplete_dialogue_label) + if not dialogue: # pragma: nocover + raise ValueError("no dialogue found") + self._dialogues_storage.remove(incomplete_dialogue_label) final_dialogue_label = DialogueLabel( complete_dialogue_reference, incomplete_dialogue_label.dialogue_opponent_addr, @@ -1294,10 +1819,10 @@ def _complete_dialogue_reference(self, message: Message) -> None: dialogue._update_dialogue_label( # pylint: disable=protected-access final_dialogue_label ) - self.dialogues.update({dialogue.dialogue_label: dialogue}) - self._incomplete_to_complete_dialogue_labels[ - incomplete_dialogue_label - ] = final_dialogue_label + self._dialogues_storage.add(dialogue) + self._dialogues_storage.set_incomplete_dialogue( + incomplete_dialogue_label, final_dialogue_label + ) def get_dialogue(self, message: Message) -> Optional[Dialogue]: """ @@ -1341,10 +1866,7 @@ def _get_latest_label(self, dialogue_label: DialogueLabel) -> DialogueLabel: :param dialogue_label: the dialogue label :return dialogue_label: the dialogue label """ - result = self._incomplete_to_complete_dialogue_labels.get( - dialogue_label, dialogue_label - ) - return result + return self._dialogues_storage.get_latest_label(dialogue_label) def _get_dialogue_from_label( self, dialogue_label: DialogueLabel @@ -1355,8 +1877,7 @@ def _get_dialogue_from_label( :param dialogue_label: the dialogue label :return: the dialogue if present """ - result = self.dialogues.get(dialogue_label, None) - return result + return self._dialogues_storage.get(dialogue_label) def _create_self_initiated( self, @@ -1434,19 +1955,18 @@ def _create( :return: the created dialogue """ enforce( - incomplete_dialogue_label - not in self._incomplete_to_complete_dialogue_labels, + not self._dialogues_storage.is_in_incomplete(incomplete_dialogue_label), "Incomplete dialogue label already present.", ) if complete_dialogue_label is None: dialogue_label = incomplete_dialogue_label else: - self._incomplete_to_complete_dialogue_labels[ - incomplete_dialogue_label - ] = complete_dialogue_label + self._dialogues_storage.set_incomplete_dialogue( + incomplete_dialogue_label, complete_dialogue_label + ) dialogue_label = complete_dialogue_label enforce( - dialogue_label not in self.dialogues, + not self._dialogues_storage.is_dialogue_present(dialogue_label), "Dialogue label already present in dialogues.", ) dialogue = self._dialogue_class( @@ -1455,15 +1975,7 @@ def _create( self_address=self.self_address, role=role, ) - self.dialogues.update({dialogue_label: dialogue}) - if ( - self._dialogue_by_address.get(dialogue_label.dialogue_opponent_addr, None) - is None 
- ): - self._dialogue_by_address[dialogue_label.dialogue_opponent_addr] = [] - self._dialogue_by_address[dialogue_label.dialogue_opponent_addr].append( - dialogue - ) + self._dialogues_storage.add(dialogue) return dialogue @staticmethod @@ -1474,3 +1986,17 @@ def _generate_dialogue_nonce() -> str: :return: the next nonce """ return secrets.token_hex(DialogueLabel.NONCE_BYTES_NB) + + def setup(self) -> None: + """Set up.""" + self._dialogues_storage.setup() + super_obj = super() + if hasattr(super_obj, "setup"): # pragma: nocover + super_obj.setup() # type: ignore # pylint: disable=no-member + + def teardown(self) -> None: + """Tear down.""" + self._dialogues_storage.teardown() + super_obj = super() + if hasattr(super_obj, "teardown"): # pragma: nocover + super_obj.teardown() # type: ignore # pylint: disable=no-member diff --git a/aea/protocols/generator/base.py b/aea/protocols/generator/base.py index 67746d3469..fa5e428fbd 100644 --- a/aea/protocols/generator/base.py +++ b/aea/protocols/generator/base.py @@ -636,6 +636,8 @@ def _message_class_str(self) -> str: ) # slots + cls_str += self.indent + "__slots__: Tuple[str, ...] = tuple()\n" + cls_str += self.indent + "class _SlotsCls():\n" self._change_indent(1) cls_str += self.indent + "__slots__ = (\n" @@ -1114,6 +1116,11 @@ def _dialogue_class_str(self) -> str: cls_str += self.indent + "{" + end_states_str + "}" cls_str += self.indent + ")\n\n" + cls_str += ( + self.indent + + f"_keep_terminal_state_dialogues = {repr(self.spec.keep_terminal_state_dialogues)}\n\n" + ) + cls_str += self.indent + "def __init__(\n" self._change_indent(1) cls_str += self.indent + "self,\n" diff --git a/aea/protocols/generator/common.py b/aea/protocols/generator/common.py index 7a9f0a5af5..24c8304c48 100644 --- a/aea/protocols/generator/common.py +++ b/aea/protocols/generator/common.py @@ -26,7 +26,11 @@ from typing import Tuple from aea.configurations.base import ProtocolSpecification -from aea.configurations.constants import DEFAULT_PROTOCOL_CONFIG_FILE, PACKAGES +from aea.configurations.constants import ( + DEFAULT_PROTOCOL_CONFIG_FILE, + LIBPROTOC_VERSION, + PACKAGES, +) from aea.configurations.loader import ConfigLoader @@ -322,6 +326,15 @@ def check_prerequisites() -> None: "Cannot find protocol buffer compiler! To install, please follow this link: https://developers.google.com/protocol-buffers/" ) + result = subprocess.run( # nosec + ["protoc", "--version"], stdout=subprocess.PIPE, check=True + ) + result_str = result.stdout.decode("utf-8") + if LIBPROTOC_VERSION not in result_str: + raise FileNotFoundError( # pragma: nocover + f"Invalid version for protoc. Found: {result_str}. Required: {LIBPROTOC_VERSION}." 
+ ) + def load_protocol_specification(specification_path: str) -> ProtocolSpecification: """ diff --git a/aea/protocols/generator/extract_specification.py b/aea/protocols/generator/extract_specification.py index 550c3d5dc1..fcdb59b835 100644 --- a/aea/protocols/generator/extract_specification.py +++ b/aea/protocols/generator/extract_specification.py @@ -164,6 +164,7 @@ def __init__(self) -> None: self.terminal_performatives = list() # type: List[str] self.roles = list() # type: List[str] self.end_states = list() # type: List[str] + self.keep_terminal_state_dialogues = False # type: bool self.typing_imports = { "Set": True, @@ -258,4 +259,10 @@ def extract( spec.end_states = cast( List[str], protocol_specification.dialogue_config["end_states"] ) + spec.keep_terminal_state_dialogues = cast( + bool, + protocol_specification.dialogue_config.get( + "keep_terminal_state_dialogues", False + ), + ) return spec diff --git a/aea/protocols/generator/validate.py b/aea/protocols/generator/validate.py index 856ca02b9c..1f0b2d6f26 100644 --- a/aea/protocols/generator/validate.py +++ b/aea/protocols/generator/validate.py @@ -19,7 +19,7 @@ """This module validates a protocol specification.""" import re -from typing import Dict, List, Optional, Set, Tuple, cast +from typing import Any, Dict, List, Optional, Set, Tuple, cast from aea.configurations.base import ProtocolSpecification from aea.protocols.generator.common import ( @@ -43,6 +43,15 @@ ROLE_REGEX_PATTERN = "^[a-zA-Z0-9]+$|^[a-zA-Z0-9]+(_?[a-zA-Z0-9]+)+$" END_STATE_REGEX_PATTERN = "^[a-zA-Z0-9]+$|^[a-zA-Z0-9]+(_?[a-zA-Z0-9]+)+$" +DIALOGUE_SECTION_REQUIRED_FIELDS = [ + "initiation", + "reply", + "termination", + "roles", + "end_states", + "keep_terminal_state_dialogues", +] + def _is_reserved_name(content_name: str) -> bool: """ @@ -283,6 +292,7 @@ def _validate_performatives(performative: str) -> Tuple[bool, str]: :param performative: a performative. :return: Boolean result, and associated message. """ + # check performative is not a reserved name if _is_reserved_name(performative): return ( False, @@ -291,6 +301,7 @@ def _validate_performatives(performative: str) -> Tuple[bool, str]: ), ) + # check performative's format if not _is_valid_regex(PERFORMATIVE_REGEX_PATTERN, performative): return ( False, @@ -311,6 +322,7 @@ def _validate_content_name(content_name: str, performative: str) -> Tuple[bool, :return: Boolean result, and associated message. """ + # check content name's format if not _is_valid_regex(CONTENT_NAME_REGEX_PATTERN, content_name): return ( False, @@ -319,6 +331,7 @@ def _validate_content_name(content_name: str, performative: str) -> Tuple[bool, ), ) + # check content name is not a reserved name if _is_reserved_name(content_name): return ( False, @@ -375,6 +388,9 @@ def _validate_speech_acts_section( custom_types_set = set() performatives_set = set() + content_names_types: Dict[str, Tuple[str, str]] = dict() + + # check that speech-acts definition is not empty if len(protocol_specification.speech_acts.read_all()) == 0: return ( False, @@ -418,6 +434,17 @@ def _validate_speech_acts_section( None, ) + # check type of content_type + if not isinstance(content_type, str): + return ( + False, + "Invalid type for '{}'. Expected str. 
Found {}.".format( + content_name, type(content_type) + ), + None, + None, + ) + # Validate content type ( result_content_type_validation, @@ -431,6 +458,26 @@ def _validate_speech_acts_section( None, ) + # check content name isn't repeated with a different type + if content_name in content_names_types: + last_performative = content_names_types[content_name][0] + last_content_type = content_names_types[content_name][1] + if last_content_type != content_type: + return ( + False, + "Content '{}' with type '{}' under performative '{}' is already defined under performative '{}' with a different type ('{}').".format( + content_name, + content_type, + performative, + last_performative, + last_content_type, + ), + None, + None, + ) + + content_names_types[content_name] = (performative, content_type) + if _is_valid_ct(content_type): custom_types_set.add(content_type.strip()) @@ -444,7 +491,7 @@ def _validate_protocol_buffer_schema_code_snippets( Evaluate whether the protobuf code snippet section of a protocol specification is valid. :param protocol_specification: a protocol specification. - :param custom_types_set: set of all custom types in the dialogue. + :param custom_types_set: set of all custom types in the protocol. :return: Boolean result, and associated message. """ @@ -452,6 +499,7 @@ def _validate_protocol_buffer_schema_code_snippets( protocol_specification.protobuf_snippets is not None and protocol_specification.protobuf_snippets != "" ): + # check all custom types are actually used in speech-acts definition for custom_type in protocol_specification.protobuf_snippets.keys(): if custom_type not in custom_types_set: return ( @@ -462,6 +510,7 @@ def _validate_protocol_buffer_schema_code_snippets( ) custom_types_set.remove(custom_type) + # check that no custom type already used in speech-acts definition is missing if len(custom_types_set) != 0: return ( False, @@ -473,6 +522,27 @@ def _validate_protocol_buffer_schema_code_snippets( return True, "Protobuf code snippet section is valid." +def _validate_field_existence(dialogue_config) -> Tuple[bool, str]: + """ + Evaluate whether the dialogue section of a protocol specification contains the required fields. + + :param dialogue_config: the dialogue section of a protocol specification. + + :return: Boolean result, and associated message. + """ + # check required fields exist + for required_field in DIALOGUE_SECTION_REQUIRED_FIELDS: + if required_field not in dialogue_config: + return ( + False, + "Missing required field '{}' in the dialogue section of the protocol specification.".format( + required_field + ), + ) + + return True, "Dialogue section has all the required fields." + + def _validate_initiation( initiation: List[str], performatives_set: Set[str] ) -> Tuple[bool, str]: @@ -484,12 +554,23 @@ def _validate_initiation( :return: Boolean result, and associated message. """ + # check type + if not isinstance(initiation, list): + return ( + False, + "Invalid type for initiation. Expected list. 
Found '{}'.".format( + type(initiation) + ), + ) + + # check initiation is not empty/None if len(initiation) == 0 or initiation is None: return ( False, "At least one initial performative for this dialogue must be specified.", ) + # check performatives are previously defined for performative in initiation: if performative not in performatives_set: return ( @@ -503,41 +584,83 @@ def _validate_initiation( def _validate_reply( - reply: Dict[str, List[str]], performatives_set: Set[str] -) -> Tuple[bool, str]: + reply_definition: Dict[str, List[str]], performatives_set: Set[str] +) -> Tuple[bool, str, Optional[Set[str]]]: """ - Evaluate whether the reply structure in a protocol specification is valid. + Evaluate whether the reply definition in a protocol specification is valid. - :param reply: Reply structure of a dialogue. + :param reply_definition: Reply structure of a dialogue. :param performatives_set: set of all performatives in the dialogue. :return: Boolean result, and associated message. """ + # check type + if not isinstance(reply_definition, dict): + return ( + False, + "Invalid type for the reply definition. Expected dict. Found '{}'.".format( + type(reply_definition) + ), + None, + ) + performatives_set_2 = performatives_set.copy() + terminal_performatives_from_reply = set() - for performative in reply.keys(): + for performative, replies in reply_definition.items(): + # check only previously defined performatives are included in the reply definition if performative not in performatives_set_2: return ( False, "Performative '{}' specified in \"reply\" is not defined in the protocol's speech-acts.".format( performative, ), + None, + ) + + # check the type of replies + if not isinstance(replies, list): + return ( + False, + "Invalid type for replies of performative {}. Expected list. Found '{}'.".format( + performative, type(replies) + ), + None, ) + + # check all replies are performatives which are previously defined in the speech-acts definition + for reply in replies: + if reply not in performatives_set: + return ( + False, + "Performative '{}' in the list of replies for '{}' is not defined in speech-acts.".format( + reply, performative + ), + None, + ) + performatives_set_2.remove(performative) + if len(replies) == 0: + terminal_performatives_from_reply.add(performative) + + # check all previously defined performatives are included in the reply definition if len(performatives_set_2) != 0: return ( False, "No reply is provided for the following performatives: {}".format( performatives_set_2, ), + None, ) - return True, "Reply structure is valid." + return True, "Reply structure is valid.", terminal_performatives_from_reply def _validate_termination( - termination: List[str], performatives_set: Set[str] + termination: List[str], + performatives_set: Set[str], + terminal_performatives_from_reply: Set[str], ) -> Tuple[bool, str]: """ Evaluate whether termination field in a protocol specification is valid. @@ -547,12 +670,23 @@ def _validate_termination( :return: Boolean result, and associated message. """ + # check type + if not isinstance(termination, list): + return ( + False, + "Invalid type for termination. Expected list. 
Found '{}'.".format( + type(termination) + ), + ) + + # check termination is not empty/None if len(termination) == 0 or termination is None: return ( False, "At least one terminal performative for this dialogue must be specified.", ) + # check terminal performatives are previously defined for performative in termination: if performative not in performatives_set: return ( @@ -562,16 +696,54 @@ def _validate_termination( ), ) + # check that there are no repetitive performatives in termination + number_of_duplicates = len(termination) - len(set(termination)) + if number_of_duplicates > 0: + return ( + False, + 'There are {} duplicate performatives in "termination".'.format( + number_of_duplicates, + ), + ) + + # check terminal performatives have no replies + for performative in termination: + if performative not in terminal_performatives_from_reply: + return ( + False, + 'The terminal performative \'{}\' specified in "termination" is assigned replies in "reply".'.format( + performative, + ), + ) + + # check performatives with no replies are specified as terminal performatives + for performative in terminal_performatives_from_reply: + if performative not in termination: + return ( + False, + "The performative '{}' has no replies but is not listed as a terminal performative in \"termination\".".format( + performative, + ), + ) + return True, "Terminal messages are valid." -def _validate_roles(roles: Set[str]) -> Tuple[bool, str]: +def _validate_roles(roles: Dict[str, Any]) -> Tuple[bool, str]: """ Evaluate whether roles field in a protocol specification is valid. :param roles: Set of roles of a dialogue. :return: Boolean result, and associated message. """ + # check type + if not isinstance(roles, dict): + return ( + False, + "Invalid type for roles. Expected dict. Found '{}'.".format(type(roles)), + ) + + # check number of roles if not 1 <= len(roles) <= 2: return ( False, @@ -580,6 +752,7 @@ def _validate_roles(roles: Set[str]) -> Tuple[bool, str]: ), ) + # check each role's format for role in roles: if not _is_valid_regex(ROLE_REGEX_PATTERN, role): return ( @@ -599,6 +772,16 @@ def _validate_end_states(end_states: List[str]) -> Tuple[bool, str]: :param end_states: List of end states of a dialogue. :return: Boolean result, and associated message. """ + # check type + if not isinstance(end_states, list): + return ( + False, + "Invalid type for roles. Expected list. Found '{}'.".format( + type(end_states) + ), + ) + + # check each end_state's format for end_state in end_states: if not _is_valid_regex(END_STATE_REGEX_PATTERN, end_state): return ( @@ -611,6 +794,28 @@ def _validate_end_states(end_states: List[str]) -> Tuple[bool, str]: return True, "Dialogue end_states are valid." +def _validate_keep_terminal(keep_terminal_state_dialogues: bool) -> Tuple[bool, str]: + """ + Evaluate whether keep_terminal_state_dialogues field in a protocol specification is valid. + + :param keep_terminal_state_dialogues: the value of keep_terminal_state_dialogues. + :return: Boolean result, and associated message. + """ + # check the type of keep_terminal_state_dialogues's value + if ( + type(keep_terminal_state_dialogues) # pylint: disable=unidiomatic-typecheck + != bool + ): + return ( + False, + "Invalid type for keep_terminal_state_dialogues. Expected bool. Found {}.".format( + type(keep_terminal_state_dialogues) + ), + ) + + return True, "Dialogue keep_terminal_state_dialogues is valid." 
+ + def _validate_dialogue_section( protocol_specification: ProtocolSpecification, performatives_set: Set[str] ) -> Tuple[bool, str]: @@ -626,6 +831,16 @@ def _validate_dialogue_section( protocol_specification.dialogue_config != {} and protocol_specification.dialogue_config is not None ): + # validate required fields exist + ( + result_field_existence_validation, + msg_field_existence_validation, + ) = _validate_field_existence( + cast(List[str], protocol_specification.dialogue_config), + ) + if not result_field_existence_validation: + return result_field_existence_validation, msg_field_existence_validation + # Validate initiation result_initiation_validation, msg_initiation_validation = _validate_initiation( cast(List[str], protocol_specification.dialogue_config["initiation"]), @@ -635,7 +850,11 @@ def _validate_dialogue_section( return result_initiation_validation, msg_initiation_validation # Validate reply - result_reply_validation, msg_reply_validation = _validate_reply( + ( + result_reply_validation, + msg_reply_validation, + terminal_performatives_from_reply, + ) = _validate_reply( cast(Dict[str, List[str]], protocol_specification.dialogue_config["reply"]), performatives_set, ) @@ -643,19 +862,23 @@ def _validate_dialogue_section( return result_reply_validation, msg_reply_validation # Validate termination + terminal_performatives_from_reply = cast( + Set[str], terminal_performatives_from_reply + ) ( result_termination_validation, msg_termination_validation, ) = _validate_termination( cast(List[str], protocol_specification.dialogue_config["termination"]), performatives_set, + terminal_performatives_from_reply, ) if not result_termination_validation: return result_termination_validation, msg_termination_validation # Validate roles result_roles_validation, msg_roles_validation = _validate_roles( - cast(Set[str], protocol_specification.dialogue_config["roles"]) + cast(Dict[str, Any], protocol_specification.dialogue_config["roles"]) ) if not result_roles_validation: return result_roles_validation, msg_roles_validation @@ -667,6 +890,19 @@ def _validate_dialogue_section( if not result_end_states_validation: return result_end_states_validation, msg_end_states_validation + # Validate keep_terminal_state_dialogues + ( + result_keep_terminal_validation, + msg_keep_terminal_validation, + ) = _validate_keep_terminal( + cast( + bool, + protocol_specification.dialogue_config["keep_terminal_state_dialogues"], + ) + ) + if not result_keep_terminal_validation: + return result_keep_terminal_validation, msg_keep_terminal_validation + return True, "Dialogue section of the protocol specification is valid." diff --git a/aea/protocols/scaffold/protocol.yaml b/aea/protocols/scaffold/protocol.yaml index 479b8e7d01..9d3c3a92da 100644 --- a/aea/protocols/scaffold/protocol.yaml +++ b/aea/protocols/scaffold/protocol.yaml @@ -4,7 +4,7 @@ version: 0.1.0 type: protocol description: The scaffold protocol scaffolds a protocol to be implemented by the developer. 
license: Apache-2.0 -aea_version: '>=0.7.0, <0.8.0' +aea_version: '>=0.8.0, <0.9.0' fingerprint: __init__.py: Qmc9Ln8THrWmwou4nr3Acag7vcZ1fv8v5oRSkCWtv1aH6t message.py: QmWPrVTSHeKANCaVA4VaQyMGLix7yiMALbytsKZppAG2VU diff --git a/aea/runtime.py b/aea/runtime.py index a53701d254..6f07805152 100644 --- a/aea/runtime.py +++ b/aea/runtime.py @@ -17,7 +17,6 @@ # # ------------------------------------------------------------------------------ """This module contains the implementation of runtime for economic agent (AEA).""" - import asyncio from asyncio.events import AbstractEventLoop from concurrent.futures._base import CancelledError @@ -33,6 +32,7 @@ from aea.helpers.async_utils import Runnable from aea.helpers.exception_policy import ExceptionPolicyEnum from aea.helpers.logging import WithLogger, get_logger +from aea.helpers.storage.generic_storage import Storage from aea.multiplexer import AsyncMultiplexer from aea.skills.tasks import TaskManager @@ -81,10 +81,24 @@ def __init__( self._multiplexer: AsyncMultiplexer = self._get_multiplexer_instance() self._task_manager = TaskManager() self._decision_maker: Optional[DecisionMaker] = None + self._storage: Optional[Storage] = self._get_storage(agent) self._loop_mode = loop_mode or self.DEFAULT_RUN_LOOP self.main_loop: BaseAgentLoop = self._get_main_loop_instance(self._loop_mode) + @staticmethod + def _get_storage(agent) -> Optional[Storage]: + """Get storage instance if storage_uri is provided.""" + if agent.storage_uri: + # threaded has to always be True, because synchronous operations are supported + return Storage(agent.storage_uri, threaded=True) + return None # pragma: nocover + + @property + def storage(self) -> Optional[Storage]: + """Get optional storage.""" + return self._storage + @property def loop_mode(self) -> str: # pragma: nocover """Get current loop mode.""" @@ -270,6 +284,10 @@ async def stop_runtime(self) -> None: await self.main_loop.wait_completed() self._teardown() + if self._storage is not None: + self._storage.stop() + await self._storage.wait_completed() + self.multiplexer.stop() await self.multiplexer.wait_completed() self.logger.debug("Runtime loop stopped!") @@ -277,7 +295,15 @@ async def stop_runtime(self) -> None: async def run_runtime(self) -> None: """Run agent and starts multiplexer.""" self._state.set(RuntimeStates.starting) - await asyncio.gather(self._start_multiplexer(), self._start_agent_loop()) + await asyncio.gather( + self._start_multiplexer(), self._start_agent_loop(), self._start_storage() + ) + + async def _start_storage(self) -> None: + """Start storage component.""" + if self._storage is not None: + self._storage.start() + await self._storage.wait_completed() async def _start_multiplexer(self) -> None: """Call multiplexer connect asynchronous way.""" @@ -292,17 +318,20 @@ async def _start_multiplexer(self) -> None: async def _start_agent_loop(self) -> None: """Start agent main loop asynchronous way.""" - self.logger.debug("[{}]: Runtime started".format(self._agent.name)) + self.logger.debug("[{}] Runtime started".format(self._agent.name)) await self.multiplexer.connection_status.wait(ConnectionStates.connected) - self.logger.debug("[{}]: Multiplexer connected.".format(self._agent.name)) + self.logger.debug("[{}] Multiplexer connected.".format(self._agent.name)) + if self.storage: + await self.storage.wait_connected() + self.logger.debug("[{}] Storage connected.".format(self._agent.name)) self.task_manager.start() if self._decision_maker is not None: # pragma: nocover self.decision_maker.start() -
self.logger.debug("[{}]: Calling setup method...".format(self._agent.name)) + self.logger.debug("[{}] Calling setup method...".format(self._agent.name)) self._agent.setup() - self.logger.debug("[{}]: Run main loop...".format(self._agent.name)) + self.logger.debug("[{}] Run main loop...".format(self._agent.name)) self.main_loop.start() self._state.set(RuntimeStates.running) try: diff --git a/aea/skills/base.py b/aea/skills/base.py index f813543177..b00df5953b 100644 --- a/aea/skills/base.py +++ b/aea/skills/base.py @@ -16,9 +16,7 @@ # limitations under the License. # # ------------------------------------------------------------------------------ - """This module contains the base classes for the skills.""" - import datetime import inspect import logging @@ -52,6 +50,7 @@ ) from aea.helpers.base import _get_aea_logger_name_prefix, load_module from aea.helpers.logging import AgentLoggerAdapter +from aea.helpers.storage.generic_storage import Storage from aea.multiplexer import MultiplexerStatus, OutBox from aea.protocols.base import Message from aea.skills.tasks import TaskManager @@ -181,6 +180,11 @@ def outbox(self) -> OutBox: """Get outbox.""" return self._get_agent_context().outbox + @property + def storage(self) -> Optional[Storage]: + """Get optional storage for agent.""" + return self._get_agent_context().storage + @property def message_in_queue(self) -> Queue: """Get message in queue.""" @@ -315,6 +319,9 @@ def setup(self) -> None: :return: None """ + super_obj = super() + if hasattr(super_obj, "setup"): + super_obj.setup() # type: ignore # pylint: disable=no-member @abstractmethod def teardown(self) -> None: @@ -323,6 +330,9 @@ def teardown(self) -> None: :return: None """ + super_obj = super() + if hasattr(super_obj, "teardown"): + super_obj.teardown() # type: ignore # pylint: disable=no-member @classmethod @abstractmethod @@ -567,11 +577,41 @@ def parse_module( # pylint: disable=arguments-differ class Model(SkillComponent, ABC): """This class implements an abstract model.""" + def __init__( + self, + name: str, + skill_context: SkillContext, + configuration: Optional[SkillComponentConfiguration] = None, + keep_terminal_state_dialogues: Optional[bool] = None, + **kwargs, + ) -> None: + """ + Initialize a model. + + :param name: the name of the component. + :param configuration: the configuration for the component. + :param skill_context: the skill context. + :param keep_terminal_state_dialogues: specify do dialogues in terminal state should stay or not + + :return: None + """ + super().__init__(name, skill_context, configuration=configuration, **kwargs) + + # used by dialogues if mixed with the Model + if keep_terminal_state_dialogues is not None: + self._keep_terminal_state_dialogues = keep_terminal_state_dialogues + def setup(self) -> None: """Set the class up.""" + super_obj = super() + if hasattr(super_obj, "setup"): + super_obj.setup() # type: ignore # pylint: disable=no-member def teardown(self) -> None: """Tear the class down.""" + super_obj = super() + if hasattr(super_obj, "teardown"): + super_obj.teardown() # type: ignore # pylint: disable=no-member @classmethod def parse_module( # pylint: disable=arguments-differ diff --git a/aea/skills/scaffold/skill.yaml b/aea/skills/scaffold/skill.yaml index 43db789633..660f404339 100644 --- a/aea/skills/scaffold/skill.yaml +++ b/aea/skills/scaffold/skill.yaml @@ -4,7 +4,7 @@ version: 0.1.0 type: skill description: The scaffold skill is a scaffold for your own skill implementation. 
license: Apache-2.0 -aea_version: '>=0.7.0, <0.8.0' +aea_version: '>=0.8.0, <0.9.0' fingerprint: __init__.py: QmYRssFqDqb3uWDvfoXy93avisjKRx2yf9SbAQXnkRj1QB behaviours.py: QmNgDDAmBzWBeBF7e5gUCny38kdqVVfpvHGaAZVZcMtm9Q diff --git a/aea/test_tools/test_cases.py b/aea/test_tools/test_cases.py index 7a36ede2f5..03073ae5b8 100644 --- a/aea/test_tools/test_cases.py +++ b/aea/test_tools/test_cases.py @@ -65,8 +65,6 @@ write_envelope_to_file, ) -from tests.conftest import ROOT_DIR - _default_logger = logging.getLogger(__name__) @@ -87,7 +85,7 @@ class BaseAEATestCase(ABC): # pylint: disable=too-many-public-methods threads: List[Thread] = [] # list of started threads packages_dir_path: Path = Path(DEFAULT_REGISTRY_NAME) use_packages_dir: bool = True - package_registry_src: Path = Path(ROOT_DIR, DEFAULT_REGISTRY_NAME) + package_registry_src_rel: Path = Path(os.getcwd(), packages_dir_path) old_cwd: Path # current working directory path t: Path # temporary directory path current_agent_context: str = "" # the name of the current agent @@ -322,7 +320,7 @@ def is_allowed_diff_in_agent_config( fetched_agent_name = agent_name path_to_fetched_aea = os.path.join(new_cwd, fetched_agent_name) registry_tmp_dir = os.path.join(new_cwd, cls.packages_dir_path) - shutil.copytree(str(cls.package_registry_src), str(registry_tmp_dir)) + shutil.copytree(str(cls.package_registry_src_rel), str(registry_tmp_dir)) with cd(new_cwd): cls.run_cli_command( "fetch", "--local", public_id, "--alias", fetched_agent_name @@ -576,6 +574,23 @@ def add_private_key( "add-key", ledger_api_id, private_key_filepath, cwd=cls._get_cwd() ) + @classmethod + def remove_private_key( + cls, ledger_api_id: str = DEFAULT_LEDGER, connection: bool = False, + ) -> Result: + """ + Remove private key with CLI command. + + Run from agent's directory. + + :param ledger_api_id: ledger API ID. + :param connection: whether or not the private key filepath is for a connection. 
+ + :return: Result + """ + args = ["remove-key", ledger_api_id] + (["--connection"] if connection else []) + return cls.run_cli_command(*args, cwd=cls._get_cwd()) + @classmethod def replace_private_key_in_file( cls, private_key: str, private_key_filepath: str = DEFAULT_PRIVATE_KEY_FILE @@ -827,9 +842,9 @@ def setup_class(cls): cls.t = Path(tempfile.mkdtemp()) cls.change_directory(cls.t) + cls.package_registry_src = cls.old_cwd / cls.package_registry_src_rel if cls.use_packages_dir: registry_tmp_dir = cls.t / cls.packages_dir_path - cls.package_registry_src = cls.old_cwd / cls.packages_dir_path shutil.copytree(str(cls.package_registry_src), str(registry_tmp_dir)) cls.initialize_aea(cls.author) @@ -849,7 +864,6 @@ def teardown_class(cls): cls.use_packages_dir = True cls.agents = set() cls.current_agent_context = "" - cls.package_registry_src = Path(ROOT_DIR, DEFAULT_REGISTRY_NAME) cls.stdout = {} cls.stderr = {} diff --git a/aea/test_tools/test_skill.py b/aea/test_tools/test_skill.py index f55100ba1f..7c47403abb 100644 --- a/aea/test_tools/test_skill.py +++ b/aea/test_tools/test_skill.py @@ -17,7 +17,6 @@ # # ------------------------------------------------------------------------------ """This module contains test case classes based on pytest for AEA skill testing.""" - import asyncio from pathlib import Path from queue import Queue @@ -435,7 +434,6 @@ def setup(cls, **kwargs) -> None: _skill_config_overrides = cast( Dict[str, Any], kwargs.pop("config_overrides", dict()) ) - agent_context = AgentContext( identity=identity, connection_status=cls._multiplexer.connection_status, diff --git a/benchmark/checks/data/05.09.2020_17-49.txt b/benchmark/checks/data/2020.09.05_17-49.txt similarity index 99% rename from benchmark/checks/data/05.09.2020_17-49.txt rename to benchmark/checks/data/2020.09.05_17-49.txt index fbc29613d7..1f818b1e54 100644 --- a/benchmark/checks/data/05.09.2020_17-49.txt +++ b/benchmark/checks/data/2020.09.05_17-49.txt @@ -185,4 +185,4 @@ async 16 rate 6193.666667 578.218048 async 16 mem 76.97526 1.526111 async 16 RTT 12.952551 2.512823 async 16 latency 9.581866 1.340397 -Done! \ No newline at end of file +Done! 
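A hypothetical use of the new `remove_private_key` helper from `aea/test_tools/test_cases.py` above might look as follows; the test class, the flow and the `exit_code` check are illustrative assumptions rather than part of this change:

``` python
from aea.test_tools.test_cases import AEATestCaseEmpty


class TestKeyRotation(AEATestCaseEmpty):
    """Illustrative sketch: add, remove and re-add the default ledger key."""

    def test_rotate_default_key(self):
        self.generate_private_key()         # write the default private key file
        self.add_private_key()              # `aea add-key <default_ledger> <file>`
        result = self.remove_private_key()  # new helper: `aea remove-key <default_ledger>`
        assert result.exit_code == 0        # assumes the returned click Result exposes exit_code
        self.add_private_key()              # register the (possibly rotated) key again
```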
diff --git a/benchmark/checks/data/27.10.2020_mem_usage_report.txt b/benchmark/checks/data/2020.10.27_mem_usage_report.txt similarity index 100% rename from benchmark/checks/data/27.10.2020_mem_usage_report.txt rename to benchmark/checks/data/2020.10.27_mem_usage_report.txt diff --git a/benchmark/checks/data/30.10.2020 optimized messages.txt b/benchmark/checks/data/2020.10.30 optimized messages.txt similarity index 100% rename from benchmark/checks/data/30.10.2020 optimized messages.txt rename to benchmark/checks/data/2020.10.30 optimized messages.txt diff --git a/benchmark/checks/data/2020.12.10_optimized_messages.txt b/benchmark/checks/data/2020.12.10_optimized_messages.txt new file mode 100644 index 0000000000..a461ddbf89 --- /dev/null +++ b/benchmark/checks/data/2020.12.10_optimized_messages.txt @@ -0,0 +1,36 @@ +Performance report for 10.12.2020_09:32 +----------------------------- + + + +Multi agents with http dialogues: number of runs: 100, num_agents: 10, messages: 100 +------------------------------------------------------------------ +runtime mode duration value mean stdev +------------------------------------------------------------------ +async 2 mem 71.147305 0.636357 +async 5 mem 71.937031 0.630627 +async 10 mem 73.255586 0.891906 +async 20 mem 75.449219 0.901333 +async 30 mem 77.292383 0.808147 + +async 50 mem 81.357891 1.194153 + + +Message generation and allocation: number of runs: 100 +------------------------------------------------------------------ +message value mean stdev +------------------------------------------------------------------ +10000 mem 1.514961 0.02531 +10000 time 0.567152 0.070787 +50000 mem 7.804883 0.000856 +50000 time 2.942939 0.233939 +100000 mem 15.384297 0.003956 +100000 time 5.779464 0.321653 + + +Dialogues message processing: number of runs: 100 +------------------------------------------------------------------ +message value mean stdev +------------------------------------------------------------------ +10000 mem 7.255781 0.062602 +10000 time 4.002207 0.275296 diff --git a/benchmark/run_from_branch.sh b/benchmark/run_from_branch.sh index ff82e1cf0e..9627dc2170 100755 --- a/benchmark/run_from_branch.sh +++ b/benchmark/run_from_branch.sh @@ -12,7 +12,7 @@ pip install pipenv # this is to install benchmark dependencies pipenv install --dev --skip-lock # this is to install the AEA in the Pipenv virtual env -pipenv run pip install --upgrade aea[all]=="0.7.5" +pipenv run pip install --upgrade aea[all]=="0.8.0" chmod +x benchmark/checks/run_benchmark.sh echo "Start the experiments." diff --git a/deploy-image/Dockerfile b/deploy-image/Dockerfile index d3115bedbd..db59d2e92f 100644 --- a/deploy-image/Dockerfile +++ b/deploy-image/Dockerfile @@ -13,7 +13,7 @@ ENV PYTHONPATH "$PYTHONPATH:/usr/lib/python3.7/site-packages" RUN apk add --no-cache go RUN pip install --upgrade pip -RUN pip install --upgrade --force-reinstall aea[all]==0.7.5 +RUN pip install --upgrade --force-reinstall aea[all]==0.8.0 # COPY ./packages /home/packages # enable to add packages dir WORKDIR home diff --git a/deploy-image/docker-env.sh b/deploy-image/docker-env.sh index deeb0e6cf4..884dfb7cf9 100755 --- a/deploy-image/docker-env.sh +++ b/deploy-image/docker-env.sh @@ -1,7 +1,7 @@ #!/bin/bash # Swap the following lines if you want to work with 'latest' -DOCKER_IMAGE_TAG=fetchai/aea-deploy:0.7.5 +DOCKER_IMAGE_TAG=fetchai/aea-deploy:0.8.0 # DOCKER_IMAGE_TAG=fetchai/aea-deploy:latest DOCKER_BUILD_CONTEXT_DIR=.. 
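The `storage_uri`/`Storage` wiring in `aea/runtime.py` and the `storage` property exposed on the skill context (both earlier in this diff) are what the dialogue offloading builds on. A minimal, hedged sketch of a skill `Model` using that API directly is shown below; the collection and key names are invented for illustration, and storage is only available when the agent is configured with a `storage_uri`:

``` python
from aea.skills.base import Model


class RunCounter(Model):
    """Illustrative only: persist a counter across agent restarts."""

    COLLECTION_NAME = "run_counter"

    def setup(self) -> None:
        """Increment and store the run counter, if generic storage is configured."""
        if self.context.storage is None:  # storage is None unless storage_uri is set
            return
        collection = self.context.storage.get_sync_collection(self.COLLECTION_NAME)
        previous = collection.get("count") or 0  # assumes ints round-trip through the store
        collection.put("count", int(previous) + 1)
        self.context.logger.info("agent has been started %s time(s)", int(previous) + 1)

    def teardown(self) -> None:
        """Nothing to clean up."""
```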
diff --git a/develop-image/docker-env.sh b/develop-image/docker-env.sh index e3fd053584..2f7298db0e 100755 --- a/develop-image/docker-env.sh +++ b/develop-image/docker-env.sh @@ -1,7 +1,7 @@ #!/bin/bash # Swap the following lines if you want to work with 'latest' -DOCKER_IMAGE_TAG=fetchai/aea-develop:0.7.5 +DOCKER_IMAGE_TAG=fetchai/aea-develop:0.8.0 # DOCKER_IMAGE_TAG=aea-develop:latest DOCKER_BUILD_CONTEXT_DIR=.. diff --git a/docs/acn.md b/docs/acn.md index d903ccaa6f..68a5569dc3 100644 --- a/docs/acn.md +++ b/docs/acn.md @@ -30,7 +30,7 @@ At its core, the ACN implements a distributed hash table (DHT). A DHT is similar - Consistent hashing: decide responsibility for assignment of the DHT key-value storage - Structured overlays: organize the participating peers in a well defined topology for efficient routing -DHT +DHT For the ACN, we use the DHT to store and maintain association between an agent address and the (network) location of its peer. @@ -39,7 +39,7 @@ For the ACN, we use the DHT to store and maintain association between an agent a To satisfy different resource constraints and flexible deployment the ACN is implemented as a multi-tier architecture. As such, it provides an extension of the client-server model. The agent framework exploits this by implementing different tiers as different `Connections`: -DHT +DHT

Note

@@ -56,7 +56,7 @@ An agent can choose which connection to use depending on the resource and trust All communication protocols use public cryptography to ensure security (authentication, confidentiality, and availability) using TLS handshakes with pre-shared public keys. -DHT +DHT
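As a toy illustration of the consistent-hashing idea described in `docs/acn.md` above (not the libp2p implementation the ACN actually uses), responsibility for an agent-address record can be assigned to whichever peer's hashed identifier lies closest to the hash of the record key; all identifiers below are made up:

``` python
import hashlib
from typing import List


def _hash(value: str) -> int:
    """Map a string onto the hash ring (a large integer space)."""
    return int.from_bytes(hashlib.sha256(value.encode()).digest(), "big")


def responsible_peer(agent_address: str, peer_ids: List[str]) -> str:
    """Pick the peer whose hashed id is numerically closest to the key's hash."""
    key = _hash(agent_address)
    return min(peer_ids, key=lambda peer: abs(_hash(peer) - key))


peers = ["peer-a", "peer-b", "peer-c"]
print(responsible_peer("agent_address_1", peers))
```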
diff --git a/docs/aea-vs-mvc.md b/docs/aea-vs-mvc.md index fb2c558e0f..7312b26f9c 100644 --- a/docs/aea-vs-mvc.md +++ b/docs/aea-vs-mvc.md @@ -17,7 +17,7 @@ The AEA framework is based on `Task`: are meant to deal with long running executions and can be thought of as the equivalent of background tasks in traditional web apps. - `Model`: implement business logic and data representation, as such they are similar to the `Model` in MVC. -AEA Skill Components +AEA Skill Components The `View` concept is probably best compared to the `Message` of a given `Protocol` in the AEA framework. Whilst views represent information to the client, messages represent information sent to other agents, other agent components and services. diff --git a/docs/agent-oriented-development.md b/docs/agent-oriented-development.md index fdbe553c11..006da42b37 100644 --- a/docs/agent-oriented-development.md +++ b/docs/agent-oriented-development.md @@ -1,20 +1,20 @@ # Agent-oriented development -In this section, we highlight some of the most fundamental characteristics of the agent-oriented approach to solution development, which might be different from some of the existing paradigms and methodologies you may be used to. We hope that with this, we can guide you towards having the right mindset when you are designing your own agent-based solutions to real world problems. +In this section, we highlight some of the most fundamental characteristics of the agent-oriented approach to solution development, which might be different from some of the existing paradigms and methodologies you may be used to. We hope that with this, we can guide you towards having the right mindset when you are designing your own agent-based solutions to real world problems. ## Decentralisation -Multi-Agent Systems (**MAS**) are inherently decentralised. The vision is, an environment in which every agent is able to directly connect with everyone else and interact with them without having to rely on third-parties to facilitate this. This is in direct contrast to centralised systems in which a single entity is the central point of authority, through which all interactions happen. For example systems based on the client-server architecture, in which clients interact with one another, regarding a specific service (e.g. communication, trade), only through the server. +Multi-Agent Systems (**MAS**) are inherently decentralised. The vision is of an environment in which every agent is able to directly connect with everyone else and interact with them without having to rely on a third party acting as an intermediary or match-maker. This is in direct contrast to centralised systems in which a single entity is the central point of authority, through which all interactions happen. Conventional systems based on a client-server model, are examples of centralized architectures as clients interact with one another to access a specific service (e.g. communication, trade) only through requests to the server. Note, this is not to say that facilitators and middlemen have no place in a multi-agent system; rather it is the 'commanding reliance on middlemen' that MAS disagrees with. -**Division of responsibilities:** In a decentralised system, every agent is equally privileged, and (in principle) should be able to interact with any other agent. The idea is very much aligned with the peer-to-peer paradigm, in which it is the voluntary participation and contribution of peers that creates the infrastructure. As such, in a decentralised system, there is no central 'enforcer'. 
This means all the work that would typically fall under the responsibilities of a central entity must be performed by individual parties in a decentralised system. Blockchain-based cryptocurrencies are a good example of this. People who are getting into cryptocurrencies are often reminded that, due to the lack of a central trusted entity (e.g. a bank), most security precautions related to the handling of digital assets and the execution of transactions fall on individuals themselves. +**Division of responsibilities:** In a decentralised system, every agent is equally privileged, and (in principle) should be able to interact with any other agent. The idea is very much aligned with the peer-to-peer paradigm, in which it is the voluntary participation and contribution of peers that creates the infrastructure. As such, in a decentralised system, there is no central 'enforcer'. This means all the work that would typically fall under the responsibilities of a central entity must be performed by individual parties in a decentralised system. Blockchain-based cryptocurrencies are a good example of this. A notable feature of cryptocurrencies is that the absence of a central trusted entity (e.g. a bank), means that most security precautions related to the handling of digital assets and the execution of transactions are the responsibility of individuals. -**Decentralisation vs distribution:** It is important to emphasise that by decentralisation we do not mean distribution; although multi-agent systems typically do tend to also be distributed. A distributed system is one whose components are physically located in different places and connected over a network. A fully centralised system, owned and operated by a single entity, may in fact be highly distributed. Google's infrastructure is an example of this, where all of the components are distributed across the globe, yet designed to work together highly efficiently and function in unison. Decentralisation on the other hand refers to a system whose components may be owned, operated, and managed by different stakeholders, each with their own personal objectives, interests, and preferences, which may not necessarily be aligned with one another or that of the system itself. Therefore, distribution refers to the physical placement of a system's components, whereas decentralisation refers to **a)** the diversity of ownership and control over a system's constituents, and **b)** the absence of a central point of authority between them. +**Decentralisation vs distribution:** It is important to emphasise that by decentralisation we do not mean distribution; although multi-agent systems typically do tend to also be distributed. A distributed system is one whose components are physically located in different places and connected over a network. A fully centralised system, owned and operated by a single entity, may in fact be highly distributed. Google or Microsoft's cloud infrastructure are examples of this, with all of the components distributed across the globe, yet designed to work together highly efficiently and function in unison. Decentralisation on the other hand refers to a system whose components may be owned, operated, and managed by different stakeholders, each with their own personal objectives, interests, and preferences, which may not necessarily be aligned with one another or with the system itself. 
Therefore, distribution refers to the physical placement of a system's components, whereas decentralisation refers to **a)** the diversity of ownership and control over a system's constituents, and **b)** the absence of a central point of authority between them. -**Example:** To better illustrate the distinction between centralised and decentralised systems, consider another example: search and discoverability in a commerce environment. In a centralised system (say Amazon), there is a single search service -- provided, owned and run by the commerce company itself -- which takes care of all search related functionality for every product within their domain. So to be discoverable in this system, all sellers must register their products with this particular service. However in a decentralised system, there may not necessarily be a single search service provider. There may be multiple such services, run by different, perhaps competing entities. Each seller has the freedom to register with (i.e. make themselves known to) one or a handful of services. On the buyers side, the more services they contact and query, the higher their chances of finding the product they are looking for. +**Example:** To better illustrate the distinction between centralised and decentralised systems, consider another example: search and discoverability in a commerce environment. In a centralised system (say Amazon), there is a single search service -- provided, owned and run by the commerce company itself -- which takes care of all search-related functionality for every product within their domain. So to be discoverable in this system, all sellers must register their products with this particular service. However in a decentralised system, there may not necessarily be a single search service provider. There may be multiple such services, run by different, perhaps competing entities. Each seller has the freedom to register with (i.e. make themselves known to) one or a handful of services. On the buyers side, the more services they contact and query, the higher their chances of finding the product they are looking for. ## Conflicting Environment @@ -47,7 +47,7 @@ All of this makes asynchronisation the preferred method for designing agent proc ## Complex, Incomplete, Inconsistent and Uncertain -The forth characteristic(s) relate to the environment in which agents are expected to operate in, and these have been mentioned a number of times in the previous sections. +The fourth characteristic(s) relate to the environment in which agents are expected to operate in, and these have been mentioned a number of times in the previous sections. The environment agents are suited for typically tend to be complex, to the point that it is usually impossible for any single agent to perceive the whole of the environment on its own. This means that at any point in time, any agent has a limited knowledge about the state of the environment. In other words, the agents;' information tend to be incomplete due to the complexity and sophistication of the world in which they reside. diff --git a/docs/agent-vs-aea.md b/docs/agent-vs-aea.md index 454620c373..5c09b430e2 100644 --- a/docs/agent-vs-aea.md +++ b/docs/agent-vs-aea.md @@ -1,6 +1,6 @@ AEAs are more than just agents. -AEA vs Agent vs Multiplexer +AEA vs Agent vs Multiplexer In this guide we show some of the differences in terms of code. 
@@ -128,7 +128,7 @@ We run the agent from a different thread so that we can still use the main threa We use the input and output text files to send an envelope to our agent and receive a response ``` python # Create a message inside an envelope and get the stub connection to pass it into the agent - message_text = b"my_agent,other_agent,fetchai/default:0.9.0,\x12\r\x08\x01*\t*\x07\n\x05hello," + message_text = b"my_agent,other_agent,fetchai/default:0.10.0,\x12\r\x08\x01*\t*\x07\n\x05hello," with open(INPUT_FILE, "wb") as f: write_with_lock(f, message_text) @@ -256,7 +256,7 @@ def run(): time.sleep(3) # Create a message inside an envelope and get the stub connection to pass it into the agent - message_text = b"my_agent,other_agent,fetchai/default:0.9.0,\x12\r\x08\x01*\t*\x07\n\x05hello," + message_text = b"my_agent,other_agent,fetchai/default:0.10.0,\x12\r\x08\x01*\t*\x07\n\x05hello," with open(INPUT_FILE, "wb") as f: write_with_lock(f, message_text) diff --git a/docs/api/abstract_agent.md b/docs/api/abstract_agent.md index 435cfa2bbe..7316ca9aa9 100644 --- a/docs/api/abstract_agent.md +++ b/docs/api/abstract_agent.md @@ -148,6 +148,16 @@ dict of kwargs Return list of connections. + +#### storage`_`uri + +```python + | @abstractproperty + | storage_uri() -> Optional[str] +``` + +Return storage uri. + #### exception`_`handler diff --git a/docs/api/aea.md b/docs/api/aea.md index 02b64b523f..0d27e95845 100644 --- a/docs/api/aea.md +++ b/docs/api/aea.md @@ -16,7 +16,7 @@ This class implements an autonomous economic agent. #### `__`init`__` ```python - | __init__(identity: Identity, wallet: Wallet, resources: Resources, loop: Optional[AbstractEventLoop] = None, period: float = 0.05, execution_timeout: float = 0, max_reactions: int = 20, decision_maker_handler_class: Optional[Type[DecisionMakerHandler]] = None, skill_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, connection_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, default_ledger: Optional[str] = None, currency_denominations: Optional[Dict[str, str]] = None, default_connection: Optional[PublicId] = None, default_routing: Optional[Dict[PublicId, PublicId]] = None, connection_ids: Optional[Collection[PublicId]] = None, search_service_address: str = DEFAULT_SEARCH_SERVICE_ADDRESS, **kwargs, ,) -> None + | __init__(identity: Identity, wallet: Wallet, resources: Resources, loop: Optional[AbstractEventLoop] = None, period: float = 0.05, execution_timeout: float = 0, max_reactions: int = 20, error_handler_class: Optional[Type[AbstractErrorHandler]] = None, decision_maker_handler_class: Optional[Type[DecisionMakerHandler]] = None, skill_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, connection_exception_policy: ExceptionPolicyEnum = ExceptionPolicyEnum.propagate, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, default_ledger: Optional[str] = None, currency_denominations: Optional[Dict[str, str]] = None, default_connection: Optional[PublicId] = None, default_routing: Optional[Dict[PublicId, PublicId]] = None, connection_ids: Optional[Collection[PublicId]] = None, search_service_address: str = DEFAULT_SEARCH_SERVICE_ADDRESS, storage_uri: Optional[str] = None, **kwargs, ,) -> None ``` Instantiate the agent. @@ -40,12 +40,23 @@ Instantiate the agent. - `default_routing`: dictionary for default routing. - `connection_ids`: active connection ids. Default: consider all the ones in the resources. 
- `search_service_address`: the address of the search service used. +- `storage_uri`: optional uri to set generic storage - `kwargs`: keyword arguments to be attached in the agent context namespace. **Returns**: None + +#### get`_`build`_`dir + +```python + | @classmethod + | get_build_dir(cls) -> str +``` + +Get agent build directory. + #### context diff --git a/docs/api/aea_builder.md b/docs/api/aea_builder.md index acfed71faf..b9718d54bc 100644 --- a/docs/api/aea_builder.md +++ b/docs/api/aea_builder.md @@ -305,6 +305,24 @@ Set decision maker handler class. self + +#### set`_`error`_`handler + +```python + | set_error_handler(error_handler_dotted_path: str, file_path: Path) -> "AEABuilder" +``` + +Set error handler class. + +**Arguments**: + +- `error_handler_dotted_path`: the dotted path to the error handler +- `file_path`: the file path to the file which contains the error handler + +**Returns**: + +self + #### set`_`skill`_`exception`_`policy @@ -392,6 +410,21 @@ Set the runtime mode. self + +#### set`_`storage`_`uri + +```python + | set_storage_uri(storage_uri: Optional[str]) -> "AEABuilder" +``` + +Set the storage uri. + +:param storage uri: storage uri + +**Returns**: + +self + #### set`_`search`_`service`_`address @@ -518,6 +551,23 @@ Set a default ledger API to use. the AEABuilder + +#### set`_`build`_`entrypoint + +```python + | set_build_entrypoint(build_entrypoint: Optional[str]) -> "AEABuilder" +``` + +Set build entrypoint. + +**Arguments**: + +- `build_entrypoint`: path to the builder script. + +**Returns**: + +the AEABuilder + #### set`_`currency`_`denominations @@ -737,6 +787,25 @@ Remove protocol. the AEABuilder + +#### call`_`all`_`build`_`entrypoints + +```python + | call_all_build_entrypoints() +``` + +Call all the build entrypoints. + + +#### run`_`build`_`for`_`component`_`configuration + +```python + | @classmethod + | run_build_for_component_configuration(cls, config: ComponentConfiguration, logger: Optional[logging.Logger] = None) -> None +``` + +Run a build entrypoint script for component configuration. + #### install`_`pypi`_`dependencies diff --git a/docs/api/agent.md b/docs/api/agent.md index c8af2cdc8b..85d8a5bc49 100644 --- a/docs/api/agent.md +++ b/docs/api/agent.md @@ -16,7 +16,7 @@ This class provides an abstract base class for a generic agent. #### `__`init`__` ```python - | __init__(identity: Identity, connections: List[Connection], loop: Optional[AbstractEventLoop] = None, period: float = 1.0, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, logger: Logger = _default_logger) -> None + | __init__(identity: Identity, connections: List[Connection], loop: Optional[AbstractEventLoop] = None, period: float = 1.0, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, storage_uri: Optional[str] = None, logger: Logger = _default_logger) -> None ``` Instantiate the agent. @@ -29,6 +29,7 @@ Instantiate the agent. - `period`: period to call agent's act - `loop_mode`: loop_mode to choose agent run loop. - `runtime_mode`: runtime mode to up agent. +- `storage_uri`: optional uri to set generic storage **Returns**: @@ -44,6 +45,16 @@ None Return list of connections. + +#### storage`_`uri + +```python + | @property + | storage_uri() -> Optional[str] +``` + +Return storage uri. + #### active`_`connections diff --git a/docs/api/components/base.md b/docs/api/components/base.md index ce4d60e508..30787982bc 100644 --- a/docs/api/components/base.md +++ b/docs/api/components/base.md @@ -106,6 +106,16 @@ Get the directory. 
Raise error if it has not been set yet. Set the directory. Raise error if already set. + +#### build`_`directory + +```python + | @property + | build_directory() -> Optional[str] +``` + +Get build directory for the component. + #### load`_`aea`_`package diff --git a/docs/api/configurations/base.md b/docs/api/configurations/base.md index eab9da17fb..49e735c0dd 100644 --- a/docs/api/configurations/base.md +++ b/docs/api/configurations/base.md @@ -547,6 +547,15 @@ Delete an item. Read all the items. + +#### keys + +```python + | keys() -> Set[str] +``` + +Get the set of keys. + ## PublicId Objects @@ -1101,7 +1110,7 @@ A package can be one of: #### `__`init`__` ```python - | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None) + | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None) ``` Initialize a package configuration. @@ -1118,6 +1127,7 @@ describing the AEA versions allowed. The fixed version is interpreted with the specifier '=='. - `fingerprint`: the fingerprint. - `fingerprint_ignore_patterns`: a list of file patterns to ignore files to fingerprint. +- `build_entrypoint`: path to a script to execute at build time. #### name @@ -1239,11 +1249,31 @@ Class to represent an agent component configuration. #### `__`init`__` ```python - | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, dependencies: Optional[Dependencies] = None) + | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, build_directory: Optional[str] = None, dependencies: Optional[Dependencies] = None) ``` Set component configuration. + +#### build`_`directory + +```python + | @property + | build_directory() -> Optional[str] +``` + +Get the component type. + + +#### build`_`directory + +```python + | @build_directory.setter + | build_directory(value: Optional[str]) -> None +``` + +Get the component type. + #### component`_`type @@ -1334,7 +1364,7 @@ Handle connection configuration. 
#### `__`init`__` ```python - | __init__(name: SimpleIdOrStr = "", author: SimpleIdOrStr = "", version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, class_name: str = "", protocols: Optional[Set[PublicId]] = None, connections: Optional[Set[PublicId]] = None, restricted_to_protocols: Optional[Set[PublicId]] = None, excluded_protocols: Optional[Set[PublicId]] = None, dependencies: Optional[Dependencies] = None, description: str = "", connection_id: Optional[PublicId] = None, is_abstract: bool = False, **config, ,) + | __init__(name: SimpleIdOrStr = "", author: SimpleIdOrStr = "", version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, build_directory: Optional[str] = None, class_name: str = "", protocols: Optional[Set[PublicId]] = None, connections: Optional[Set[PublicId]] = None, restricted_to_protocols: Optional[Set[PublicId]] = None, excluded_protocols: Optional[Set[PublicId]] = None, dependencies: Optional[Dependencies] = None, description: str = "", connection_id: Optional[PublicId] = None, is_abstract: bool = False, **config, ,) ``` Initialize a connection configuration object. @@ -1411,7 +1441,7 @@ Handle protocol configuration. #### `__`init`__` ```python - | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, aea_version: str = "", dependencies: Optional[Dependencies] = None, description: str = "") + | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, build_directory: Optional[str] = None, aea_version: str = "", dependencies: Optional[Dependencies] = None, description: str = "") ``` Initialize a connection configuration object. @@ -1493,7 +1523,7 @@ Class to represent a skill configuration file. #### `__`init`__` ```python - | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, connections: Optional[Set[PublicId]] = None, protocols: Optional[Set[PublicId]] = None, contracts: Optional[Set[PublicId]] = None, skills: Optional[Set[PublicId]] = None, dependencies: Optional[Dependencies] = None, description: str = "", is_abstract: bool = False) + | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, build_directory: Optional[str] = None, connections: Optional[Set[PublicId]] = None, protocols: Optional[Set[PublicId]] = None, contracts: Optional[Set[PublicId]] = None, skills: Optional[Set[PublicId]] = None, dependencies: Optional[Dependencies] = None, description: str = "", is_abstract: bool = False) ``` Initialize a skill configuration. @@ -1568,7 +1598,7 @@ Class to represent the agent configuration file. 
#### `__`init`__` ```python - | __init__(agent_name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, registry_path: str = DEFAULT_REGISTRY_NAME, description: str = "", logging_config: Optional[Dict] = None, period: Optional[float] = None, execution_timeout: Optional[float] = None, max_reactions: Optional[int] = None, decision_maker_handler: Optional[Dict] = None, skill_exception_policy: Optional[str] = None, connection_exception_policy: Optional[str] = None, default_ledger: Optional[str] = None, currency_denominations: Optional[Dict[str, str]] = None, default_connection: Optional[str] = None, default_routing: Optional[Dict[str, str]] = None, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, component_configurations: Optional[Dict[ComponentId, Dict]] = None) + | __init__(agent_name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, registry_path: str = DEFAULT_REGISTRY_NAME, description: str = "", logging_config: Optional[Dict] = None, period: Optional[float] = None, execution_timeout: Optional[float] = None, max_reactions: Optional[int] = None, error_handler: Optional[Dict] = None, decision_maker_handler: Optional[Dict] = None, skill_exception_policy: Optional[str] = None, connection_exception_policy: Optional[str] = None, default_ledger: Optional[str] = None, currency_denominations: Optional[Dict[str, str]] = None, default_connection: Optional[str] = None, default_routing: Optional[Dict[str, str]] = None, loop_mode: Optional[str] = None, runtime_mode: Optional[str] = None, storage_uri: Optional[str] = None, component_configurations: Optional[Dict[ComponentId, Dict]] = None) ``` Instantiate the agent configuration object. @@ -1801,7 +1831,7 @@ Handle contract configuration. #### `__`init`__` ```python - | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, dependencies: Optional[Dependencies] = None, description: str = "", contract_interface_paths: Optional[Dict[str, str]] = None, class_name: str = "") + | __init__(name: SimpleIdOrStr, author: SimpleIdOrStr, version: str = "", license_: str = "", aea_version: str = "", fingerprint: Optional[Dict[str, str]] = None, fingerprint_ignore_patterns: Optional[Sequence[str]] = None, build_entrypoint: Optional[str] = None, build_directory: Optional[str] = None, dependencies: Optional[Dependencies] = None, description: str = "", contract_interface_paths: Optional[Dict[str, str]] = None, class_name: str = "") ``` Initialize a protocol configuration object. diff --git a/docs/api/context/base.md b/docs/api/context/base.md index 363b8527b9..7242d986d2 100644 --- a/docs/api/context/base.md +++ b/docs/api/context/base.md @@ -16,7 +16,7 @@ Provide read access to relevant objects of the agent for the skills. 
#### `__`init`__` ```python - | __init__(identity: Identity, connection_status: MultiplexerStatus, outbox: OutBox, decision_maker_message_queue: Queue, decision_maker_handler_context: SimpleNamespace, task_manager: TaskManager, default_ledger_id: str, currency_denominations: Dict[str, str], default_connection: Optional[PublicId], default_routing: Dict[PublicId, PublicId], search_service_address: Address, decision_maker_address: Address, **kwargs) + | __init__(identity: Identity, connection_status: MultiplexerStatus, outbox: OutBox, decision_maker_message_queue: Queue, decision_maker_handler_context: SimpleNamespace, task_manager: TaskManager, default_ledger_id: str, currency_denominations: Dict[str, str], default_connection: Optional[PublicId], default_routing: Dict[PublicId, PublicId], search_service_address: Address, decision_maker_address: Address, storage_callable: Callable[[], Optional[Storage]] = lambda: None, **kwargs) ``` Initialize an agent context. @@ -35,8 +35,19 @@ Initialize an agent context. - `default_routing`: the default routing - `search_service_address`: the address of the search service - `decision_maker_address`: the address of the decision maker +- `storage_callable`: function that returns optional storage attached to agent. - `kwargs`: keyword arguments to be attached in the agent context namespace. + +#### storage + +```python + | @property + | storage() -> Optional[Storage] +``` + +Return storage instance if enabled in AEA. + #### shared`_`state diff --git a/docs/api/contracts/base.md b/docs/api/contracts/base.md index 26b5bf1b80..21110b2d14 100644 --- a/docs/api/contracts/base.md +++ b/docs/api/contracts/base.md @@ -105,7 +105,7 @@ the contract object. ```python | @classmethod - | get_deploy_transaction(cls, ledger_api: LedgerApi, deployer_address: str, **kwargs) -> Dict[str, Any] + | get_deploy_transaction(cls, ledger_api: LedgerApi, deployer_address: str, **kwargs) -> Optional[JSONLike] ``` Handler method for the 'GET_DEPLOY_TRANSACTION' requests. @@ -128,7 +128,7 @@ the tx ```python | @classmethod - | get_raw_transaction(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Dict[str, Any] + | get_raw_transaction(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Optional[JSONLike] ``` Handler method for the 'GET_RAW_TRANSACTION' requests. @@ -150,7 +150,7 @@ the tx ```python | @classmethod - | get_raw_message(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Dict[str, Any] + | get_raw_message(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Optional[bytes] ``` Handler method for the 'GET_RAW_MESSAGE' requests. @@ -172,7 +172,7 @@ the tx ```python | @classmethod - | get_state(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Dict[str, Any] + | get_state(cls, ledger_api: LedgerApi, contract_address: str, **kwargs) -> Optional[JSONLike] ``` Handler method for the 'GET_STATE' requests. diff --git a/docs/api/crypto/base.md b/docs/api/crypto/base.md index 9c37641eab..3a496e0248 100644 --- a/docs/api/crypto/base.md +++ b/docs/api/crypto/base.md @@ -149,10 +149,10 @@ signature of the message in string form ```python | @abstractmethod - | sign_transaction(transaction: Any) -> Any + | sign_transaction(transaction: JSONLike) -> JSONLike ``` -Sign a transaction in bytes string form. +Sign a transaction in dict form. **Arguments**: @@ -195,7 +195,7 @@ Interface for helper class usable as Mixin for LedgerApi or as standalone class. 
```python | @staticmethod | @abstractmethod - | is_transaction_settled(tx_receipt: Any) -> bool + | is_transaction_settled(tx_receipt: JSONLike) -> bool ``` Check whether a transaction is settled or not. @@ -214,7 +214,7 @@ True if the transaction has been settled, False o/w. ```python | @staticmethod | @abstractmethod - | is_transaction_valid(tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool + | is_transaction_valid(tx: JSONLike, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool ``` Check whether a transaction is valid or not. @@ -387,12 +387,34 @@ This usually takes the form of a web request to be waited synchronously. the balance. + +#### get`_`state + +```python + | @abstractmethod + | get_state(callable_name: str, *args, **kwargs) -> Optional[JSONLike] +``` + +Call a specified function on the underlying ledger API. + +This usually takes the form of a web request to be waited synchronously. + +**Arguments**: + +- `callable_name`: the name of the API function to be called. +- `args`: the positional arguments for the API function. +- `kwargs`: the keyword arguments for the API function. + +**Returns**: + +the ledger API response. + #### get`_`transfer`_`transaction ```python | @abstractmethod - | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, **kwargs, ,) -> Optional[Any] + | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, **kwargs, ,) -> Optional[JSONLike] ``` Submit a transfer transaction to the ledger. @@ -414,7 +436,7 @@ the transfer transaction ```python | @abstractmethod - | send_signed_transaction(tx_signed: Any) -> Optional[str] + | send_signed_transaction(tx_signed: JSONLike) -> Optional[str] ``` Send a signed transaction and wait for confirmation. @@ -430,7 +452,7 @@ Use keyword arguments for the specifying the signed transaction payload. ```python | @abstractmethod - | get_transaction_receipt(tx_digest: str) -> Optional[Any] + | get_transaction_receipt(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction receipt for a transaction digest. @@ -448,7 +470,7 @@ the tx receipt, if present ```python | @abstractmethod - | get_transaction(tx_digest: str) -> Optional[Any] + | get_transaction(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction for a transaction digest. @@ -485,7 +507,7 @@ the contract instance ```python | @abstractmethod - | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, **kwargs, ,) -> Dict[str, Any] + | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, **kwargs, ,) -> Optional[JSONLike] ``` Get the transaction to deploy the smart contract. @@ -496,6 +518,24 @@ Get the transaction to deploy the smart contract. - `deployer_address`: The address that will deploy the contract. :returns tx: the transaction dictionary. + +#### update`_`with`_`gas`_`estimate + +```python + | @abstractmethod + | update_with_gas_estimate(transaction: JSONLike) -> JSONLike +``` + +Attempts to update the transaction with a gas estimate + +**Arguments**: + +- `transaction`: the transaction + +**Returns**: + +the updated transaction + ## FaucetApi Objects @@ -510,7 +550,7 @@ Interface for testnet faucet APIs. ```python | @abstractmethod - | get_wealth(address: Address) -> None + | get_wealth(address: Address, url: Optional[str] = None) -> None ``` Get wealth from the faucet for the provided address. 
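Taken together, the `JSONLike` signatures above describe the transfer flow in plain dictionaries. A minimal sketch, assuming `ledger_api` and `crypto` are concrete `LedgerApi` and `Crypto` implementations for the same ledger constructed elsewhere, and that the helper mixin provides `is_transaction_settled` on the API object; addresses, amounts and the nonce are placeholders:

```python
# build an unsigned transfer transaction (a JSON-like dict)
transaction = ledger_api.get_transfer_transaction(
    sender_address=crypto.address,
    destination_address="some_destination_address",  # placeholder
    amount=1,
    tx_fee=1,
    tx_nonce="some_nonce",  # placeholder
)

# optionally refresh the gas estimate before signing
transaction = ledger_api.update_with_gas_estimate(transaction)

# sign with the matching Crypto object and broadcast
signed_transaction = crypto.sign_transaction(transaction)
tx_digest = ledger_api.send_signed_transaction(signed_transaction)

# wait for the receipt and check settlement
tx_receipt = ledger_api.get_transaction_receipt(tx_digest)
settled = ledger_api.is_transaction_settled(tx_receipt)
```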
@@ -518,6 +558,7 @@ Get wealth from the faucet for the provided address. **Arguments**: - `address`: the address. +- `url`: the url **Returns**: diff --git a/docs/api/crypto/cosmos.md b/docs/api/crypto/cosmos.md index 32271c9553..377006b0a1 100644 --- a/docs/api/crypto/cosmos.md +++ b/docs/api/crypto/cosmos.md @@ -17,7 +17,7 @@ Helper class usable as Mixin for CosmosApi or as standalone class. ```python | @staticmethod - | is_transaction_settled(tx_receipt: Any) -> bool + | is_transaction_settled(tx_receipt: JSONLike) -> bool ``` Check whether a transaction is settled or not. @@ -35,7 +35,7 @@ True if the transaction has been settled, False o/w. ```python | @staticmethod - | is_transaction_valid(tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool + | is_transaction_valid(tx: JSONLike, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool ``` Check whether a transaction is valid or not. @@ -264,7 +264,7 @@ signature of the message in string form ```python | @staticmethod - | format_default_transaction(transaction: Any, signature: str, base64_pbk: str) -> Any + | format_default_transaction(transaction: JSONLike, signature: str, base64_pbk: str) -> JSONLike ``` Format default CosmosSDK transaction and add signature. @@ -284,7 +284,7 @@ formatted transaction with signature ```python | @staticmethod - | format_wasm_transaction(transaction: Any, signature: str, base64_pbk: str) -> Any + | format_wasm_transaction(transaction: JSONLike, signature: str, base64_pbk: str) -> JSONLike ``` Format CosmWasm transaction and add signature. @@ -303,7 +303,7 @@ formatted transaction with signature #### sign`_`transaction ```python - | sign_transaction(transaction: Any) -> Any + | sign_transaction(transaction: JSONLike) -> JSONLike ``` Sign a transaction in bytes string form. @@ -380,11 +380,25 @@ Get the underlying API object. Get the balance of a given account. + +#### get`_`state + +```python + | get_state(callable_name: str, *args, **kwargs) -> Optional[JSONLike] +``` + +Call a specified function on the ledger API. + +Based on the cosmos REST +API specification, which takes a path (strings separated by '/'). The +convention here is to define the root of the path (txs, blocks, etc.) +as the callable_name and the rest of the path as args. + #### get`_`deploy`_`transaction ```python - | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, tx_fee: int = 0, gas: int = 80000, denom: Optional[str] = None, memo: str = "", chain_id: Optional[str] = None, **kwargs, ,) -> Dict[str, Any] + | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, tx_fee: int = 0, gas: int = 80000, denom: Optional[str] = None, memo: str = "", chain_id: Optional[str] = None, **kwargs, ,) -> Optional[JSONLike] ``` Create a CosmWasm bytecode deployment transaction. @@ -394,7 +408,7 @@ Create a CosmWasm bytecode deployment transaction. - `sender_address`: the sender address of the message initiator. - `filename`: the path to wasm bytecode file. - `gas`: Maximum amount of gas to be used on executing command. -- `memo`: Any string comment. +- `memo`: any string comment. - `chain_id`: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). 
**Returns**: @@ -405,7 +419,7 @@ the unsigned CosmWasm contract deploy message #### get`_`init`_`transaction ```python - | get_init_transaction(deployer_address: Address, code_id: int, init_msg: Any, amount: int, tx_fee: int, gas: int = 80000, denom: Optional[str] = None, label: str = "", memo: str = "", chain_id: Optional[str] = None) -> Optional[Any] + | get_init_transaction(deployer_address: Address, code_id: int, init_msg: Any, amount: int, tx_fee: int, gas: int = 80000, denom: Optional[str] = None, label: str = "", memo: str = "", chain_id: Optional[str] = None) -> Optional[JSONLike] ``` Create a CosmWasm InitMsg transaction. @@ -419,7 +433,7 @@ Create a CosmWasm InitMsg transaction. - `gas`: Maximum amount of gas to be used on executing command. - `denom`: the name of the denomination of the contract funds - `label`: the label name of the contract -- `memo`: Any string comment. +- `memo`: any string comment. - `chain_id`: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). **Returns**: @@ -430,7 +444,7 @@ the unsigned CosmWasm InitMsg #### get`_`handle`_`transaction ```python - | get_handle_transaction(sender_address: Address, contract_address: Address, handle_msg: Any, amount: int, tx_fee: int, denom: Optional[str] = None, gas: int = 80000, memo: str = "", chain_id: Optional[str] = None) -> Optional[Any] + | get_handle_transaction(sender_address: Address, contract_address: Address, handle_msg: Any, amount: int, tx_fee: int, denom: Optional[str] = None, gas: int = 80000, memo: str = "", chain_id: Optional[str] = None) -> Optional[JSONLike] ``` Create a CosmWasm HandleMsg transaction. @@ -441,7 +455,7 @@ Create a CosmWasm HandleMsg transaction. - `contract_address`: the address of the smart contract. - `handle_msg`: HandleMsg in JSON format. - `gas`: Maximum amount of gas to be used on executing command. -- `memo`: Any string comment. +- `memo`: any string comment. - `chain_id`: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet). **Returns**: @@ -457,7 +471,7 @@ the unsigned CosmWasm HandleMsg | "Encountered exception when trying to execute wasm transaction: {}", | logger_method=_default_logger.warning, | ) - | try_execute_wasm_transaction(tx_signed: Any, signed_tx_filename: str = "tx.signed") -> Optional[str] + | try_execute_wasm_transaction(tx_signed: JSONLike, signed_tx_filename: str = "tx.signed") -> Optional[str] ``` Execute a CosmWasm Transaction. QueryMsg doesn't require signing. @@ -479,7 +493,7 @@ the transaction digest | "Encountered exception when trying to execute wasm query: {}", | logger_method=_default_logger.warning, | ) - | try_execute_wasm_query(contract_address: Address, query_msg: Any) -> Optional[str] + | try_execute_wasm_query(contract_address: Address, query_msg: JSONLike) -> Optional[str] ``` Execute a CosmWasm QueryMsg. QueryMsg doesn't require signing. @@ -497,7 +511,7 @@ the message receipt #### get`_`transfer`_`transaction ```python - | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, denom: Optional[str] = None, gas: int = 80000, memo: str = "", chain_id: Optional[str] = None, **kwargs, ,) -> Optional[Any] + | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, denom: Optional[str] = None, gas: int = 80000, memo: str = "", chain_id: Optional[str] = None, **kwargs, ,) -> Optional[JSONLike] ``` Submit a transfer transaction to the ledger. 
@@ -522,7 +536,7 @@ the transfer transaction #### send`_`signed`_`transaction ```python - | send_signed_transaction(tx_signed: Any) -> Optional[str] + | send_signed_transaction(tx_signed: JSONLike) -> Optional[str] ``` Send a signed transaction and wait for confirmation. @@ -540,7 +554,7 @@ tx_digest, if present ```python | @staticmethod - | is_cosmwasm_transaction(tx_signed: Any) -> bool + | is_cosmwasm_transaction(tx_signed: JSONLike) -> bool ``` Check whether it is a cosmwasm tx. @@ -550,7 +564,7 @@ Check whether it is a cosmwasm tx. ```python | @staticmethod - | is_transfer_transaction(tx_signed: Any) -> bool + | is_transfer_transaction(tx_signed: JSONLike) -> bool ``` Check whether it is a transfer tx. @@ -559,7 +573,7 @@ Check whether it is a transfer tx. #### get`_`transaction`_`receipt ```python - | get_transaction_receipt(tx_digest: str) -> Optional[Any] + | get_transaction_receipt(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction receipt for a transaction digest. @@ -576,7 +590,7 @@ the tx receipt, if present #### get`_`transaction ```python - | get_transaction(tx_digest: str) -> Optional[Any] + | get_transaction(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction for a transaction digest. @@ -637,6 +651,23 @@ Get contract address of latest initialised contract by its ID. contract address of last initialised contract + +#### update`_`with`_`gas`_`estimate + +```python + | update_with_gas_estimate(transaction: JSONLike) -> JSONLike +``` + +Attempts to update the transaction with a gas estimate + +**Arguments**: + +- `transaction`: the transaction + +**Returns**: + +the updated transaction + ## CosmosApi Objects @@ -668,7 +699,7 @@ Initialize CosmosFaucetApi. #### get`_`wealth ```python - | get_wealth(address: Address) -> None + | get_wealth(address: Address, url: Optional[str] = None) -> None ``` Get wealth from the faucet for the provided address. @@ -676,6 +707,7 @@ Get wealth from the faucet for the provided address. **Arguments**: - `address`: the address. +- `url`: the url **Returns**: diff --git a/docs/api/crypto/ethereum.md b/docs/api/crypto/ethereum.md index db049a42bc..b68032c79f 100644 --- a/docs/api/crypto/ethereum.md +++ b/docs/api/crypto/ethereum.md @@ -3,6 +3,64 @@ Ethereum module wrapping the public and private key cryptography and ledger api. + +## SignedTransactionTranslator Objects + +```python +class SignedTransactionTranslator() +``` + +Translator for SignedTransaction. + + +#### to`_`dict + +```python + | @staticmethod + | to_dict(signed_transaction: SignedTransaction) -> Dict[str, Union[str, int]] +``` + +Write SignedTransaction to dict. + + +#### from`_`dict + +```python + | @staticmethod + | from_dict(signed_transaction_dict: JSONLike) -> SignedTransaction +``` + +Get SignedTransaction from dict. + + +## AttributeDictTranslator Objects + +```python +class AttributeDictTranslator() +``` + +Translator for AttributeDict. + + +#### to`_`dict + +```python + | @classmethod + | to_dict(cls, attr_dict: AttributeDict) -> JSONLike +``` + +Simplify to dict. + + +#### from`_`dict + +```python + | @classmethod + | from_dict(cls, di: JSONLike) -> AttributeDict +``` + +Get back attribute dict. + ## EthereumCrypto Objects @@ -107,7 +165,7 @@ signature of the message in string form #### sign`_`transaction ```python - | sign_transaction(transaction: Any) -> Any + | sign_transaction(transaction: JSONLike) -> JSONLike ``` Sign a transaction in bytes string form. @@ -161,7 +219,7 @@ Helper class usable as Mixin for EthereumApi or as standalone class. 
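The `SignedTransactionTranslator` and `AttributeDictTranslator` introduced above exist so that web3 objects can be passed around as plain, JSON-compatible dictionaries. A small sketch of the round trip, assuming the classes live in the `aea.crypto.ethereum` module that this documentation file covers; the stand-in receipt values are illustrative:

```python
from web3.datastructures import AttributeDict

from aea.crypto.ethereum import AttributeDictTranslator

receipt = AttributeDict({"status": 1, "blockNumber": 42})  # stand-in receipt
receipt_dict = AttributeDictTranslator.to_dict(receipt)    # plain JSON-like dict
restored = AttributeDictTranslator.from_dict(receipt_dict) # back to AttributeDict
# SignedTransactionTranslator.to_dict / from_dict follow the same pattern
```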
```python | @staticmethod - | is_transaction_settled(tx_receipt: Any) -> bool + | is_transaction_settled(tx_receipt: JSONLike) -> bool ``` Check whether a transaction is settled or not. @@ -179,7 +237,7 @@ True if the transaction has been settled, False o/w. ```python | @staticmethod - | is_transaction_valid(tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool + | is_transaction_valid(tx: dict, seller: Address, client: Address, tx_nonce: str, amount: int) -> bool ``` Check whether a transaction is valid or not. @@ -330,11 +388,20 @@ Get the underlying API object. Get the balance of a given account. + +#### get`_`state + +```python + | get_state(callable_name: str, *args, **kwargs) -> Optional[JSONLike] +``` + +Call a specified function on the ledger API. + #### get`_`transfer`_`transaction ```python - | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, chain_id: Optional[int] = None, gas_price: Optional[str] = None, **kwargs, ,) -> Optional[Any] + | get_transfer_transaction(sender_address: Address, destination_address: Address, amount: int, tx_fee: int, tx_nonce: str, chain_id: Optional[int] = None, gas_price: Optional[str] = None, **kwargs, ,) -> Optional[JSONLike] ``` Submit a transfer transaction to the ledger. @@ -353,11 +420,28 @@ Submit a transfer transaction to the ledger. the transfer transaction + +#### update`_`with`_`gas`_`estimate + +```python + | update_with_gas_estimate(transaction: JSONLike) -> JSONLike +``` + +Attempts to update the transaction with a gas estimate + +**Arguments**: + +- `transaction`: the transaction + +**Returns**: + +the updated transaction + #### send`_`signed`_`transaction ```python - | send_signed_transaction(tx_signed: Any) -> Optional[str] + | send_signed_transaction(tx_signed: JSONLike) -> Optional[str] ``` Send a signed transaction and wait for confirmation. @@ -374,7 +458,7 @@ tx_digest, if present #### get`_`transaction`_`receipt ```python - | get_transaction_receipt(tx_digest: str) -> Optional[Any] + | get_transaction_receipt(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction receipt for a transaction digest. @@ -391,7 +475,7 @@ the tx receipt, if present #### get`_`transaction ```python - | get_transaction(tx_digest: str) -> Optional[Any] + | get_transaction(tx_digest: str) -> Optional[JSONLike] ``` Get the transaction for a transaction digest. @@ -426,7 +510,7 @@ the contract instance #### get`_`deploy`_`transaction ```python - | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, value: int = 0, gas: int = 0, **kwargs, ,) -> Dict[str, Any] + | get_deploy_transaction(contract_interface: Dict[str, str], deployer_address: Address, value: int = 0, gas: int = 0, **kwargs, ,) -> Optional[JSONLike] ``` Get the transaction to deploy the smart contract. @@ -439,23 +523,6 @@ Get the transaction to deploy the smart contract. - `gas`: the gas to be used :returns tx: the transaction dictionary. - -#### try`_`estimate`_`gas - -```python - | try_estimate_gas(tx: Dict[str, Any]) -> Dict[str, Any] -``` - -Attempts to update the transaction with a gas estimate. - -**Arguments**: - -- `tx`: the transaction - -**Returns**: - -the transaction (potentially updated) - #### is`_`valid`_`address @@ -483,7 +550,7 @@ Ethereum testnet faucet API. 
#### get`_`wealth ```python - | get_wealth(address: Address) -> None + | get_wealth(address: Address, url: Optional[str] = None) -> None ``` Get wealth from the faucet for the provided address. @@ -491,6 +558,7 @@ Get wealth from the faucet for the provided address. **Arguments**: - `address`: the address. +- `url`: the url **Returns**: diff --git a/docs/api/crypto/helpers.md b/docs/api/crypto/helpers.md index a586c2b0df..3812aa183a 100644 --- a/docs/api/crypto/helpers.md +++ b/docs/api/crypto/helpers.md @@ -63,7 +63,7 @@ None #### try`_`generate`_`testnet`_`wealth ```python -try_generate_testnet_wealth(identifier: str, address: str, _sync: bool = True) -> None +try_generate_testnet_wealth(identifier: str, address: str, url: Optional[str] = None, _sync: bool = True) -> None ``` Try generate wealth on a testnet. @@ -72,6 +72,7 @@ Try generate wealth on a testnet. - `identifier`: the identifier of the ledger - `address`: the address to check for +- `url`: the url - `_sync`: whether to wait to sync or not; currently unused **Returns**: diff --git a/docs/api/crypto/wallet.md b/docs/api/crypto/wallet.md index 37dc2b21d4..8a5a8a0d8d 100644 --- a/docs/api/crypto/wallet.md +++ b/docs/api/crypto/wallet.md @@ -177,7 +177,7 @@ the signature of the message #### sign`_`transaction ```python - | sign_transaction(crypto_id: str, transaction: Any) -> Optional[Any] + | sign_transaction(crypto_id: str, transaction: Any) -> Optional[JSONLike] ``` Sign a tx. diff --git a/docs/api/error_handler/base.md b/docs/api/error_handler/base.md new file mode 100644 index 0000000000..9a7dbf20c9 --- /dev/null +++ b/docs/api/error_handler/base.md @@ -0,0 +1,72 @@ + +# aea.error`_`handler.base + +This module contains the abstract error handler class. + + +## AbstractErrorHandler Objects + +```python +class AbstractErrorHandler(ABC) +``` + +Error handler class for handling problematic envelopes. + + +#### send`_`unsupported`_`protocol + +```python + | @classmethod + | @abstractmethod + | send_unsupported_protocol(cls, envelope: Envelope, logger: Logger) -> None +``` + +Handle the received envelope in case the protocol is not supported. + +**Arguments**: + +- `envelope`: the envelope +- `logger`: the logger + +**Returns**: + +None + + +#### send`_`decoding`_`error + +```python + | @classmethod + | @abstractmethod + | send_decoding_error(cls, envelope: Envelope, logger: Logger) -> None +``` + +Handle a decoding error. + +**Arguments**: + +- `envelope`: the envelope + +**Returns**: + +None + + +#### send`_`unsupported`_`skill + +```python + | @classmethod + | @abstractmethod + | send_unsupported_skill(cls, envelope: Envelope, logger: Logger) -> None +``` + +Handle the received envelope in case the skill is not supported. + +**Arguments**: + +- `envelope`: the envelope + +**Returns**: + +None + diff --git a/docs/api/helpers/temp_error_handler.md b/docs/api/error_handler/default.md similarity index 65% rename from docs/api/helpers/temp_error_handler.md rename to docs/api/error_handler/default.md index c860164e48..7599ec681b 100644 --- a/docs/api/helpers/temp_error_handler.md +++ b/docs/api/error_handler/default.md @@ -1,18 +1,18 @@ - -# aea.helpers.temp`_`error`_`handler + +# aea.error`_`handler.default -Temporary error handler. +This module contains the default error handler class. - + ## ErrorHandler Objects ```python -class ErrorHandler() +class ErrorHandler(AbstractErrorHandler) ``` Error handler class for handling problematic envelopes. 
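Because the error handler is now pluggable (see `error_handler_class` on `AEA.__init__` and `AEABuilder.set_error_handler` above), a custom handler only needs to implement the three abstract class methods of `AbstractErrorHandler`. A minimal sketch using the module path shown in the header; the logging behaviour is purely illustrative:

```python
from logging import Logger

from aea.error_handler.base import AbstractErrorHandler
from aea.mail.base import Envelope


class LoggingErrorHandler(AbstractErrorHandler):
    """Log problematic envelopes instead of replying with error messages."""

    @classmethod
    def send_unsupported_protocol(cls, envelope: Envelope, logger: Logger) -> None:
        logger.warning("Unsupported protocol for envelope: %s", envelope)

    @classmethod
    def send_decoding_error(cls, envelope: Envelope, logger: Logger) -> None:
        logger.warning("Could not decode envelope: %s", envelope)

    @classmethod
    def send_unsupported_skill(cls, envelope: Envelope, logger: Logger) -> None:
        logger.warning("No skill could handle envelope: %s", envelope)
```

Such a class can then be supplied via `error_handler_class=LoggingErrorHandler` when constructing the `AEA`, or registered through the builder.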
- + #### send`_`unsupported`_`protocol ```python @@ -30,7 +30,7 @@ Handle the received envelope in case the protocol is not supported. None - + #### send`_`decoding`_`error ```python @@ -48,7 +48,7 @@ Handle a decoding error. None - + #### send`_`unsupported`_`skill ```python diff --git a/docs/api/helpers/base.md b/docs/api/helpers/base.md index 027fdd4c67..56c384a549 100644 --- a/docs/api/helpers/base.md +++ b/docs/api/helpers/base.md @@ -312,3 +312,48 @@ Find the reachable subgraph induced by a set of starting nodes. the adjacency list of the subgraph. + +## cached`_`property Objects + +```python +class cached_property() +``` + +Cached property from python3.8 functools. + + +#### `__`init`__` + +```python + | __init__(func) +``` + +Init cached property. + + +#### `__`set`_`name`__` + +```python + | __set_name__(_, name) +``` + +Set name. + + +#### `__`get`__` + +```python + | __get__(instance, _=None) +``` + +Get instance. + + +#### ensure`_`dir + +```python +ensure_dir(dir_path: str) -> None +``` + +Check if dir_path is a directory or create it. + diff --git a/docs/api/helpers/search/models.md b/docs/api/helpers/search/models.md index b291442541..08513d6af1 100644 --- a/docs/api/helpers/search/models.md +++ b/docs/api/helpers/search/models.md @@ -68,6 +68,37 @@ Compare equality of two locations. Get the string representation of the data model. + +#### encode + +```python + | encode() -> models_pb2.Query.Location +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, location_pb) -> "Location" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `location_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + ## AttributeInconsistencyException Objects @@ -101,7 +132,7 @@ Initialize an attribute. **Arguments**: - `name`: the name of the attribute. -- `type`: the type of the attribute. +- `type_`: the type of the attribute. - `is_required`: whether the attribute is required by the data model. - `description`: an (optional) human-readable description for the attribute. @@ -123,6 +154,37 @@ Compare with another object. Get the string representation of the data model. + +#### encode + +```python + | encode() -> models_pb2.Query.Attribute +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, attribute_pb) -> "Attribute" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `attribute_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + ## DataModel Objects @@ -164,6 +226,37 @@ Compare with another object. Get the string representation of the data model. + +#### encode + +```python + | encode() -> models_pb2.Query.DataModel +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, data_model_pb) -> "DataModel" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. 
+ +**Arguments**: + +- `data_model_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + #### generate`_`data`_`model @@ -251,17 +344,18 @@ Get the string representation of the description. ```python | @classmethod - | encode(cls, description_protobuf_object, description_object: "Description") -> None + | encode(cls, description_pb, description: "Description") -> None ``` Encode an instance of this class into the protocol buffer object. -The protocol buffer object in the description_protobuf_object argument must be matched with the instance of this class in the 'description_object' argument. +The protocol buffer object in the description_protobuf_object argument must be matched +with the instance of this class in the 'description_object' argument. **Arguments**: -- `description_protobuf_object`: the protocol buffer object whose type corresponds with this class. -- `description_object`: an instance of this class to be encoded in the protocol buffer object. +- `description_pb`: the protocol buffer object whose type corresponds with this class. +- `description`: an instance of this class to be encoded in the protocol buffer object. **Returns**: @@ -272,16 +366,17 @@ None ```python | @classmethod - | decode(cls, description_protobuf_object) -> "Description" + | decode(cls, description_pb) -> "Description" ``` Decode a protocol buffer object that corresponds with this class into an instance of this class. -A new instance of this class must be created that matches the protocol buffer object in the 'description_protobuf_object' argument. +A new instance of this class must be created that matches the protocol +buffer object in the 'description_protobuf_object' argument. **Arguments**: -- `description_protobuf_object`: the protocol buffer object whose type corresponds with this class. +- `description_pb`: the protocol buffer object whose type corresponds with this class. **Returns**: @@ -326,8 +421,8 @@ Used with the Constraint class, this class allows to specify constraint over att >>> not_equal_london = ConstraintType("!=", "London") >>> less_than_pi = ConstraintType("<", 3.14) >>> within_range = ConstraintType("within", (-10.0, 10.0)) - >>> in_a_set = ConstraintType("in", [1, 2, 3]) - >>> not_in_a_set = ConstraintType("not_in", {"C", "Java", "Python"}) + >>> in_a_set = ConstraintType("in", (1, 2, 3)) + >>> not_in_a_set = ConstraintType("not_in", ("C", "Java", "Python")) #### `__`init`__` @@ -340,7 +435,7 @@ Initialize a constraint type. **Arguments**: -- `type`: the type of the constraint. +- `type_`: the type of the constraint. | Either an instance of the ConstraintTypes enum, | or a string representation associated with the type. - `value`: the value that defines the constraint. @@ -457,6 +552,38 @@ Check equality with another object. Get the string representation of the constraint type. + +#### encode + +```python + | encode() +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, constraint_type_pb, category: str) -> "ConstraintType" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `constraint_type_pb`: the protocol buffer object corresponding with this class. +- `category`: the category of the constraint ('relation', 'set', 'range', 'distance). 
+ +**Returns**: + +A new instance of this class matching the protocol buffer object + ## ConstraintExpr Objects @@ -600,6 +727,37 @@ Check whether the Constraint Expression satisfies some basic requirements. Compare with another object. + +#### encode + +```python + | encode() -> models_pb2.Query.ConstraintExpr.And +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, and_pb) -> "And" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `and_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + ## Or Objects @@ -680,6 +838,37 @@ Check whether the Constraint Expression satisfies some basic requirements. Compare with another object. + +#### encode + +```python + | encode() -> models_pb2.Query.ConstraintExpr.Or +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, or_pb) -> "Or" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `or_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + ## Not Objects @@ -745,6 +934,37 @@ Check whether the constraint expression is valid wrt a data model. Compare with another object. + +#### encode + +```python + | encode() -> models_pb2.Query.ConstraintExpr.Not +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, not_pb) -> "Not" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `not_pb`: the protocol buffer object corresponding with this class. + +**Returns**: + +A new instance of this class matching the protocol buffer object + ## Constraint Objects @@ -791,7 +1011,7 @@ Examples: >>> attr_genre = Attribute("genre", str, True, "The genre of the book.") >>> c1 = Constraint("author", ConstraintType("==", "Stephen King")) >>> c2 = Constraint("year", ConstraintType(">", 1990)) ->>> c3 = Constraint("genre", ConstraintType("in", {"horror", "science_fiction"})) +>>> c3 = Constraint("genre", ConstraintType("in", ("horror", "science_fiction"))) >>> book_1 = Description({"author": "Stephen King", "year": 1991, "genre": "horror"}) >>> book_2 = Description({"author": "George Orwell", "year": 1948, "genre": "horror"}) @@ -855,6 +1075,37 @@ Compare with another object. Get the string representation of the constraint. + +#### encode + +```python + | encode() -> models_pb2.Query.ConstraintExpr.Constraint +``` + +Encode an instance of this class into a protocol buffer object. + +**Returns**: + +the matching protocol buffer object + + +#### decode + +```python + | @classmethod + | decode(cls, constraint_pb) -> "Constraint" +``` + +Decode a protocol buffer object that corresponds with this class into an instance of this class. + +**Arguments**: + +- `constraint_pb`: the protocol buffer object corresponding with this class. 
+ +**Returns**: + +A new instance of this class matching the protocol buffer object + ## Query Objects @@ -948,17 +1199,18 @@ Get the string representation of the constraint. ```python | @classmethod - | encode(cls, query_protobuf_object, query_object: "Query") -> None + | encode(cls, query_pb, query: "Query") -> None ``` Encode an instance of this class into the protocol buffer object. -The protocol buffer object in the query_protobuf_object argument must be matched with the instance of this class in the 'query_object' argument. +The protocol buffer object in the query_protobuf_object argument must be matched +with the instance of this class in the 'query_object' argument. **Arguments**: -- `query_protobuf_object`: the protocol buffer object whose type corresponds with this class. -- `query_object`: an instance of this class to be encoded in the protocol buffer object. +- `query_pb`: the protocol buffer object wrapping an object that corresponds with this class. +- `query`: an instance of this class to be encoded in the protocol buffer object. **Returns**: @@ -969,16 +1221,17 @@ None ```python | @classmethod - | decode(cls, query_protobuf_object) -> "Query" + | decode(cls, query_pb) -> "Query" ``` Decode a protocol buffer object that corresponds with this class into an instance of this class. -A new instance of this class must be created that matches the protocol buffer object in the 'query_protobuf_object' argument. +A new instance of this class must be created that matches the protocol +buffer object in the 'query_protobuf_object' argument. **Arguments**: -- `query_protobuf_object`: the protocol buffer object whose type corresponds with this class. +- `query_pb`: the protocol buffer object whose type corresponds with this class. **Returns**: diff --git a/docs/api/helpers/serializers.md b/docs/api/helpers/serializers.md new file mode 100644 index 0000000000..d975115fbe --- /dev/null +++ b/docs/api/helpers/serializers.md @@ -0,0 +1,37 @@ + +# aea.helpers.serializers + +This module contains Serializers that can be used for custom types. + + +## DictProtobufStructSerializer Objects + +```python +class DictProtobufStructSerializer() +``` + +Serialize python dictionaries of type DictType = Dict[str, ValueType] recursively conserving their dynamic type, using google.protobuf.Struct + +ValueType = PrimitiveType | DictType | List[ValueType]] +PrimitiveType = bool | int | float | str | bytes + + +#### encode + +```python + | @classmethod + | encode(cls, dictionary: Dict[str, Any]) -> bytes +``` + +Serialize compatible dictionary to bytes + + +#### decode + +```python + | @classmethod + | decode(cls, buffer: bytes) -> Dict[str, Any] +``` + +Deserialize a compatible dictionary + diff --git a/docs/api/helpers/storage/backends/base.md b/docs/api/helpers/storage/backends/base.md new file mode 100644 index 0000000000..a90d85f753 --- /dev/null +++ b/docs/api/helpers/storage/backends/base.md @@ -0,0 +1,157 @@ + +# aea.helpers.storage.backends.base + +This module contains storage abstract backend class. + + +## AbstractStorageBackend Objects + +```python +class AbstractStorageBackend(ABC) +``` + +Abstract base class for storage backend. + + +#### `__`init`__` + +```python + | __init__(uri: str) -> None +``` + +Init backend. + + +#### connect + +```python + | @abstractmethod + | async connect() -> None +``` + +Connect to backend. + + +#### disconnect + +```python + | @abstractmethod + | async disconnect() -> None +``` + +Disconnect the backend. 
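The `DictProtobufStructSerializer` documented above round-trips typed, nested dictionaries through `google.protobuf.Struct`. A small usage sketch, assuming the `aea.helpers.serializers` module path shown in the header; the payload values are placeholders:

```python
from aea.helpers.serializers import DictProtobufStructSerializer

data = {
    "name": "my_agent",
    "count": 3,
    "ratio": 0.5,
    "enabled": True,
    "payload": b"\x00\x01",
    "nested": {"inner": "value"},
}

encoded = DictProtobufStructSerializer.encode(data)   # bytes
decoded = DictProtobufStructSerializer.decode(encoded)
# dynamic types (int vs float, bytes, bools) are conserved on decode
```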
+ + +#### ensure`_`collection + +```python + | @abstractmethod + | async ensure_collection(collection_name: str) -> None +``` + +Create collection if not exits. + +**Arguments**: + +- `collection_name`: str. + +**Returns**: + +None + + +#### put + +```python + | @abstractmethod + | async put(collection_name: str, object_id: str, object_body: JSON_TYPES) -> None +``` + +Put object into collection. + +**Arguments**: + +- `collection_name`: str. +- `object_id`: str object id +- `object_body`: python dict, json compatible. + +**Returns**: + +None + + +#### get + +```python + | @abstractmethod + | async get(collection_name: str, object_id: str) -> Optional[JSON_TYPES] +``` + +Get object from the collection. + +**Arguments**: + +- `collection_name`: str. +- `object_id`: str object id + +**Returns**: + +dict if object exists in collection otherwise None + + +#### remove + +```python + | @abstractmethod + | async remove(collection_name: str, object_id: str) -> None +``` + +Remove object from the collection. + +**Arguments**: + +- `collection_name`: str. +- `object_id`: str object id + +**Returns**: + +None + + +#### find + +```python + | @abstractmethod + | async find(collection_name: str, field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY] +``` + +Get objects from the collection by filtering by field value. + +**Arguments**: + +- `collection_name`: str. +- `field`: field name to search: example "parent.field" +- `equals`: value field should be equal to + +**Returns**: + +list of objects bodies + + +#### list + +```python + | @abstractmethod + | async list(collection_name: str) -> List[OBJECT_ID_AND_BODY] +``` + +List all objects with keys from the collection. + +**Arguments**: + +- `collection_name`: str. + +**Returns**: + +Tuple of objects keys, bodies. + diff --git a/docs/api/helpers/storage/backends/sqlite.md b/docs/api/helpers/storage/backends/sqlite.md new file mode 100644 index 0000000000..695b776d13 --- /dev/null +++ b/docs/api/helpers/storage/backends/sqlite.md @@ -0,0 +1,149 @@ + +# aea.helpers.storage.backends.sqlite + +This module contains sqlite storage backend implementation. + + +## SqliteStorageBackend Objects + +```python +class SqliteStorageBackend(AbstractStorageBackend) +``` + +Sqlite storage backend. + + +#### `__`init`__` + +```python + | __init__(uri: str) -> None +``` + +Init backend. + + +#### connect + +```python + | async connect() -> None +``` + +Connect to backend. + + +#### disconnect + +```python + | async disconnect() -> None +``` + +Disconnect the backend. + + +#### ensure`_`collection + +```python + | async ensure_collection(collection_name: str) -> None +``` + +Create collection if not exits. + +**Arguments**: + +- `collection_name`: str. + +**Returns**: + +None + + +#### put + +```python + | async put(collection_name: str, object_id: str, object_body: JSON_TYPES) -> None +``` + +Put object into collection. + +**Arguments**: + +- `collection_name`: str. +- `object_id`: str object id +- `object_body`: python dict, json compatible. + +**Returns**: + +None + + +#### get + +```python + | async get(collection_name: str, object_id: str) -> Optional[JSON_TYPES] +``` + +Get object from the collection. + +**Arguments**: + +- `collection_name`: str. +- `object_id`: str object id + +**Returns**: + +dict if object exists in collection otherwise None + + +#### remove + +```python + | async remove(collection_name: str, object_id: str) -> None +``` + +Remove object from the collection. + +**Arguments**: + +- `collection_name`: str. 
+- `object_id`: str object id + +**Returns**: + +None + + +#### find + +```python + | async find(collection_name: str, field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY] +``` + +Get objects from the collection by filtering by field value. + +**Arguments**: + +- `collection_name`: str. +- `field`: field name to search: example "parent.field" +- `equals`: value field should be equal to + +**Returns**: + +None + + +#### list + +```python + | async list(collection_name: str) -> List[OBJECT_ID_AND_BODY] +``` + +List all objects with keys from the collection. + +**Arguments**: + +- `collection_name`: str. + +**Returns**: + +Tuple of objects keys, bodies. + diff --git a/docs/api/helpers/storage/generic_storage.md b/docs/api/helpers/storage/generic_storage.md new file mode 100644 index 0000000000..045da74933 --- /dev/null +++ b/docs/api/helpers/storage/generic_storage.md @@ -0,0 +1,300 @@ + +# aea.helpers.storage.generic`_`storage + +This module contains the storage implementation. + + +## AsyncCollection Objects + +```python +class AsyncCollection() +``` + +Async collection. + + +#### `__`init`__` + +```python + | __init__(storage_backend: AbstractStorageBackend, collection_name: str) +``` + +Init collection object. + +**Arguments**: + +- `storage_backend`: storage backed to use. +- `collection_name`: srt + + +#### put + +```python + | async put(object_id: str, object_body: JSON_TYPES) -> None +``` + +Put object into collection. + +**Arguments**: + +- `object_id`: str object id +- `object_body`: python dict, json compatible. + +**Returns**: + +None + + +#### get + +```python + | async get(object_id: str) -> Optional[JSON_TYPES] +``` + +Get object from the collection. + +**Arguments**: + +- `object_id`: str object id + +**Returns**: + +dict if object exists in collection otherwise None + + +#### remove + +```python + | async remove(object_id: str) -> None +``` + +Remove object from the collection. + +**Arguments**: + +- `object_id`: str object id + +**Returns**: + +None + + +#### find + +```python + | async find(field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY] +``` + +Get objects from the collection by filtering by field value. + +**Arguments**: + +- `field`: field name to search: example "parent.field" +- `equals`: value field should be equal to + +**Returns**: + +None + + +#### list + +```python + | async list() -> List[OBJECT_ID_AND_BODY] +``` + +List all objects with keys from the collection. + +**Returns**: + +Tuple of objects keys, bodies. + + +## SyncCollection Objects + +```python +class SyncCollection() +``` + +Async collection. + + +#### `__`init`__` + +```python + | __init__(async_collection_coro, loop: asyncio.AbstractEventLoop) +``` + +Init collection object. + +**Arguments**: + +- `async_collection_coro`: coroutine returns async collection. +- `loop`: abstract event loop where storage is running. + + +#### put + +```python + | put(object_id: str, object_body: JSON_TYPES) -> None +``` + +Put object into collection. + +**Arguments**: + +- `object_id`: str object id +- `object_body`: python dict, json compatible. + +**Returns**: + +None + + +#### get + +```python + | get(object_id: str) -> Optional[JSON_TYPES] +``` + +Get object from the collection. + +**Arguments**: + +- `object_id`: str object id + +**Returns**: + +dict if object exists in collection otherwise None + + +#### remove + +```python + | remove(object_id: str) -> None +``` + +Remove object from the collection. 
+ +**Arguments**: + +- `object_id`: str object id + +**Returns**: + +None + + +#### find + +```python + | find(field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY] +``` + +Get objects from the collection by filtering by field value. + +**Arguments**: + +- `field`: field name to search: example "parent.field" +- `equals`: value field should be equal to + +**Returns**: + +List of object bodies + + +#### list + +```python + | list() -> List[OBJECT_ID_AND_BODY] +``` + +List all objects with keys from the collection. + +**Returns**: + +Tuple of objects keys, bodies. + + +## Storage Objects + +```python +class Storage(Runnable) +``` + +Generic storage. + + +#### `__`init`__` + +```python + | __init__(storage_uri: str, loop: asyncio.AbstractEventLoop = None, threaded: bool = False) -> None +``` + +Init stortage. + +**Arguments**: + +- `storage_uri`: configuration string for storage. +- `loop`: asyncio event loop to use. +- `threaded`: bool. start in thread if True. + +**Returns**: + +None + + +#### wait`_`connected + +```python + | async wait_connected() -> None +``` + +Wait generic storage is connected. + + +#### is`_`connected + +```python + | @property + | is_connected() -> bool +``` + +Get running state of the storage. + + +#### run + +```python + | async run() +``` + +Connect storage. + + +#### get`_`collection + +```python + | async get_collection(collection_name: str) -> AsyncCollection +``` + +Get async collection. + + +#### get`_`sync`_`collection + +```python + | get_sync_collection(collection_name: str) -> SyncCollection +``` + +Get sync collection. + + +#### `__`repr`__` + +```python + | __repr__() -> str +``` + +Get string representation of the storage. + diff --git a/docs/api/helpers/transaction/base.md b/docs/api/helpers/transaction/base.md index 2db69505a1..4c0eaa2bb4 100644 --- a/docs/api/helpers/transaction/base.md +++ b/docs/api/helpers/transaction/base.md @@ -16,7 +16,7 @@ This class represents an instance of RawTransaction. #### `__`init`__` ```python - | __init__(ledger_id: str, body: Any) + | __init__(ledger_id: str, body: JSONLike) ``` Initialise an instance of RawTransaction. @@ -220,7 +220,7 @@ This class represents an instance of SignedTransaction. #### `__`init`__` ```python - | __init__(ledger_id: str, body: Any) + | __init__(ledger_id: str, body: JSONLike) ``` Initialise an instance of SignedTransaction. @@ -424,7 +424,7 @@ This class represents an instance of State. #### `__`init`__` ```python - | __init__(ledger_id: str, body: bytes) + | __init__(ledger_id: str, body: JSONLike) ``` Initialise an instance of State. @@ -444,7 +444,7 @@ Get the id of the ledger on which the terms are to be settled. ```python | @property - | body() + | body() -> JSONLike ``` Get the body. @@ -803,11 +803,21 @@ Get fee by currency. ```python | @property - | kwargs() -> Dict[str, Any] + | kwargs() -> JSONLike ``` Get the kwargs. + +#### is`_`strict + +```python + | @property + | is_strict() -> bool +``` + +Get is_strict. + #### get`_`hash @@ -905,7 +915,7 @@ This class represents an instance of TransactionDigest. #### `__`init`__` ```python - | __init__(ledger_id: str, body: Any) + | __init__(ledger_id: str, body: str) ``` Initialise an instance of TransactionDigest. @@ -925,7 +935,7 @@ Get the id of the ledger on which the terms are to be settled. ```python | @property - | body() -> Any + | body() -> str ``` Get the receipt. @@ -1002,7 +1012,7 @@ This class represents an instance of TransactionReceipt. 
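From skill code the generic storage is reached through the agent context (the `storage` property documented earlier) and is typically used via its synchronous wrapper. A minimal sketch, assuming the skill context exposes the same `storage` property, the agent was started with a `storage_uri`, and the collection and object names are placeholders:

```python
# inside a skill behaviour or handler method
storage = self.context.storage
if storage is not None and storage.is_connected:
    collection = storage.get_sync_collection("my_collection")  # placeholder name
    collection.put("order_1", {"price": 10, "status": "open"})
    body = collection.get("order_1")
    open_orders = collection.find(field="status", equals="open")
    all_objects = collection.list()
```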
#### `__`init`__` ```python - | __init__(ledger_id: str, receipt: Any, transaction: Any) + | __init__(ledger_id: str, receipt: JSONLike, transaction: JSONLike) ``` Initialise an instance of TransactionReceipt. @@ -1022,7 +1032,7 @@ Get the id of the ledger on which the terms are to be settled. ```python | @property - | receipt() -> Any + | receipt() -> JSONLike ``` Get the receipt. @@ -1032,7 +1042,7 @@ Get the receipt. ```python | @property - | transaction() -> Any + | transaction() -> JSONLike ``` Get the transaction. diff --git a/docs/api/protocols/base.md b/docs/api/protocols/base.md index 742c9200b3..4071d6f70a 100644 --- a/docs/api/protocols/base.md +++ b/docs/api/protocols/base.md @@ -44,6 +44,25 @@ Initialize a Message object. - `body`: the dictionary of values to hold. - `kwargs`: any additional value to add to the body. It will overwrite the body values. + +#### json + +```python + | json() -> dict +``` + +Get json friendly str representation of the message. + + +#### from`_`json + +```python + | @classmethod + | from_json(cls, data: dict) -> "Message" +``` + +Construct message instance from json data. + #### valid`_`performatives @@ -228,6 +247,16 @@ Get the string representation of the message. Abbreviated to prevent spamming of Encode the message. + +#### decode + +```python + | @classmethod + | decode(cls, data: bytes) -> "Message" +``` + +Decode the message. + #### has`_`dialogue`_`info diff --git a/docs/api/helpers/dialogue/base.md b/docs/api/protocols/dialogue/base.md similarity index 66% rename from docs/api/helpers/dialogue/base.md rename to docs/api/protocols/dialogue/base.md index b92a099cb3..f27455d135 100644 --- a/docs/api/helpers/dialogue/base.md +++ b/docs/api/protocols/dialogue/base.md @@ -332,6 +332,60 @@ Initialize a dialogue. None + +#### add`_`terminal`_`state`_`callback + +```python + | add_terminal_state_callback(fn: Callable[["Dialogue"], None]) -> None +``` + +Add callback to be called on dialogue reach terminal state. + +**Arguments**: + +- `fn`: callable to be called with one argument: Dialogue + +**Returns**: + +None + + +#### `__`eq`__` + +```python + | __eq__(other) -> bool +``` + +Compare two dialogues. + + +#### json + +```python + | json() -> dict +``` + +Get json representation of the dialogue. + + +#### from`_`json + +```python + | @classmethod + | from_json(cls, message_class: Type[Message], data: dict) -> "Dialogue" +``` + +Create a dialogue instance with all messages from json data. + +**Arguments**: + +- `message_class`: type of message used with this dialogue +- `data`: dict with data exported with Dialogue.to_json() method + +**Returns**: + +Dialogue instance + #### dialogue`_`label @@ -595,6 +649,305 @@ Add dialogue endstate stats. None + +#### find`_`caller`_`object + +```python +find_caller_object(object_type: Type) +``` + +Find caller object of certain type in the call stack. + + +## BasicDialoguesStorage Objects + +```python +class BasicDialoguesStorage() +``` + +Dialogues state storage. + + +#### `__`init`__` + +```python + | __init__(dialogues: "Dialogues") -> None +``` + +Init dialogues storage. + + +#### dialogues`_`in`_`terminal`_`state + +```python + | @property + | dialogues_in_terminal_state() -> List["Dialogue"] +``` + +Get all dialogues in terminal state. + + +#### dialogues`_`in`_`active`_`state + +```python + | @property + | dialogues_in_active_state() -> List["Dialogue"] +``` + +Get all dialogues in active state. 
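+
+
+As a brief illustrative sketch (not generated from the source code; the `watch_dialogue` helper is hypothetical), the terminal-state callback documented above can be used to react to a dialogue just before the dialogues storage handles it:
+
+```python
+from aea.protocols.dialogue.base import Dialogue
+
+
+def watch_dialogue(dialogue: Dialogue) -> None:
+    """Register a callback fired once `dialogue` reaches one of its end states."""
+
+    def _on_terminal(finished: Dialogue) -> None:
+        # At this point the dialogue is in a terminal state and, depending on
+        # configuration, may be kept by the dialogues storage.
+        print(f"dialogue finished: {finished.dialogue_label}")
+
+    dialogue.add_terminal_state_callback(_on_terminal)
+```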
+
+
+#### is`_`terminal`_`dialogues`_`kept
+
+```python
+ | @property
+ | is_terminal_dialogues_kept() -> bool
+```
+
+Return True if dialogues should be kept after reaching a terminal state.
+
+
+#### dialogue`_`terminal`_`state`_`callback
+
+```python
+ | dialogue_terminal_state_callback(dialogue: "Dialogue") -> None
+```
+
+Method to be called when a dialogue reaches a terminal state.
+
+
+#### setup
+
+```python
+ | setup() -> None
+```
+
+Set up dialogue storage.
+
+
+#### teardown
+
+```python
+ | teardown() -> None
+```
+
+Tear down dialogue storage.
+
+
+#### add
+
+```python
+ | add(dialogue: Dialogue) -> None
+```
+
+Add dialogue to storage.
+
+**Arguments**:
+
+- `dialogue`: dialogue to add.
+
+**Returns**:
+
+None
+
+
+#### remove
+
+```python
+ | remove(dialogue_label: DialogueLabel) -> None
+```
+
+Remove dialogue from storage by its label.
+
+**Arguments**:
+
+- `dialogue_label`: label of the dialogue to remove
+
+**Returns**:
+
+None
+
+
+#### get
+
+```python
+ | get(dialogue_label: DialogueLabel) -> Optional[Dialogue]
+```
+
+Get dialogue stored by its label.
+
+**Arguments**:
+
+- `dialogue_label`: label of the dialogue
+
+**Returns**:
+
+dialogue if present, otherwise None
+
+
+#### get`_`dialogues`_`with`_`counterparty
+
+```python
+ | get_dialogues_with_counterparty(counterparty: Address) -> List[Dialogue]
+```
+
+Get the dialogues by address.
+
+**Arguments**:
+
+- `counterparty`: the counterparty
+
+**Returns**:
+
+The dialogues with the counterparty.
+
+
+#### is`_`in`_`incomplete
+
+```python
+ | is_in_incomplete(dialogue_label: DialogueLabel) -> bool
+```
+
+Check whether the dialogue label is present in the list of incomplete dialogue labels.
+
+
+#### set`_`incomplete`_`dialogue
+
+```python
+ | set_incomplete_dialogue(incomplete_dialogue_label, complete_dialogue_label) -> None
+```
+
+Set incomplete dialogue label.
+
+
+#### is`_`dialogue`_`present
+
+```python
+ | is_dialogue_present(dialogue_label: DialogueLabel) -> bool
+```
+
+Check whether a dialogue with the specified label is present in storage.
+
+
+#### get`_`latest`_`label
+
+```python
+ | get_latest_label(dialogue_label: DialogueLabel) -> DialogueLabel
+```
+
+Get the latest label for a dialogue.
+
+
+## PersistDialoguesStorage Objects
+
+```python
+class PersistDialoguesStorage(BasicDialoguesStorage)
+```
+
+Persist dialogues storage.
+
+Uses generic storage to load/save dialogues data on setup/teardown.
+
+
+#### `__`init`__`
+
+```python
+ | __init__(dialogues: "Dialogues") -> None
+```
+
+Init dialogues storage.
+
+
+#### get`_`skill`_`component
+
+```python
+ | @staticmethod
+ | get_skill_component() -> Optional[SkillComponent]
+```
+
+Get the skill component this dialogues storage was constructed for.
+
+
+#### setup
+
+```python
+ | setup() -> None
+```
+
+Set up dialogue storage.
+
+
+#### teardown
+
+```python
+ | teardown() -> None
+```
+
+Tear down dialogue storage.
+
+
+#### remove
+
+```python
+ | remove(dialogue_label: DialogueLabel) -> None
+```
+
+Remove dialogue from memory and persistent storage.
+
+
+## PersistDialoguesStorageWithOffloading Objects
+
+```python
+class PersistDialoguesStorageWithOffloading(PersistDialoguesStorage)
+```
+
+Dialogue Storage with dialogues offloading.
+
+
+#### dialogue`_`terminal`_`state`_`callback
+
+```python
+ | dialogue_terminal_state_callback(dialogue: "Dialogue") -> None
+```
+
+Called when the dialogue reaches a terminal state.
+
+
+#### get
+
+```python
+ | get(dialogue_label: DialogueLabel) -> Optional[Dialogue]
+```
+
+Try to get dialogue by label from memory or persistent storage.
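+
+
+As a short usage sketch (illustrative only; `finished_dialogues_with` is a hypothetical helper and the counterparty address is a plain string alias), the retrieval methods documented in this module can be combined to list the completed dialogues with a given counterparty:
+
+```python
+from typing import List
+
+from aea.protocols.dialogue.base import Dialogue, PersistDialoguesStorageWithOffloading
+
+
+def finished_dialogues_with(
+    storage: PersistDialoguesStorageWithOffloading, counterparty: str
+) -> List[Dialogue]:
+    """Return the dialogues with `counterparty` that have reached a terminal state."""
+    terminal = storage.dialogues_in_terminal_state
+    return [
+        dialogue
+        for dialogue in storage.get_dialogues_with_counterparty(counterparty)
+        if dialogue in terminal
+    ]
+```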
+ + +#### get`_`dialogues`_`with`_`counterparty + +```python + | get_dialogues_with_counterparty(counterparty: Address) -> List[Dialogue] +``` + +Get the dialogues by address. + +**Arguments**: + +- `counterparty`: the counterparty + +**Returns**: + +The dialogues with the counterparty. + + +#### dialogues`_`in`_`terminal`_`state + +```python + | @property + | dialogues_in_terminal_state() -> List["Dialogue"] +``` + +Get all dialogues in terminal state. + ## Dialogues Objects @@ -608,7 +961,7 @@ The dialogues class keeps track of all dialogues for an agent. #### `__`init`__` ```python - | __init__(self_address: Address, end_states: FrozenSet[Dialogue.EndState], message_class: Type[Message], dialogue_class: Type[Dialogue], role_from_first_message: Callable[[Message, Address], Dialogue.Role]) -> None + | __init__(self_address: Address, end_states: FrozenSet[Dialogue.EndState], message_class: Type[Message], dialogue_class: Type[Dialogue], role_from_first_message: Callable[[Message, Address], Dialogue.Role], keep_terminal_state_dialogues: Optional[bool] = None) -> None ``` Initialize dialogues. @@ -617,20 +970,21 @@ Initialize dialogues. - `self_address`: the address of the entity for whom dialogues are maintained - `end_states`: the list of dialogue endstates +- `keep_terminal_state_dialogues`: specify do dialogues in terminal state should stay or not **Returns**: None - -#### dialogues + +#### is`_`keep`_`dialogues`_`in`_`terminal`_`state ```python | @property - | dialogues() -> Dict[DialogueLabel, Dialogue] + | is_keep_dialogues_in_terminal_state() -> bool ``` -Get dictionary of dialogues in which the agent engages. +Is requrired to keep dialogues in terminal state. #### self`_`address @@ -790,3 +1144,21 @@ Retrieve the dialogue 'message' belongs to. the dialogue, or None in case such a dialogue does not exist + +#### setup + +```python + | setup() -> None +``` + +Set up. + + +#### teardown + +```python + | teardown() -> None +``` + +Tear down. + diff --git a/docs/api/runtime.md b/docs/api/runtime.md index 78c6fdc595..2fd1d308db 100644 --- a/docs/api/runtime.md +++ b/docs/api/runtime.md @@ -40,6 +40,16 @@ Init runtime. None + +#### storage + +```python + | @property + | storage() -> Optional[Storage] +``` + +Get optional storage. + #### loop`_`mode diff --git a/docs/api/skills/base.md b/docs/api/skills/base.md index 3912b3f890..b891f7aafc 100644 --- a/docs/api/skills/base.md +++ b/docs/api/skills/base.md @@ -173,6 +173,16 @@ Get connection status. Get outbox. + +#### storage + +```python + | @property + | storage() -> Optional[Storage] +``` + +Get optional storage for agent. + #### message`_`in`_`queue @@ -563,6 +573,26 @@ class Model(SkillComponent, ABC) This class implements an abstract model. + +#### `__`init`__` + +```python + | __init__(name: str, skill_context: SkillContext, configuration: Optional[SkillComponentConfiguration] = None, keep_terminal_state_dialogues: Optional[bool] = None, **kwargs, ,) -> None +``` + +Initialize a model. + +**Arguments**: + +- `name`: the name of the component. +- `configuration`: the configuration for the component. +- `skill_context`: the skill context. 
+- `keep_terminal_state_dialogues`: specify do dialogues in terminal state should stay or not + +**Returns**: + +None + #### setup diff --git a/docs/api/test_tools/constants.md b/docs/api/test_tools/constants.md new file mode 100644 index 0000000000..c19ee2722a --- /dev/null +++ b/docs/api/test_tools/constants.md @@ -0,0 +1,5 @@ + +# aea.test`_`tools.constants + +This is a module with constants for test tools. + diff --git a/docs/api/test_tools/exceptions.md b/docs/api/test_tools/exceptions.md new file mode 100644 index 0000000000..2c364e263a --- /dev/null +++ b/docs/api/test_tools/exceptions.md @@ -0,0 +1,14 @@ + +# aea.test`_`tools.exceptions + +Module with AEA testing exceptions. + + +## AEATestingException Objects + +```python +class AEATestingException(Exception) +``` + +An exception to be raised on incorrect testing tools usage. + diff --git a/docs/api/test_tools/test_cases.md b/docs/api/test_tools/test_cases.md index c298c5728f..c8079f6e51 100644 --- a/docs/api/test_tools/test_cases.md +++ b/docs/api/test_tools/test_cases.md @@ -463,6 +463,27 @@ Run from agent's directory. Result + +#### remove`_`private`_`key + +```python + | @classmethod + | remove_private_key(cls, ledger_api_id: str = DEFAULT_LEDGER, connection: bool = False) -> Result +``` + +Remove private key with CLI command. + +Run from agent's directory. + +**Arguments**: + +- `ledger_api_id`: ledger API ID. +- `connection`: whether or not the private key filepath is for a connection. + +**Returns**: + +Result + #### replace`_`private`_`key`_`in`_`file diff --git a/docs/app-areas.md b/docs/app-areas.md index 0f721d08f1..1ccfcce5f9 100644 --- a/docs/app-areas.md +++ b/docs/app-areas.md @@ -7,7 +7,7 @@ As described in the guide on agent-orien * multiple stakeholders, which * are represented by AEAs, that * interact autonomously and -* communicate decentrally. +* communicate _via_ a peer-to-peer network. There are at least five general application areas for AEAs: @@ -23,7 +23,7 @@ In the short-term we see AEAs primarily deployed in three areas: * Off-load repetitive tasks: AEAs can automate well defined processes in supply chain, transport and finance. -* Micro transactions: AEAs make it economically viable to execute trades which reference only small values. This is particularly relevant in areas where there is a (data) supply side constituted of many small actors and a single demand side. +* Micro transactions: AEAs make it economically viable to execute trades which involve small value transfers. This is particularly relevant in areas where there is a (data) supply side constituted of many small actors and a single demand side. * Wallet agents: AEAs can simplify the interactions with blockchains for end users. For instance, they can act as "smart wallets" which optimize blockchain interactions on behalf of the user. 
diff --git a/docs/aries-cloud-agent-demo.md b/docs/aries-cloud-agent-demo.md index 652c93d99f..19e472a8b3 100644 --- a/docs/aries-cloud-agent-demo.md +++ b/docs/aries-cloud-agent-demo.md @@ -180,7 +180,7 @@ Now you can create **Alice_AEA** and **Faber_AEA** in terminals 3 and 4 respecti In the third terminal, fetch **Alice_AEA** and move into its project folder: ``` bash -aea fetch fetchai/aries_alice:0.17.0 +aea fetch fetchai/aries_alice:0.18.0 cd aries_alice ``` @@ -191,11 +191,11 @@ The following steps create **Alice_AEA** from scratch: ``` bash aea create aries_alice cd aries_alice -aea add connection fetchai/p2p_libp2p:0.12.0 -aea add connection fetchai/soef:0.13.0 -aea add connection fetchai/http_client:0.14.0 -aea add connection fetchai/webhook:0.10.0 -aea add skill fetchai/aries_alice:0.13.0 +aea add connection fetchai/p2p_libp2p:0.13.0 +aea add connection fetchai/soef:0.14.0 +aea add connection fetchai/http_client:0.15.0 +aea add connection fetchai/webhook:0.11.0 +aea add skill fetchai/aries_alice:0.14.0 ```

@@ -265,7 +265,7 @@ Once you see a message of the form `To join its network use multiaddr: ['SOME_AD In the fourth terminal, fetch **Faber_AEA** and move into its project folder: ``` bash -aea fetch fetchai/aries_faber:0.17.0 +aea fetch fetchai/aries_faber:0.18.0 cd aries_faber ``` @@ -276,11 +276,11 @@ The following steps create **Faber_AEA** from scratch: ``` bash aea create aries_faber cd aries_faber -aea add connection fetchai/p2p_libp2p:0.12.0 -aea add connection fetchai/soef:0.13.0 -aea add connection fetchai/http_client:0.14.0 -aea add connection fetchai/webhook:0.10.0 -aea add skill fetchai/aries_faber:0.12.0 +aea add connection fetchai/p2p_libp2p:0.13.0 +aea add connection fetchai/soef:0.14.0 +aea add connection fetchai/http_client:0.15.0 +aea add connection fetchai/webhook:0.11.0 +aea add skill fetchai/aries_faber:0.13.0 ```

diff --git a/docs/assets/acn-tiers.jpg b/docs/assets/acn-tiers.jpg new file mode 100644 index 0000000000..b7594bed03 Binary files /dev/null and b/docs/assets/acn-tiers.jpg differ diff --git a/docs/assets/acn-tiers.png b/docs/assets/acn-tiers_.png similarity index 100% rename from docs/assets/acn-tiers.png rename to docs/assets/acn-tiers_.png diff --git a/docs/assets/acn-trust-security.jpg b/docs/assets/acn-trust-security.jpg new file mode 100644 index 0000000000..8d57dfb163 Binary files /dev/null and b/docs/assets/acn-trust-security.jpg differ diff --git a/docs/assets/acn-trust-security.png b/docs/assets/acn-trust-security_.png similarity index 100% rename from docs/assets/acn-trust-security.png rename to docs/assets/acn-trust-security_.png diff --git a/docs/assets/aea-vs-agent-vs-multiplexer.jpg b/docs/assets/aea-vs-agent-vs-multiplexer.jpg new file mode 100644 index 0000000000..634801bca5 Binary files /dev/null and b/docs/assets/aea-vs-agent-vs-multiplexer.jpg differ diff --git a/docs/assets/aea-vs-agent-vs-multiplexer.png b/docs/assets/aea-vs-agent-vs-multiplexer_.png similarity index 100% rename from docs/assets/aea-vs-agent-vs-multiplexer.png rename to docs/assets/aea-vs-agent-vs-multiplexer_.png diff --git a/docs/assets/contracts.jpg b/docs/assets/contracts.jpg new file mode 100644 index 0000000000..189eaf8444 Binary files /dev/null and b/docs/assets/contracts.jpg differ diff --git a/docs/assets/contracts.png b/docs/assets/contracts_.png similarity index 100% rename from docs/assets/contracts.png rename to docs/assets/contracts_.png diff --git a/docs/assets/decision-maker.jpg b/docs/assets/decision-maker.jpg new file mode 100644 index 0000000000..acf10704b3 Binary files /dev/null and b/docs/assets/decision-maker.jpg differ diff --git a/docs/assets/decision-maker.png b/docs/assets/decision-maker_.png similarity index 100% rename from docs/assets/decision-maker.png rename to docs/assets/decision-maker_.png diff --git a/docs/assets/dht.jpg b/docs/assets/dht.jpg new file mode 100644 index 0000000000..7688c339c9 Binary files /dev/null and b/docs/assets/dht.jpg differ diff --git a/docs/assets/dht.png b/docs/assets/dht_.png similarity index 100% rename from docs/assets/dht.png rename to docs/assets/dht_.png diff --git a/docs/assets/envelope.jpg b/docs/assets/envelope.jpg new file mode 100644 index 0000000000..e9a823bfa9 Binary files /dev/null and b/docs/assets/envelope.jpg differ diff --git a/docs/assets/envelope.png b/docs/assets/envelope_.png similarity index 100% rename from docs/assets/envelope.png rename to docs/assets/envelope_.png diff --git a/docs/assets/execution.jpg b/docs/assets/execution.jpg new file mode 100644 index 0000000000..448619d2e3 Binary files /dev/null and b/docs/assets/execution.jpg differ diff --git a/docs/assets/execution.png b/docs/assets/execution_.png similarity index 100% rename from docs/assets/execution.png rename to docs/assets/execution_.png diff --git a/docs/assets/gym-skill.jpg b/docs/assets/gym-skill.jpg new file mode 100644 index 0000000000..2fe6d20896 Binary files /dev/null and b/docs/assets/gym-skill.jpg differ diff --git a/docs/assets/gym-skill.png b/docs/assets/gym-skill_.png similarity index 100% rename from docs/assets/gym-skill.png rename to docs/assets/gym-skill_.png diff --git a/docs/assets/http-integration.jpg b/docs/assets/http-integration.jpg new file mode 100644 index 0000000000..e815825572 Binary files /dev/null and b/docs/assets/http-integration.jpg differ diff --git a/docs/assets/http-integration.png b/docs/assets/http-integration_.png 
similarity index 100% rename from docs/assets/http-integration.png rename to docs/assets/http-integration_.png diff --git a/docs/assets/keys.jpg b/docs/assets/keys.jpg new file mode 100644 index 0000000000..549e140912 Binary files /dev/null and b/docs/assets/keys.jpg differ diff --git a/docs/assets/keys.png b/docs/assets/keys_.png similarity index 100% rename from docs/assets/keys.png rename to docs/assets/keys_.png diff --git a/docs/assets/multiplexer.jpg b/docs/assets/multiplexer.jpg new file mode 100644 index 0000000000..7b204e157d Binary files /dev/null and b/docs/assets/multiplexer.jpg differ diff --git a/docs/assets/multiplexer.png b/docs/assets/multiplexer_.png similarity index 100% rename from docs/assets/multiplexer.png rename to docs/assets/multiplexer_.png diff --git a/docs/assets/oef-ledger.jpg b/docs/assets/oef-ledger.jpg new file mode 100644 index 0000000000..3c9b01d5c8 Binary files /dev/null and b/docs/assets/oef-ledger.jpg differ diff --git a/docs/assets/oef-ledger.png b/docs/assets/oef-ledger_.png similarity index 100% rename from docs/assets/oef-ledger.png rename to docs/assets/oef-ledger_.png diff --git a/docs/assets/simplified-aea.jpg b/docs/assets/simplified-aea.jpg new file mode 100644 index 0000000000..4b65a3e56d Binary files /dev/null and b/docs/assets/simplified-aea.jpg differ diff --git a/docs/assets/simplified-aea.png b/docs/assets/simplified-aea_.png similarity index 100% rename from docs/assets/simplified-aea.png rename to docs/assets/simplified-aea_.png diff --git a/docs/assets/skill-components.jpg b/docs/assets/skill-components.jpg new file mode 100644 index 0000000000..1af755af68 Binary files /dev/null and b/docs/assets/skill-components.jpg differ diff --git a/docs/assets/skill-components.png b/docs/assets/skill-components_.png similarity index 100% rename from docs/assets/skill-components.png rename to docs/assets/skill-components_.png diff --git a/docs/assets/skills.jpg b/docs/assets/skills.jpg new file mode 100644 index 0000000000..8b0848f48f Binary files /dev/null and b/docs/assets/skills.jpg differ diff --git a/docs/assets/skills.png b/docs/assets/skills_.png similarity index 100% rename from docs/assets/skills.png rename to docs/assets/skills_.png diff --git a/docs/build-aea-programmatically.md b/docs/build-aea-programmatically.md index 9d1e838198..a7b01900fa 100644 --- a/docs/build-aea-programmatically.md +++ b/docs/build-aea-programmatically.md @@ -56,7 +56,7 @@ We will use the stub connection to pass envelopes in and out of the AEA. Ensure ``` ## Initialise the AEA -We use the
`AEABuilder` to readily build an AEA. By default, the `AEABuilder` adds the `fetchai/default:0.9.0` protocol, the `fetchai/stub:0.12.0` connection and the `fetchai/error:0.9.0` skill. +We use the `AEABuilder` to readily build an AEA. By default, the `AEABuilder` adds the `fetchai/default:0.10.0` protocol, the `fetchai/stub:0.13.0` connection and the `fetchai/error:0.10.0` skill. ``` python # Instantiate the builder and build the AEA # By default, the default protocol, error skill and stub connection are added @@ -130,7 +130,7 @@ We run the AEA from a different thread so that we can still use the main thread We use the input and output text files to send an envelope to our AEA and receive a response (from the echo skill) ``` python # Create a message inside an envelope and get the stub connection to pass it on to the echo skill - message_text = b"my_aea,other_agent,fetchai/default:0.9.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello," + message_text = b"my_aea,other_agent,fetchai/default:0.10.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello," with open(INPUT_FILE, "wb") as f: write_with_lock(f, message_text) print(b"input message: " + message_text) @@ -156,8 +156,8 @@ Finally stop our AEA and wait for it to finish ## Running the AEA If you now run this python script file, you should see this output: - input message: my_aea,other_agent,fetchai/default:0.9.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello, - output message: other_agent,my_aea,fetchai/default:0.9.0,...\x05hello + input message: my_aea,other_agent,fetchai/default:0.10.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello, + output message: other_agent,my_aea,fetchai/default:0.10.0,...\x05hello ## Entire code listing @@ -249,7 +249,7 @@ def run(): time.sleep(4) # Create a message inside an envelope and get the stub connection to pass it on to the echo skill - message_text = b"my_aea,other_agent,fetchai/default:0.9.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello," + message_text = b"my_aea,other_agent,fetchai/default:0.10.0,\x12\x10\x08\x01\x12\x011*\t*\x07\n\x05hello," with open(INPUT_FILE, "wb") as f: write_with_lock(f, message_text) print(b"input message: " + message_text) diff --git a/docs/car-park-skills.md b/docs/car-park-skills.md index c32313399d..1c5116918f 100644 --- a/docs/car-park-skills.md +++ b/docs/car-park-skills.md @@ -55,7 +55,7 @@ Follow the Preliminaries and @@ -89,7 +89,7 @@ default_routing: Then, fetch the car data client AEA: ``` bash -aea fetch fetchai/car_data_buyer:0.19.0 +aea fetch fetchai/car_data_buyer:0.20.0 cd car_data_buyer aea install ``` @@ -101,19 +101,19 @@ The following steps create the car data client from scratch: ``` bash aea create car_data_buyer cd car_data_buyer -aea add connection fetchai/p2p_libp2p:0.12.0 -aea add connection fetchai/soef:0.13.0 -aea add connection fetchai/ledger:0.10.0 -aea add skill fetchai/carpark_client:0.17.0 +aea add connection fetchai/p2p_libp2p:0.13.0 +aea add connection fetchai/soef:0.14.0 +aea add connection fetchai/ledger:0.11.0 +aea add skill fetchai/carpark_client:0.18.0 aea install -aea config set agent.default_connection fetchai/p2p_libp2p:0.12.0 +aea config set agent.default_connection fetchai/p2p_libp2p:0.13.0 ``` In `car_data_buyer/aea-config.yaml` add ``` yaml default_routing: - fetchai/ledger_api:0.7.0: fetchai/ledger:0.10.0 - fetchai/oef_search:0.10.0: fetchai/soef:0.13.0 + fetchai/ledger_api:0.8.0: fetchai/ledger:0.11.0 + fetchai/oef_search:0.11.0: fetchai/soef:0.14.0 ```

diff --git a/docs/cli-commands.md b/docs/cli-commands.md index a345fb3003..738a6bbbb5 100644 --- a/docs/cli-commands.md +++ b/docs/cli-commands.md @@ -3,7 +3,8 @@ | Command | Description | | ------------------------------------------- | ---------------------------------------------------------------------------- | | `add [package_type] [public_id]` | Add a `package_type` connection, contract, protocol, or skill, with `[public_id]`, to the AEA. `add --local` to add from local `packages` directory. | -| `add-key [ledger_id] file` | Add a private key from a file for `ledger_id`. | +| `add-key [ledger_id] file [--connection]` | Add a private key from a file for `ledger_id`. | +| `build` | Build the agent and its components. | | `create [name]` | Create a new aea project called `name`. | | `config get [path]` | Reads the config specified in `path` and prints its target. | | `config set [path] [--type TYPE]` | Sets a new value for the target of the `path`. Optionally cast to type. | @@ -27,11 +28,12 @@ | `login USERNAME [--password password]` | Login to a registry account with credentials. | | `logout` | Logout from registry account. | | `publish` | Publish the AEA to registry. Needs to be executed from an AEA project.`publish --local` to publish to local `packages` directory. | -| `push [package_type] [public_id]` | Push connection, protocol, or skill with `public_id` to registry. `push --local` to push to local `packages` directory. | -| `remove [package_type] [name]` | Remove connection, protocol, or skill, called `name`, from AEA. | +| `push [package_type] [public_id]` | Push connection, protocol, or skill with `public_id` to registry. `push --local` to push to local `packages` directory. | +| `remove [package_type] [name]` | Remove connection, protocol, or skill, called `name`, from AEA. | +| `remove-key [ledger_id] [name]` | Remove a private key registered with id `ledger_id`. | | `run {using [connections, ...]}` | Run the AEA on the Fetch.ai network with default or specified connections. | -| `search [package_type]` | Search for components in the registry. `search --local [package_type] [--query searching_query]` to search in local `packages` directory. | -| `scaffold [package_type] [name]` | Scaffold a new connection, protocol, or skill called `name`. | +| `search [package_type]` | Search for components in the registry. `search --local [package_type] [--query searching_query]` to search in local `packages` directory. | +| `scaffold [package_type] [name]` | Scaffold a new connection, protocol, or skill called `name`. | | `-v DEBUG run` | Run with debugging. |
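+
+As a minimal scripting sketch (not part of the CLI reference; the `aea_cli` wrapper and the `my_aea` project name are illustrative), the commands in the table above can also be driven from Python when automating project setup:
+
+```python
+import subprocess
+
+
+def aea_cli(*args: str, cwd: str = ".") -> None:
+    """Run an `aea` CLI command and raise if it exits with a non-zero status."""
+    subprocess.run(["aea", *args], cwd=cwd, check=True)
+
+
+aea_cli("create", "my_aea")                  # `create [name]`
+aea_cli("-v", "DEBUG", "run", cwd="my_aea")  # `-v DEBUG run`
+```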