diff --git a/.coveragerc b/.coveragerc index 78fabe2025..b404932192 100644 --- a/.coveragerc +++ b/.coveragerc @@ -15,9 +15,10 @@ omit = hummingbot/client/ui/parser.py hummingbot/connector/derivative/position.py hummingbot/connector/exchange/bitfinex* - hummingbot/connector/exchange/coinbase_pro* hummingbot/connector/exchange/foxbit* hummingbot/connector/exchange/hitbtc* + hummingbot/connector/derivative/dydx_v4_perpetual/* + hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/* hummingbot/connector/exchange/injective_v2/account_delegation_script.py hummingbot/connector/exchange/paper_trade* hummingbot/connector/gateway/** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8e3bc0dc60..8b6d4684f0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -80,7 +80,7 @@ If the Foundation team requests changes, make more commits to your branch to add ## Unit Test Coverage -A minimum of 75% unit test coverage is required for all changes included in a pull request. However, some components, like UI components, are excluded from this validation. +A minimum of 80% unit test coverage is required for all changes included in a pull request. However, some components, like UI components, are excluded from this validation. To run tests locally, run `make test` after activating the environment. diff --git a/DOCKER.md b/DOCKER.md deleted file mode 100644 index eb717a5901..0000000000 --- a/DOCKER.md +++ /dev/null @@ -1,146 +0,0 @@ -# Docker - -## Why use Docker Compose? - -Using Docker for Hummingbot deployment offers several benefits, such as simplifying the installation process, enabling easy versioning and scaling, and ensuring a consistent and isolated environment for running the bot. This repository aims to help users get started with deploying Hummingbot using Docker by providing different examples that demonstrate how to set up and customize the bot according to their needs. - -## Install Docker Compose - -The examples below use Docker Compose, a tool for defining and running multi-container Docker applications. You can install Docker Compose either via command line or by running an installer. - -Linux (Ubuntu / Debian): - -```bash -sudo apt-get update -sudo apt-get install docker-compose-plugin -``` - -Mac (Homebrew): - -```bash -brew install docker-compose -``` - -If you want to be guided through the installation, install [Docker Desktop](https://www.docker.com/products/docker-desktop/) includes Docker Compose along with Docker Engine and Docker CLI which are Compose prerequisites: - -* [Linux](https://docs.docker.com/desktop/install/linux-install/) -* [Mac](https://docs.docker.com/desktop/install/mac-install/) -* [Windows](https://docs.docker.com/desktop/install/windows-install/) - - -Verify that Docker Compose is installed correctly by checking the version: - -```bash -docker compose version -``` - -Hummingbot's [deploy-examples](https://github.com/hummingbot/deploy-examples) repository provides various examples of how to deploy Hummingbot using Docker Compose, a tool for defining and running multi-container Docker applications. - -Compiled images of `hummingbot` are available on our official DockerHub: https://hub.docker.com/r/hummingbot/hummingbot - -## Building a Docker Image - -You can also build and run a Docker-based Hummingbot image using the `docker-compose.yml` file in the root folder: -```yml -version: "3.9" -services: - hummingbot: - container_name: hummingbot - build: - context: . 
- dockerfile: Dockerfile - volumes: - - ./conf:/home/hummingbot/conf - - ./conf/connectors:/home/hummingbot/conf/connectors - - ./conf/strategies:/home/hummingbot/conf/strategies - - ./logs:/home/hummingbot/logs - - ./data:/home/hummingbot/data - - ./scripts:/home/hummingbot/scripts - environment: - # - CONFIG_PASSWORD=a - # - CONFIG_FILE_NAME=directional_strategy_rsi.py - logging: - driver: "json-file" - options: - max-size: "10m" - max-file: 5 - tty: true - stdin_open: true - network_mode: host - - dashboard: - container_name: dashboard - image: hummingbot/dashboard:latest - volumes: - - ./data:/home/dashboard/data - ports: - - "8501:8501" -``` - -Build and launch the image by running: -``` -docker compose up -d -``` - -Uncomment the following lines in the YML file before running the command above if you would like to: -* Bypass the password screen by entering the previously set password -* Auto-starting a script -``` - # environment: - # - CONFIG_PASSWORD=a - # - CONFIG_FILE_NAME=directional_strategy_rsi.py -``` - -## Useful Docker Commands - -Use the commands below or use the Docker Desktop application to manage your containers: - -### Create the Compose project -``` -docker compose up -d -``` - -### Stop the Compose project -``` -docker compose down -``` - -### Update the Compose project for the latest images -``` -docker compose up --force-recreate --build -d -``` - -### Give all users read/write permissions to local files -``` -sudo chmod -R a+rw -``` - -### Attach to the container -``` -docker attach -``` - -### Detach from the container and return to command line - -Press keys Ctrl + P then Ctrl + Q - - -### Update the container to the latest image -``` -docker compose up --force-recreate --build -d -``` - -### List all containers -``` -docker ps -a -``` - -### Stop a container -``` -docker stop -``` - -### Remove a container -``` -docker rm -``` diff --git a/Makefile b/Makefile index 7b59ca52fa..d881e66a9d 100644 --- a/Makefile +++ b/Makefile @@ -11,13 +11,13 @@ test: coverage run -m nose \ - --exclude-dir="test/connector" \ - --exclude-dir="test/debug" \ --exclude-dir="test/mock" \ --exclude-dir="test/hummingbot/connector/gateway/amm" \ - --exclude-dir="test/hummingbot/connector/exchange/coinbase_pro" \ --exclude-dir="test/hummingbot/connector/exchange/hitbtc" \ + --exclude-dir="test/hummingbot/connector/exchange/coinbase_advance_trade" \ + --exclude-dir="test/hummingbot/connector/exchange/ndax" \ --exclude-dir="test/hummingbot/connector/exchange/foxbit" \ + --exclude-dir="test/hummingbot/connector/derivative/dydx_v4_perpetual" \ --exclude-dir="test/hummingbot/connector/gateway/clob_spot/data_sources/dexalot" \ --exclude-dir="test/hummingbot/strategy/amm_arb" \ --exclude-dir="test/hummingbot/core/gateway" \ diff --git a/README.md b/README.md index e12206e582..d4b95b428d 100644 --- a/README.md +++ b/README.md @@ -25,8 +25,8 @@ Help us **democratize high-frequency trading** and make powerful trading algorit * [Website and Docs](https://hummingbot.org): Official Hummingbot website and documentation * [Installation](https://hummingbot.org/installation/docker/): Install Hummingbot on various platforms * [FAQs](https://hummingbot.org/faq/): Answers to all your burning questions -* [Botcamp](https://hummingbot.org/botcamp/): Learn how build your own custom HFT strategy in Hummingbot with our hands-on bootcamp! 
-* [Newsletter](https://hummingbot.substack.com): Get our monthly newletter whenever we ship a new release +* [Botcamp](https://hummingbot.org/botcamp/): Learn how to build your own custom HFT strategy in Hummingbot with our hands-on bootcamp! +* [Newsletter](https://hummingbot.substack.com): Get our monthly newsletter whenever we ship a new release * [Discord](https://discord.gg/hummingbot): The main gathering spot for the global Hummingbot community * [YouTube](https://www.youtube.com/c/hummingbot): Videos that teach you how to get the most of of Hummingbot * [Twitter](https://twitter.com/_hummingbot): Get the latest announcements about Hummingbot diff --git a/assets/Pangolin-logo.png b/assets/Pangolin-logo.png deleted file mode 100644 index 0dd250a53e..0000000000 Binary files a/assets/Pangolin-logo.png and /dev/null differ diff --git a/assets/altmarkets_logo1.png b/assets/altmarkets_logo1.png deleted file mode 100644 index 316405774e..0000000000 Binary files a/assets/altmarkets_logo1.png and /dev/null differ diff --git a/assets/ascendex-logo.jpg b/assets/ascendex-logo.jpg deleted file mode 100644 index 56fc8c4a81..0000000000 Binary files a/assets/ascendex-logo.jpg and /dev/null differ diff --git a/assets/balancer-logo.jpg b/assets/balancer-logo.jpg deleted file mode 100644 index 64f3d52dac..0000000000 Binary files a/assets/balancer-logo.jpg and /dev/null differ diff --git a/assets/bamboorelay-logo.jpg b/assets/bamboorelay-logo.jpg deleted file mode 100644 index b78860ade0..0000000000 Binary files a/assets/bamboorelay-logo.jpg and /dev/null differ diff --git a/assets/beaxy-logo.png b/assets/beaxy-logo.png deleted file mode 100644 index 8c8c8c1551..0000000000 Binary files a/assets/beaxy-logo.png and /dev/null differ diff --git a/assets/binance-logo.jpg b/assets/binance-logo.jpg deleted file mode 100644 index f04b9a2dfd..0000000000 Binary files a/assets/binance-logo.jpg and /dev/null differ diff --git a/assets/binance_futures-logo.jpg b/assets/binance_futures-logo.jpg deleted file mode 100644 index 8d974a6759..0000000000 Binary files a/assets/binance_futures-logo.jpg and /dev/null differ diff --git a/assets/binance_us-logo.jpg b/assets/binance_us-logo.jpg deleted file mode 100644 index eb227786b2..0000000000 Binary files a/assets/binance_us-logo.jpg and /dev/null differ diff --git a/assets/bitfinex-logo.jpg b/assets/bitfinex-logo.jpg deleted file mode 100644 index c1b30b869d..0000000000 Binary files a/assets/bitfinex-logo.jpg and /dev/null differ diff --git a/assets/bitget-logo.png b/assets/bitget-logo.png deleted file mode 100644 index 396b6ead23..0000000000 Binary files a/assets/bitget-logo.png and /dev/null differ diff --git a/assets/bitmart-logo.jpg b/assets/bitmart-logo.jpg deleted file mode 100644 index ff235ef9af..0000000000 Binary files a/assets/bitmart-logo.jpg and /dev/null differ diff --git a/assets/bitmex-logo.png b/assets/bitmex-logo.png deleted file mode 100644 index b2f4271453..0000000000 Binary files a/assets/bitmex-logo.png and /dev/null differ diff --git a/assets/bittrex_global-logo.jpg b/assets/bittrex_global-logo.jpg deleted file mode 100644 index ba62878fe0..0000000000 Binary files a/assets/bittrex_global-logo.jpg and /dev/null differ diff --git a/assets/btcmarkets-logo.jpg b/assets/btcmarkets-logo.jpg deleted file mode 100644 index da37bcfde5..0000000000 Binary files a/assets/btcmarkets-logo.jpg and /dev/null differ diff --git a/assets/bybit-logo.jpg b/assets/bybit-logo.jpg deleted file mode 100644 index 3e0a834314..0000000000 Binary files a/assets/bybit-logo.jpg and 
/dev/null differ diff --git a/assets/celo-logo.jpg b/assets/celo-logo.jpg deleted file mode 100644 index 16a540ef4e..0000000000 Binary files a/assets/celo-logo.jpg and /dev/null differ diff --git a/assets/coinbase_pro-logo.jpg b/assets/coinbase_pro-logo.jpg deleted file mode 100644 index f825b86205..0000000000 Binary files a/assets/coinbase_pro-logo.jpg and /dev/null differ diff --git a/assets/coinzoom-logo.jpg b/assets/coinzoom-logo.jpg deleted file mode 100644 index 844fbeb46e..0000000000 Binary files a/assets/coinzoom-logo.jpg and /dev/null differ diff --git a/assets/cryptocom-logo.jpg b/assets/cryptocom-logo.jpg deleted file mode 100644 index 0ec498cea1..0000000000 Binary files a/assets/cryptocom-logo.jpg and /dev/null differ diff --git a/assets/digifinex-logo.jpg b/assets/digifinex-logo.jpg deleted file mode 100644 index 5f648f813e..0000000000 Binary files a/assets/digifinex-logo.jpg and /dev/null differ diff --git a/assets/dolomite-logo.jpg b/assets/dolomite-logo.jpg deleted file mode 100644 index 50aa522eaa..0000000000 Binary files a/assets/dolomite-logo.jpg and /dev/null differ diff --git a/assets/dydx-logo.jpg b/assets/dydx-logo.jpg deleted file mode 100644 index 89799e1104..0000000000 Binary files a/assets/dydx-logo.jpg and /dev/null differ diff --git a/assets/eve_exchange_logo.png b/assets/eve_exchange_logo.png deleted file mode 100644 index 0ce59f663a..0000000000 Binary files a/assets/eve_exchange_logo.png and /dev/null differ diff --git a/assets/foxbit-exchange-logo.png b/assets/foxbit-exchange-logo.png deleted file mode 100644 index 7edbfb592d..0000000000 Binary files a/assets/foxbit-exchange-logo.png and /dev/null differ diff --git a/assets/ftx-logo.jpg b/assets/ftx-logo.jpg deleted file mode 100644 index 63e62da6c1..0000000000 Binary files a/assets/ftx-logo.jpg and /dev/null differ diff --git a/assets/gate-io-logo.jpg b/assets/gate-io-logo.jpg deleted file mode 100644 index 943369779f..0000000000 Binary files a/assets/gate-io-logo.jpg and /dev/null differ diff --git a/assets/himalaya_exchange-logo.jpg b/assets/himalaya_exchange-logo.jpg deleted file mode 100644 index 136bbeaac5..0000000000 Binary files a/assets/himalaya_exchange-logo.jpg and /dev/null differ diff --git a/assets/hitbtc-logo.jpg b/assets/hitbtc-logo.jpg deleted file mode 100644 index 0d11076d4b..0000000000 Binary files a/assets/hitbtc-logo.jpg and /dev/null differ diff --git a/assets/huobi_global-logo.jpg b/assets/huobi_global-logo.jpg deleted file mode 100644 index 08c6a3653d..0000000000 Binary files a/assets/huobi_global-logo.jpg and /dev/null differ diff --git a/assets/injective.jpg b/assets/injective.jpg deleted file mode 100644 index 39512849d2..0000000000 Binary files a/assets/injective.jpg and /dev/null differ diff --git a/assets/kraken-logo.jpg b/assets/kraken-logo.jpg deleted file mode 100644 index 1a9a89c0ca..0000000000 Binary files a/assets/kraken-logo.jpg and /dev/null differ diff --git a/assets/kucoin-logo.jpg b/assets/kucoin-logo.jpg deleted file mode 100644 index 3c48810605..0000000000 Binary files a/assets/kucoin-logo.jpg and /dev/null differ diff --git a/assets/latoken-logo.png b/assets/latoken-logo.png deleted file mode 100644 index c20723f000..0000000000 Binary files a/assets/latoken-logo.png and /dev/null differ diff --git a/assets/lbank.jpg b/assets/lbank.jpg deleted file mode 100644 index 15e6d257c3..0000000000 Binary files a/assets/lbank.jpg and /dev/null differ diff --git a/assets/lbank.png b/assets/lbank.png deleted file mode 100644 index bee10124e2..0000000000 Binary files 
a/assets/lbank.png and /dev/null differ diff --git a/assets/liquid-logo.jpg b/assets/liquid-logo.jpg deleted file mode 100644 index 01c22e7029..0000000000 Binary files a/assets/liquid-logo.jpg and /dev/null differ diff --git a/assets/loopring-logo.jpg b/assets/loopring-logo.jpg deleted file mode 100644 index aa4a103df6..0000000000 Binary files a/assets/loopring-logo.jpg and /dev/null differ diff --git a/assets/mexc.jpg b/assets/mexc.jpg deleted file mode 100644 index 36d0d003c1..0000000000 Binary files a/assets/mexc.jpg and /dev/null differ diff --git a/assets/mm-finance-logo.png b/assets/mm-finance-logo.png deleted file mode 100644 index 6928824bd1..0000000000 Binary files a/assets/mm-finance-logo.png and /dev/null differ diff --git a/assets/ndax-logo.jpg b/assets/ndax-logo.jpg deleted file mode 100644 index 39e4ba58fe..0000000000 Binary files a/assets/ndax-logo.jpg and /dev/null differ diff --git a/assets/okex-logo.jpg b/assets/okex-logo.jpg deleted file mode 100644 index f4635bc982..0000000000 Binary files a/assets/okex-logo.jpg and /dev/null differ diff --git a/assets/pancakeswap-logo.png b/assets/pancakeswap-logo.png deleted file mode 100644 index a17db0f5fd..0000000000 Binary files a/assets/pancakeswap-logo.png and /dev/null differ diff --git a/assets/perpetual_protocol-logo.jpg b/assets/perpetual_protocol-logo.jpg deleted file mode 100644 index 8e3d4113a0..0000000000 Binary files a/assets/perpetual_protocol-logo.jpg and /dev/null differ diff --git a/assets/probit-logo.jpg b/assets/probit-logo.jpg deleted file mode 100644 index 5110d662ca..0000000000 Binary files a/assets/probit-logo.jpg and /dev/null differ diff --git a/assets/probit_kr-logo.jpg b/assets/probit_kr-logo.jpg deleted file mode 100644 index 482fc0f504..0000000000 Binary files a/assets/probit_kr-logo.jpg and /dev/null differ diff --git a/assets/quickswap-logo.png b/assets/quickswap-logo.png deleted file mode 100644 index cfa280dd94..0000000000 Binary files a/assets/quickswap-logo.png and /dev/null differ diff --git a/assets/radar_logo.png b/assets/radar_logo.png deleted file mode 100644 index 0cd4880955..0000000000 Binary files a/assets/radar_logo.png and /dev/null differ diff --git a/assets/ref-finance-logo.png b/assets/ref-finance-logo.png deleted file mode 100644 index fbf1269851..0000000000 Binary files a/assets/ref-finance-logo.png and /dev/null differ diff --git a/assets/serum-logo.jpg b/assets/serum-logo.jpg deleted file mode 100644 index cf1a808b75..0000000000 Binary files a/assets/serum-logo.jpg and /dev/null differ diff --git a/assets/sushiswap-logo.jpg b/assets/sushiswap-logo.jpg deleted file mode 100644 index 75d7c25aa7..0000000000 Binary files a/assets/sushiswap-logo.jpg and /dev/null differ diff --git a/assets/terra-logo.jpg b/assets/terra-logo.jpg deleted file mode 100644 index d9818a6492..0000000000 Binary files a/assets/terra-logo.jpg and /dev/null differ diff --git a/assets/traderjoe-logo.png b/assets/traderjoe-logo.png deleted file mode 100644 index 3fbaa12355..0000000000 Binary files a/assets/traderjoe-logo.png and /dev/null differ diff --git a/assets/uniswap-logo.jpg b/assets/uniswap-logo.jpg deleted file mode 100644 index 9b0a1bd040..0000000000 Binary files a/assets/uniswap-logo.jpg and /dev/null differ diff --git a/assets/uniswap_v3-logo.jpg b/assets/uniswap_v3-logo.jpg deleted file mode 100644 index d0d1582229..0000000000 Binary files a/assets/uniswap_v3-logo.jpg and /dev/null differ diff --git a/assets/vvs-finance-logo.png b/assets/vvs-finance-logo.png deleted file mode 100644 index 
8f85bfbb19..0000000000 Binary files a/assets/vvs-finance-logo.png and /dev/null differ diff --git a/assets/wazirX-logo.jpg b/assets/wazirX-logo.jpg deleted file mode 100644 index e74c8e317f..0000000000 Binary files a/assets/wazirX-logo.jpg and /dev/null differ diff --git a/assets/white-bit.png b/assets/white-bit.png deleted file mode 100644 index fa3c90594f..0000000000 Binary files a/assets/white-bit.png and /dev/null differ diff --git a/conf/__init__.py b/conf/__init__.py index 806c15d69b..37d1e18d9c 100644 --- a/conf/__init__.py +++ b/conf/__init__.py @@ -52,11 +52,6 @@ coinbase_advanced_trade_api_key = os.getenv("COINBASE_ADVANCED_TRADE_API_KEY") coinbase_advanced_trade_secret_key = os.getenv("COINBASE_ADVANCED_TRADE_SECRET_KEY") -# Coinbase Pro Tests -coinbase_pro_api_key = os.getenv("COINBASE_PRO_API_KEY") -coinbase_pro_secret_key = os.getenv("COINBASE_PRO_SECRET_KEY") -coinbase_pro_passphrase = os.getenv("COINBASE_PRO_PASSPHRASE") - # Htx Tests htx_api_key = os.getenv("HTX_API_KEY") diff --git a/controllers/market_making/pmm_dynamic.py b/controllers/market_making/pmm_dynamic.py index 9d0273306b..9592ab237c 100644 --- a/controllers/market_making/pmm_dynamic.py +++ b/controllers/market_making/pmm_dynamic.py @@ -87,7 +87,7 @@ class PMMDynamicController(MarketMakingControllerBase): """ def __init__(self, config: PMMDynamicControllerConfig, *args, **kwargs): self.config = config - self.max_records = max(config.macd_slow, config.macd_fast, config.macd_signal, config.natr_length) + 10 + self.max_records = max(config.macd_slow, config.macd_fast, config.macd_signal, config.natr_length) + 100 if len(self.config.candles_config) == 0: self.config.candles_config = [CandlesConfig( connector=config.candles_connector, diff --git a/hooks/README.md b/hooks/README.md deleted file mode 100644 index 3f4e8340dc..0000000000 --- a/hooks/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Docker Autobuild Hooks - -This folder containers hooks for docker autobuild. - -[Hummingbot builds](https://hub.docker.com/r/coinalpha/hummingbot/builds) \ No newline at end of file diff --git a/hooks/build b/hooks/build deleted file mode 100644 index d31653c7ee..0000000000 --- a/hooks/build +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env sh - -# Get the build time stamp -BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") - -docker build --build-arg COMMIT=$GIT_TAG --build-arg BRANCH=$SOURCE_BRANCH --build-arg BUILD_DATE=$BUILD_DATE -t $IMAGE_NAME -f Dockerfile . 
\ No newline at end of file diff --git a/hummingbot/VERSION b/hummingbot/VERSION index 227cea2156..38f77a65b3 100644 --- a/hummingbot/VERSION +++ b/hummingbot/VERSION @@ -1 +1 @@ -2.0.0 +2.0.1 diff --git a/hummingbot/client/command/__init__.py b/hummingbot/client/command/__init__.py index 6c4d91b624..cb67f6beaa 100644 --- a/hummingbot/client/command/__init__.py +++ b/hummingbot/client/command/__init__.py @@ -10,7 +10,6 @@ from .import_command import ImportCommand from .mqtt_command import MQTTCommand from .order_book_command import OrderBookCommand -from .pmm_script_command import PMMScriptCommand from .previous_strategy_command import PreviousCommand from .rate_command import RateCommand from .silly_commands import SillyCommands @@ -31,7 +30,6 @@ HistoryCommand, ImportCommand, OrderBookCommand, - PMMScriptCommand, PreviousCommand, RateCommand, SillyCommands, diff --git a/hummingbot/client/command/config_command.py b/hummingbot/client/command/config_command.py index e86926d985..9b983490fb 100644 --- a/hummingbot/client/command/config_command.py +++ b/hummingbot/client/command/config_command.py @@ -67,8 +67,6 @@ "mqtt_autostart", "instance_id", "send_error_logs", - "pmm_script_mode", - "pmm_script_file_path", "ethereum_chain_name", "gateway", "gateway_api_host", diff --git a/hummingbot/client/command/pmm_script_command.py b/hummingbot/client/command/pmm_script_command.py deleted file mode 100644 index bbbc6e0770..0000000000 --- a/hummingbot/client/command/pmm_script_command.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import List - - -class PMMScriptCommand: - - def pmm_script_command(self, cmd: str = None, args: List[str] = None): - if self._pmm_script_iterator is not None: - self._pmm_script_iterator.request_command(cmd, args) - else: - self.notify('No PMM script is active, command ignored') - - return True diff --git a/hummingbot/client/command/start_command.py b/hummingbot/client/command/start_command.py index 52e085d835..310159ef33 100644 --- a/hummingbot/client/command/start_command.py +++ b/hummingbot/client/command/start_command.py @@ -255,15 +255,6 @@ async def start_market_making(self, # type: HummingbotApplication await market.cancel_all(10.0) if self.strategy: self.clock.add_iterator(self.strategy) - try: - self._pmm_script_iterator = self.client_config_map.pmm_script_mode.get_iterator( - self.strategy_name, list(self.markets.values()), self.strategy - ) - except ValueError as e: - self.notify(f"Error: {e}") - if self._pmm_script_iterator is not None: - self.clock.add_iterator(self._pmm_script_iterator) - self.notify(f"PMM script ({self.client_config_map.pmm_script_mode.pmm_script_file_path}) started.") self.strategy_task: asyncio.Task = safe_ensure_future(self._run_clock(), loop=self.ev_loop) self.notify(f"\n'{self.strategy_name}' strategy started.\n" f"Run `status` command to query the progress.") diff --git a/hummingbot/client/command/status_command.py b/hummingbot/client/command/status_command.py index 9710dde61e..36d670e347 100644 --- a/hummingbot/client/command/status_command.py +++ b/hummingbot/client/command/status_command.py @@ -77,8 +77,6 @@ async def strategy_status(self, live: bool = False): else: st_status = self.strategy.format_status() status = paper_trade + "\n" + st_status - if self._pmm_script_iterator is not None and live is False: - self._pmm_script_iterator.request_status() return status def application_warning(self): diff --git a/hummingbot/client/command/stop_command.py b/hummingbot/client/command/stop_command.py index 4585ef167b..6406481974 100644 --- 
a/hummingbot/client/command/stop_command.py +++ b/hummingbot/client/command/stop_command.py @@ -29,11 +29,8 @@ async def stop_loop(self, # type: HummingbotApplication import appnope appnope.nap() - if self._pmm_script_iterator is not None: - self._pmm_script_iterator.stop(self.clock) - if isinstance(self.strategy, ScriptStrategyBase): - self.strategy.on_stop() + await self.strategy.on_stop() if self._trading_required and not skip_order_cancellation: # Remove the strategy from clock before cancelling orders, to diff --git a/hummingbot/client/config/client_config_map.py b/hummingbot/client/config/client_config_map.py index b8782ac8a9..bd97bacc70 100644 --- a/hummingbot/client/config/client_config_map.py +++ b/hummingbot/client/config/client_config_map.py @@ -1,10 +1,8 @@ import json -import os.path import random import re from abc import ABC, abstractmethod from decimal import Decimal -from os.path import dirname from pathlib import Path from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Union @@ -14,12 +12,7 @@ from hummingbot.client.config.config_data_types import BaseClientModel, ClientConfigEnum, ClientFieldData from hummingbot.client.config.config_methods import using_exchange as using_exchange_pointer from hummingbot.client.config.config_validators import validate_bool, validate_float -from hummingbot.client.settings import ( - DEFAULT_GATEWAY_CERTS_PATH, - DEFAULT_LOG_FILE_PATH, - PMM_SCRIPTS_PATH, - AllConnectorSettings, -) +from hummingbot.client.settings import DEFAULT_GATEWAY_CERTS_PATH, DEFAULT_LOG_FILE_PATH, AllConnectorSettings from hummingbot.connector.connector_base import ConnectorBase from hummingbot.connector.connector_metrics_collector import ( DummyMetricsCollector, @@ -30,13 +23,10 @@ from hummingbot.connector.exchange.gate_io.gate_io_utils import GateIOConfigMap from hummingbot.connector.exchange.kraken.kraken_utils import KrakenConfigMap from hummingbot.connector.exchange.kucoin.kucoin_utils import KuCoinConfigMap -from hummingbot.connector.exchange_base import ExchangeBase from hummingbot.core.rate_oracle.rate_oracle import RATE_ORACLE_SOURCES, RateOracle from hummingbot.core.rate_oracle.sources.rate_source_base import RateSourceBase from hummingbot.core.utils.kill_switch import ActiveKillSwitch, KillSwitch, PassThroughKillSwitch from hummingbot.notifier.telegram_notifier import TelegramNotifier -from hummingbot.pmm_script.pmm_script_iterator import PMMScriptIterator -from hummingbot.strategy.strategy_base import StrategyBase if TYPE_CHECKING: from hummingbot.client.hummingbot_application import HummingbotApplication @@ -509,76 +499,6 @@ def validate_db_engine(cls, v: str): } -class PMMScriptMode(BaseClientModel, ABC): - @abstractmethod - def get_iterator( - self, - strategy_name: str, - markets: List[ExchangeBase], - strategy: StrategyBase) -> Optional[PMMScriptIterator]: - ... 
- - -class PMMScriptDisabledMode(PMMScriptMode): - class Config: - title = "pmm_script_disabled" - - def get_iterator( - self, - strategy_name: str, - markets: List[ExchangeBase], - strategy: StrategyBase) -> Optional[PMMScriptIterator]: - return None - - -class PMMScriptEnabledMode(PMMScriptMode): - pmm_script_file_path: str = Field( - default=..., - client_data=ClientFieldData(prompt=lambda cm: "Enter path to your PMM script file"), - ) - - class Config: - title = "pmm_script_enabled" - - def get_iterator( - self, - strategy_name: str, - markets: List[ExchangeBase], - strategy: StrategyBase) -> Optional[PMMScriptIterator]: - if strategy_name != "pure_market_making": - raise ValueError("PMM script feature is only available for pure_market_making strategy.") - folder = dirname(self.pmm_script_file_path) - pmm_script_file = ( - PMM_SCRIPTS_PATH / self.pmm_script_file_path - if folder == "" - else self.pmm_script_file_path - ) - pmm_script_iterator = PMMScriptIterator( - pmm_script_file, - markets, - strategy, - queue_check_interval=0.1, - ) - return pmm_script_iterator - - @validator("pmm_script_file_path", pre=True) - def validate_pmm_script_file_path(cls, v: str): - import hummingbot.client.settings as settings - file_path = v - path, name = os.path.split(file_path) - if path == "": - file_path = os.path.join(settings.PMM_SCRIPTS_PATH, file_path) - if not os.path.isfile(file_path): - raise ValueError(f"{file_path} file does not exist.") - return file_path - - -PMM_SCRIPT_MODES = { - PMMScriptDisabledMode.Config.title: PMMScriptDisabledMode, - PMMScriptEnabledMode.Config.title: PMMScriptEnabledMode, -} - - class GatewayConfigMap(BaseClientModel): gateway_api_host: str = Field( default="localhost", @@ -1034,12 +954,6 @@ class ClientConfigMap(BaseClientModel): prompt=lambda cm: f"Select the desired db mode ({'/'.join(list(DB_MODES.keys()))})", ), ) - pmm_script_mode: Union[tuple(PMM_SCRIPT_MODES.values())] = Field( - default=PMMScriptDisabledMode(), - client_data=ClientFieldData( - prompt=lambda cm: f"Select the desired PMM script mode ({'/'.join(list(PMM_SCRIPT_MODES.keys()))})", - ), - ) balance_asset_limit: Dict[str, Dict[str, Decimal]] = Field( default={exchange: {} for exchange in AllConnectorSettings.get_exchange_names()}, description=("Balance Limit Configurations" @@ -1207,18 +1121,6 @@ def validate_db_mode(cls, v: Union[(str, Dict) + tuple(DB_MODES.values())]): sub_model = DB_MODES[v].construct() return sub_model - @validator("pmm_script_mode", pre=True) - def validate_pmm_script_mode(cls, v: Union[(str, Dict) + tuple(PMM_SCRIPT_MODES.values())]): - if isinstance(v, tuple(PMM_SCRIPT_MODES.values()) + (Dict,)): - sub_model = v - elif v not in PMM_SCRIPT_MODES: - raise ValueError( - f"Invalid PMM script mode, please choose a value from {list(PMM_SCRIPT_MODES.keys())}." 
- ) - else: - sub_model = PMM_SCRIPT_MODES[v].construct() - return sub_model - @validator("anonymized_metrics_mode", pre=True) def validate_anonymized_metrics_mode(cls, v: Union[(str, Dict) + tuple(METRICS_MODES.values())]): if isinstance(v, tuple(METRICS_MODES.values()) + (Dict,)): diff --git a/hummingbot/client/config/conf_migration.py b/hummingbot/client/config/conf_migration.py index 8efeba3386..5f7390576a 100644 --- a/hummingbot/client/config/conf_migration.py +++ b/hummingbot/client/config/conf_migration.py @@ -18,8 +18,6 @@ DBSqliteMode, KillSwitchDisabledMode, KillSwitchEnabledMode, - PMMScriptDisabledMode, - PMMScriptEnabledMode, TelegramDisabledMode, TelegramEnabledMode, ) @@ -163,16 +161,6 @@ def _migrate_global_config_modes(client_config_map: ClientConfigAdapter, data: D db_name=db_name, ) - pmm_script_enabled = data.pop("pmm_script_enabled") - pmm_script_file_path = data.pop("pmm_script_file_path") - if pmm_script_enabled: - client_config_map.pmm_script_mode = PMMScriptEnabledMode(pmm_script_file_path=pmm_script_file_path) - else: - client_config_map.pmm_script_mode = PMMScriptDisabledMode() - - _migrate_global_config_field( - client_config_map.gateway, data, "gateway_api_host" - ) _migrate_global_config_field( client_config_map.gateway, data, "gateway_api_port" ) diff --git a/hummingbot/client/settings.py b/hummingbot/client/settings.py index 35afe487f6..8da7e3e683 100644 --- a/hummingbot/client/settings.py +++ b/hummingbot/client/settings.py @@ -43,7 +43,6 @@ CONTROLLERS_CONF_DIR_PATH = CONF_DIR_PATH / "controllers" CONF_PREFIX = "conf_" CONF_POSTFIX = "_strategy" -PMM_SCRIPTS_PATH = root_path() / "pmm_scripts" SCRIPT_STRATEGIES_MODULE = "scripts" SCRIPT_STRATEGIES_PATH = root_path() / SCRIPT_STRATEGIES_MODULE CONTROLLERS_MODULE = "controllers" @@ -370,6 +369,7 @@ def create_connector_settings(cls): """ cls.all_connector_settings = {} # reset connector_exceptions = ["mock_paper_exchange", "mock_pure_python_paper_exchange", "paper_trade"] + # connector_exceptions = ["mock_paper_exchange", "mock_pure_python_paper_exchange", "paper_trade", "injective_v2", "injective_v2_perpetual"] type_dirs: List[DirEntry] = [ cast(DirEntry, f) for f in scandir(f"{root_path() / 'hummingbot' / 'connector'}") diff --git a/hummingbot/client/ui/completer.py b/hummingbot/client/ui/completer.py index 83c922ab29..d5fea82a88 100644 --- a/hummingbot/client/ui/completer.py +++ b/hummingbot/client/ui/completer.py @@ -15,7 +15,6 @@ from hummingbot.client.config.config_data_types import BaseClientModel from hummingbot.client.settings import ( GATEWAY_CONNECTORS, - PMM_SCRIPTS_PATH, SCRIPT_STRATEGIES_PATH, SCRIPT_STRATEGY_CONF_DIR_PATH, STRATEGIES, @@ -93,7 +92,6 @@ def __init__(self, hummingbot_application): ) self._gateway_config_completer = WordCompleter(hummingbot_application.gateway_config_keys, ignore_case=True) self._strategy_completer = WordCompleter(STRATEGIES, ignore_case=True) - self._py_file_completer = WordCompleter(file_name_list(str(PMM_SCRIPTS_PATH), "py")) self._script_strategy_completer = WordCompleter(file_name_list(str(SCRIPT_STRATEGIES_PATH), "py")) self._scripts_config_completer = WordCompleter(file_name_list(str(SCRIPT_STRATEGY_CONF_DIR_PATH), "yml")) self._strategy_v2_create_config_completer = self.get_strategies_v2_with_config() diff --git a/hummingbot/client/ui/parser.py b/hummingbot/client/ui/parser.py index e2514078ed..c3fe2710c4 100644 --- a/hummingbot/client/ui/parser.py +++ b/hummingbot/client/ui/parser.py @@ -142,11 +142,6 @@ def load_parser(hummingbot: 
"HummingbotApplication", command_tabs) -> [ThrowingA ticker_parser.add_argument("--market", type=str, dest="market", help="The market (trading pair) of the order book") ticker_parser.set_defaults(func=hummingbot.ticker) - pmm_script_parser = subparsers.add_parser("pmm_script", help="Send command to running PMM script instance") - pmm_script_parser.add_argument("cmd", nargs="?", default=None, help="Command") - pmm_script_parser.add_argument("args", nargs="*", default=None, help="Arguments") - pmm_script_parser.set_defaults(func=hummingbot.pmm_script_command) - previous_strategy_parser = subparsers.add_parser("previous", help="Imports the last strategy used") previous_strategy_parser.add_argument("option", nargs="?", choices=["Yes,No"], default=None) previous_strategy_parser.set_defaults(func=hummingbot.previous_strategy) diff --git a/hummingbot/connector/client_order_tracker.py b/hummingbot/connector/client_order_tracker.py index 26ea75792f..7121ef745b 100644 --- a/hummingbot/connector/client_order_tracker.py +++ b/hummingbot/connector/client_order_tracker.py @@ -407,8 +407,8 @@ def _trigger_order_fills(self, if prev_executed_amount_base < tracked_order.executed_amount_base: self.logger().info( f"The {tracked_order.trade_type.name.upper()} order {tracked_order.client_order_id} " - f"amounting to {tracked_order.executed_amount_base}/{tracked_order.amount} " - f"{tracked_order.base_asset} has been filled." + f"amounting to {tracked_order.executed_amount_base}/{tracked_order.amount} {tracked_order.base_asset} " + f"has been filled at {fill_price} {tracked_order.quote_asset}." ) self._trigger_filled_event( order=tracked_order, diff --git a/hummingbot/connector/exchange/coinbase_pro/__init__.py b/hummingbot/connector/derivative/dydx_v4_perpetual/__init__.py similarity index 100% rename from hummingbot/connector/exchange/coinbase_pro/__init__.py rename to hummingbot/connector/derivative/dydx_v4_perpetual/__init__.py diff --git a/hummingbot/pmm_script/__init__.py b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/__init__.py similarity index 100% rename from hummingbot/pmm_script/__init__.py rename to hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/__init__.py diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/dydx_v4_data_source.py b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/dydx_v4_data_source.py new file mode 100644 index 0000000000..645b36ad62 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/dydx_v4_data_source.py @@ -0,0 +1,297 @@ +from asyncio import Lock +from datetime import datetime, timedelta +from typing import Optional, Tuple + +import certifi +import grpc +from google.protobuf import json_format, message as _message +from v4_proto.cosmos.auth.v1beta1.auth_pb2 import BaseAccount +from v4_proto.cosmos.auth.v1beta1.query_pb2 import QueryAccountRequest +from v4_proto.cosmos.auth.v1beta1.query_pb2_grpc import QueryStub as AuthGrpcClient +from v4_proto.cosmos.bank.v1beta1 import query_pb2_grpc as bank_query_grpc +from v4_proto.cosmos.base.tendermint.v1beta1 import ( + query_pb2 as tendermint_query, + query_pb2_grpc as tendermint_query_grpc, +) +from v4_proto.cosmos.tx.v1beta1.service_pb2 import BroadcastMode, BroadcastTxRequest +from v4_proto.cosmos.tx.v1beta1.service_pb2_grpc import ServiceStub as TxGrpcClient +from v4_proto.dydxprotocol.clob.order_pb2 import Order, OrderId +from v4_proto.dydxprotocol.clob.tx_pb2 import MsgCancelOrder, MsgPlaceOrder +from 
v4_proto.dydxprotocol.subaccounts.subaccount_pb2 import SubaccountId + +from hummingbot.connector.derivative.dydx_v4_perpetual import dydx_v4_perpetual_constants as CONSTANTS +from hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.keypairs import PrivateKey +from hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.tx import SigningCfg, Transaction + + +class DydxPerpetualV4Client: + + def __init__( + self, + secret_phrase: str, + dydx_v4_chain_address: str, + connector, + subaccount_num=0, + ): + self._private_key = PrivateKey.from_mnemonic(secret_phrase) + self._dydx_v4_chain_address = dydx_v4_chain_address + self._connector = connector + self._subaccount_num = subaccount_num + self.transaction_lock = Lock() + self.number = 0 + self.sequence = 0 + self._is_trading_account_initialized = False + + with open(certifi.where(), "rb") as f: + trusted_certs = f.read() + credentials = grpc.ssl_channel_credentials( + root_certificates=trusted_certs + ) + + host_and_port = CONSTANTS.DYDX_V4_AERIAL_CONFIG_URL + grpc_client = ( + grpc.aio.secure_channel(host_and_port, credentials) + if credentials is not None else grpc.aio.insecure_channel(host_and_port) + ) + query_grpc_client = ( + grpc.aio.secure_channel(CONSTANTS.DYDX_V4_QUERY_AERIAL_CONFIG_URL, credentials) + if credentials is not None else grpc.aio.insecure_channel(host_and_port) + ) + self.stubBank = bank_query_grpc.QueryStub(grpc_client) + self.auth_client = AuthGrpcClient(query_grpc_client) + self.txs = TxGrpcClient(grpc_client) + self.stubCosmosTendermint = tendermint_query_grpc.ServiceStub( + grpc_client + ) + + @staticmethod + def calculate_quantums( + size: float, + atomic_resolution: int, + step_base_quantums: int, + ): + raw_quantums = size * 10 ** (-1 * atomic_resolution) + return int(max(raw_quantums, step_base_quantums)) + + @staticmethod + def calculate_subticks( + price: float, + atomic_resolution: int, + quantum_conversion_exponent: int, + subticks_per_tick: int + ): + exponent = atomic_resolution - quantum_conversion_exponent - CONSTANTS.QUOTE_QUANTUMS_ATOMIC_RESOLUTION + raw_subticks = price * 10 ** (exponent) + return int(max(raw_subticks, subticks_per_tick)) + + def calculate_good_til_block_time(self, good_til_time_in_seconds: int) -> int: + now = datetime.now() + interval = timedelta(seconds=good_til_time_in_seconds) + future = now + interval + return int(future.timestamp()) + + def get_sequence(self): + current_seq = self.sequence + self.sequence += 1 + return current_seq + + def get_number(self): + return self.number + + async def trading_account_sequence(self) -> int: + if not self._is_trading_account_initialized: + await self.initialize_trading_account() + return self.get_sequence() + + async def trading_account_number(self) -> int: + if not self._is_trading_account_initialized: + await self.initialize_trading_account() + return self.get_number() + + async def initialize_trading_account(self): + await self.query_account() + self._is_trading_account_initialized = True + + def generate_good_til_fields( + self, + order_flags: int, + good_til_block: int, + good_til_time_in_seconds: int, + ) -> Tuple[int, int]: + if order_flags == CONSTANTS.ORDER_FLAGS_LONG_TERM: + return 0, self.calculate_good_til_block_time(good_til_time_in_seconds) + else: + return good_til_block, 0 + + async def latest_block(self) -> tendermint_query.GetLatestBlockResponse: + ''' + Get lastest block + + :returns: Response, containing block information + + ''' + return await self.stubCosmosTendermint.GetLatestBlock( + 
tendermint_query.GetLatestBlockRequest() + ) + + async def send_message( + self, + msg: _message.Message, + ): + tx = Transaction() + tx.add_message(msg) + return await self.prepare_and_broadcast_basic_transaction( + tx=tx, + memo=None, + ) + + async def cancel_order( + self, + client_id: int, + clob_pair_id: int, + order_flags: int, + good_til_block_time: int, + ): + + subaccount_id = SubaccountId(owner=self._dydx_v4_chain_address, number=self._subaccount_num) + order_id = OrderId( + subaccount_id=subaccount_id, + client_id=client_id, + order_flags=order_flags, + clob_pair_id=int(clob_pair_id) + ) + msg = MsgCancelOrder( + order_id=order_id, + good_til_block_time=good_til_block_time + ) + result = await self.send_message(msg) + return result + + async def place_order( + self, + market, + type, + side, + price, + size, + client_id: int, + post_only: bool, + reduce_only: bool = False, + good_til_time_in_seconds: int = 6000, + ): + + clob_pair_id = self._connector._margin_fractions[market]["clob_pair_id"] + atomic_resolution = self._connector._margin_fractions[market]["atomicResolution"] + step_base_quantums = self._connector._margin_fractions[market]["stepBaseQuantums"] + quantum_conversion_exponent = self._connector._margin_fractions[market]["quantumConversionExponent"] + subticks_per_tick = self._connector._margin_fractions[market]["subticksPerTick"] + + order_side = Order.SIDE_BUY if side == "BUY" else Order.SIDE_SELL + quantums = self.calculate_quantums(size, atomic_resolution, step_base_quantums) + subticks = self.calculate_subticks(price, atomic_resolution, quantum_conversion_exponent, subticks_per_tick) + order_flags = CONSTANTS.ORDER_FLAGS_SHORT_TERM if type == "MARKET" else CONSTANTS.ORDER_FLAGS_LONG_TERM + + if type == "MARKET": + time_in_force = CONSTANTS.TIME_IN_FORCE_IOC + latest_block_result = await self.latest_block() + good_til_block = latest_block_result.block.header.height + 1 + 10 + else: + good_til_block = 0 + if post_only: + time_in_force = CONSTANTS.TIME_IN_FORCE_POST_ONLY + else: + time_in_force = CONSTANTS.TIME_IN_FORCE_UNSPECIFIED + + good_til_block, good_til_block_time = self.generate_good_til_fields( + order_flags, + good_til_block, + good_til_time_in_seconds, + ) + client_metadata = 1 if type == "MARKET" else 0 + condition_type = Order.CONDITION_TYPE_UNSPECIFIED + conditional_order_trigger_subticks = 0 + + subaccount_id = SubaccountId(owner=self._dydx_v4_chain_address, number=self._subaccount_num) + + order_id = OrderId( + subaccount_id=subaccount_id, + client_id=client_id, + order_flags=order_flags, + clob_pair_id=int(clob_pair_id) + ) + order = Order( + order_id=order_id, + side=order_side, + quantums=quantums, + subticks=subticks, + good_til_block=good_til_block, + time_in_force=time_in_force, + reduce_only=reduce_only, + client_metadata=client_metadata, + condition_type=condition_type, + conditional_order_trigger_subticks=conditional_order_trigger_subticks, + ) if (good_til_block != 0) else Order( + order_id=order_id, + side=order_side, + quantums=quantums, + subticks=subticks, + good_til_block_time=good_til_block_time, + time_in_force=time_in_force, + reduce_only=reduce_only, + client_metadata=client_metadata, + condition_type=condition_type, + conditional_order_trigger_subticks=conditional_order_trigger_subticks, + ) + msg = MsgPlaceOrder(order=order) + return await self.send_message(msg=msg) + + async def query_account(self): + request = QueryAccountRequest(address=self._dydx_v4_chain_address) + response = await self.auth_client.Account(request) + + 
account = BaseAccount() + if not response.account.Is(BaseAccount.DESCRIPTOR): + raise RuntimeError("Unexpected account type returned from query") + response.account.Unpack(account) + self.sequence = account.sequence + self.number = account.account_number + return account.sequence, account.account_number + + async def prepare_and_broadcast_basic_transaction( + self, + tx: "Transaction", # type: ignore # noqa: F821 + memo: Optional[str] = None, + ): + async with self.transaction_lock: + # query the account information for the sender + sequence = await self.trading_account_sequence() + number = await self.trading_account_number() + # finally, build the final transaction that will be executed with the correct gas and fee values + tx.seal( + SigningCfg.direct(self._private_key, sequence), + fee=f"{CONSTANTS.TX_FEE}{CONSTANTS.FEE_DENOMINATION}", + gas_limit=CONSTANTS.TX_GAS_LIMIT, + memo=memo, + ) + tx.sign(self._private_key, CONSTANTS.CHAIN_ID, number) + tx.complete() + + broadcast_req = BroadcastTxRequest( + tx_bytes=tx.tx.SerializeToString(), mode=BroadcastMode.BROADCAST_MODE_SYNC + ) + result = await self.send_tx_sync_mode(broadcast_req) + err_msg = result.get("raw_log", "") + if CONSTANTS.ACCOUNT_SEQUENCE_MISMATCH_ERROR in err_msg: + await self.initialize_trading_account() + + return result + + async def send_tx_sync_mode(self, broadcast_req): + resp = await self.txs.BroadcastTx(broadcast_req) + result = json_format.MessageToDict( + message=resp, + always_print_fields_with_no_presence=True, + preserving_proto_field_name=True, + use_integers_for_enums=True, + ).get("tx_response", {}) + return result diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/keypairs.py b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/keypairs.py new file mode 100644 index 0000000000..d637a05519 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/keypairs.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2023 dYdX Trading Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ + +"""Interface for a Signer.""" + +import base64 +import hashlib +from typing import Callable, Optional, Union + +import ecdsa +from bip_utils import Bip39SeedGenerator, Bip44, Bip44Coins # type: ignore +from ecdsa.curves import Curve +from ecdsa.util import sigencode_string, sigencode_string_canonize + +# """Crypto KeyPairs (Public Key and Private Key).""" + + +def _base64_decode(value: str) -> bytes: + try: + return base64.b64decode(value) + except Exception as error: + raise RuntimeError("Unable to parse base64 value") from error + + +class PublicKey: + """Public key class.""" + + curve: Curve = ecdsa.SECP256k1 + hash_function: Callable = hashlib.sha256 + + def __init__(self, public_key: Union[bytes, "PublicKey", ecdsa.VerifyingKey]): + """Initialize. 
+ + :param public_key: butes, public key or ecdsa verifying key instance + :raises RuntimeError: Invalid public key + """ + if isinstance(public_key, bytes): + self._verifying_key = ecdsa.VerifyingKey.from_string( + public_key, curve=self.curve, hashfunc=self.hash_function + ) + elif isinstance(public_key, PublicKey): + self._verifying_key = public_key._verifying_key + elif isinstance(public_key, ecdsa.VerifyingKey): + self._verifying_key = public_key + else: + raise RuntimeError("Invalid public key type") # noqa + + self._public_key_bytes: bytes = self._verifying_key.to_string("compressed") + self._public_key: str = base64.b64encode(self._public_key_bytes).decode() + + @property + def public_key(self) -> str: + """ + Get public key. + + :return: str public key. + """ + return self._public_key + + @property + def public_key_hex(self) -> str: + """ + Get public key hex. + + :return: str public key hex. + """ + return self.public_key_bytes.hex() + + @property + def public_key_bytes(self) -> bytes: + """ + Get bytes public key. + + :return: bytes public key. + """ + return self._public_key_bytes + + def verify(self, message: bytes, signature: bytes) -> bool: + """ + Verify message and signature. + + :param message: bytes message content. + :param signature: bytes signature. + :return: bool is message and signature valid. + """ + success: bool = False + + try: + success = self._verifying_key.verify(signature, message) + + except ecdsa.keys.BadSignatureError: + ... + + return success + + def verify_digest(self, digest: bytes, signature: bytes) -> bool: + """ + Verify digest. + + :param digest: bytes digest. + :param signature: bytes signature. + :return: bool is digest valid. + """ + success: bool = False + + try: + success = self._verifying_key.verify_digest(signature, digest) + + except ecdsa.keys.BadSignatureError: # pragma: no cover + ... + + return success + + +class PrivateKey(PublicKey): + """Private key class.""" + + @staticmethod + def from_mnemonic(mnemonic: str) -> "PrivateKey": + """Generate local wallet from mnemonic. + + :param mnemonic: mnemonic + :param prefix: prefix, defaults to None + :return: local wallet + """ + seed_bytes = Bip39SeedGenerator(mnemonic).Generate() + bip44_def_ctx = Bip44.FromSeed( + seed_bytes, Bip44Coins.COSMOS + ).DeriveDefaultPath() + return PrivateKey(bip44_def_ctx.PrivateKey().Raw().ToBytes()) + + def __init__(self, private_key: Optional[Union[bytes, str]] = None): + """ + Initialize. + + :param private_key: bytes private key (optional, None by default). + :raises RuntimeError: if unable to load private key from input. + """ + if private_key is None: + self._signing_key = ecdsa.SigningKey.generate( + curve=self.curve, hashfunc=self.hash_function + ) + elif isinstance(private_key, bytes): + self._signing_key = ecdsa.SigningKey.from_string( + private_key, curve=self.curve, hashfunc=self.hash_function + ) + elif isinstance(private_key, str): + raw_private_key = _base64_decode(private_key) + self._signing_key = ecdsa.SigningKey.from_string( + raw_private_key, curve=self.curve, hashfunc=self.hash_function + ) + + else: + raise RuntimeError("Unable to load private key from input") + + # cache the binary representations of the private key + self._private_key_bytes = self._signing_key.to_string() + self._private_key = base64.b64encode(self._private_key_bytes).decode() + + # construct the base class + super().__init__(self._signing_key.get_verifying_key()) + + @property + def private_key(self) -> str: + """ + Get private key. + + :return: str private key. 
+ """ + return self._private_key + + @property + def private_key_hex(self) -> str: + """ + Get private key hex. + + :return: str private key hex. + """ + return self.private_key_bytes.hex() + + @property + def private_key_bytes(self) -> bytes: + """ + Get bytes private key. + + :return: bytes private key. + """ + return self._private_key_bytes + + def sign( + self, message: bytes, deterministic: bool = True, canonicalise: bool = True + ) -> bytes: + """ + Sign message. + + :param message: bytes message content. + :param deterministic: bool is deterministic. + :param canonicalise: bool is canonicalise. + + :return: bytes signed message. + """ + sigencode = sigencode_string_canonize if canonicalise else sigencode_string + sign_fnc = ( + self._signing_key.sign_deterministic + if deterministic + else self._signing_key.sign + ) + + return sign_fnc(message, sigencode=sigencode) + + def sign_digest( + self, digest: bytes, deterministic=True, canonicalise: bool = True + ) -> bytes: + """ + Sign digest. + + :param digest: bytes digest content. + :param deterministic: bool is deterministic. + :param canonicalise: bool is canonicalise. + + :return: bytes signed digest. + """ + sigencode = sigencode_string_canonize if canonicalise else sigencode_string + sign_fnc = ( + self._signing_key.sign_digest_deterministic + if deterministic + else self._signing_key.sign_digest + ) + + return sign_fnc(digest, sigencode=sigencode) diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/tx.py b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/tx.py new file mode 100644 index 0000000000..593332f4b3 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/tx.py @@ -0,0 +1,269 @@ +"""Transaction.""" + +import re +from dataclasses import dataclass +from enum import Enum +from typing import Any, List, Optional, Union + +from google.protobuf.any_pb2 import Any as ProtoAny +from v4_proto.cosmos.base.v1beta1.coin_pb2 import Coin +from v4_proto.cosmos.crypto.secp256k1.keys_pb2 import PubKey as ProtoPubKey +from v4_proto.cosmos.tx.signing.v1beta1.signing_pb2 import SignMode +from v4_proto.cosmos.tx.v1beta1.tx_pb2 import AuthInfo, Fee, ModeInfo, SignDoc, SignerInfo, Tx, TxBody + +from hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.keypairs import PublicKey + + +def parse_coins(value: str) -> List[Coin]: + """Parse the coins. + + :param value: coins + :raises RuntimeError: If unable to parse the value + :return: coins + """ + coins = [] + + parts = re.split(r",\s*", value) + for part in parts: + part = part.strip() + if part == "": + continue + + match = re.match(r"(\d+)(\w+)", part) + if match is None: + raise RuntimeError(f"Unable to parse value {part}") + + # extract out the groups + amount, denom = match.groups() + coins.append(Coin(amount=amount, denom=denom)) + + return coins + + +class TxState(Enum): + """Transaction state. 
+ + :param Enum: Draft, Sealed, Final + """ + + Draft = 0 + Sealed = 1 + Final = 2 + + +def _is_iterable(value) -> bool: + try: + iter(value) + return True + except TypeError: + return False + + +def _wrap_in_proto_any(values: List[Any]) -> List[ProtoAny]: + any_values = [] + for value in values: + proto_any = ProtoAny() + proto_any.Pack(value, type_url_prefix="/") # type: ignore + any_values.append(proto_any) + return any_values + + +def _create_proto_public_key(public_key: PublicKey) -> ProtoAny: + proto_public_key = ProtoAny() + proto_public_key.Pack( + ProtoPubKey( + key=public_key.public_key_bytes, + ), + type_url_prefix="/", + ) + return proto_public_key + + +class SigningMode(Enum): + """Signing mode. + + :param Enum: Direct + """ + + Direct = 1 + + +@dataclass +class SigningCfg: + """Transaction signing configuration.""" + + mode: SigningMode + sequence_num: int + public_key: PublicKey + + @staticmethod + def direct(public_key: PublicKey, sequence_num: int) -> "SigningCfg": + """Transaction signing configuration using direct mode. + + :param public_key: public key + :param sequence_num: sequence number + :return: Transaction signing configuration + """ + return SigningCfg( + mode=SigningMode.Direct, + sequence_num=sequence_num, + public_key=public_key, + ) + + +class Transaction: + """Transaction.""" + + def __init__(self): + """Init the Transactions with transaction message, state, fee and body.""" + self._msgs: List[Any] = [] + self._state: TxState = TxState.Draft + self._tx_body: Optional[TxBody] = None + self._tx = None + self._fee = None + + @property # noqa + def state(self) -> TxState: + """Get the transaction state. + + :return: current state of the transaction + """ + return self._state + + @property # noqa + def msgs(self): + """Get the transaction messages. + + :return: transaction messages + """ + return self._msgs + + @property + def fee(self) -> Optional[str]: + """Get the transaction fee. + + :return: transaction fee + """ + return self._fee + + @property + def tx(self): + """Initialize. + + :raises RuntimeError: If the transaction has not been completed. + :return: transaction + """ + if self._state != TxState.Final: + raise RuntimeError("The transaction has not been completed") + return self._tx + + def add_message(self, msg: Any) -> "Transaction": + """Initialize. + + :param msg: transaction message (memo) + :raises RuntimeError: If the transaction is not in the draft state. + :return: transaction with message added + """ + if self._state != TxState.Draft: + raise RuntimeError( + "The transaction is not in the draft state. No further messages may be appended" + ) + self._msgs.append(msg) + return self + + def seal( + self, + signing_cfgs: Union[SigningCfg, List[SigningCfg]], + fee: str, + gas_limit: int, + memo: Optional[str] = None, + ) -> "Transaction": + """Seal the transaction. + + :param signing_cfgs: signing configs + :param fee: transaction fee + :param gas_limit: transaction gas limit + :param memo: transaction memo, defaults to None + :return: sealed transaction. 
+ """ + self._state = TxState.Sealed + + input_signing_cfgs: List[SigningCfg] = ( + signing_cfgs if _is_iterable(signing_cfgs) else [signing_cfgs] # type: ignore + ) + + signer_infos = [] + for signing_cfg in input_signing_cfgs: + assert signing_cfg.mode == SigningMode.Direct + + signer_infos.append( + SignerInfo( + public_key=_create_proto_public_key(signing_cfg.public_key), + mode_info=ModeInfo( + single=ModeInfo.Single(mode=SignMode.SIGN_MODE_DIRECT) + ), + sequence=signing_cfg.sequence_num, + ) + ) + + auth_info = AuthInfo( + signer_infos=signer_infos, + fee=Fee(amount=parse_coins(fee), gas_limit=gas_limit), + ) + + self._fee = fee + + self._tx_body = TxBody() + self._tx_body.memo = memo or "" + self._tx_body.messages.extend( + _wrap_in_proto_any(self._msgs) + ) # pylint: disable=E1101 + + self._tx = Tx(body=self._tx_body, auth_info=auth_info) + return self + + def sign( + self, + signer, + chain_id: str, + account_number: int, + deterministic: bool = False, + ) -> "Transaction": + """Sign the transaction. + + :param signer: Signer + :param chain_id: chain id + :param account_number: account number + :param deterministic: deterministic, defaults to False + :raises RuntimeError: If transaction is not sealed + :return: signed transaction + """ + if self.state != TxState.Sealed: + raise RuntimeError( + "Transaction is not sealed. It must be sealed before signing is possible." + ) + + sd = SignDoc() + sd.body_bytes = self._tx.body.SerializeToString() + sd.auth_info_bytes = self._tx.auth_info.SerializeToString() + sd.chain_id = chain_id + sd.account_number = account_number + + data_for_signing = sd.SerializeToString() + + # Generating deterministic signature: + signature = signer.sign( + data_for_signing, + deterministic=deterministic, + canonicalise=True, + ) + self._tx.signatures.extend([signature]) + return self + + def complete(self) -> "Transaction": + """Update transaction state to Final. 
+ + :return: transaction with updated state + """ + self._state = TxState.Final + return self diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_api_order_book_data_source.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_api_order_book_data_source.py new file mode 100644 index 0000000000..30581ddd66 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_api_order_book_data_source.py @@ -0,0 +1,294 @@ +import asyncio +import sys +import time +from decimal import Decimal +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +import dateutil.parser as dp + +from hummingbot.connector.derivative.dydx_v4_perpetual import ( + dydx_v4_perpetual_constants as CONSTANTS, + dydx_v4_perpetual_web_utils as web_utils, +) +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.data_type.funding_info import FundingInfo, FundingInfoUpdate +from hummingbot.core.data_type.order_book import OrderBookMessage +from hummingbot.core.data_type.order_book_message import OrderBookMessageType +from hummingbot.core.data_type.perpetual_api_order_book_data_source import PerpetualAPIOrderBookDataSource +from hummingbot.core.utils.tracking_nonce import NonceCreator +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant + +if TYPE_CHECKING: + from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_derivative import DydxV4PerpetualDerivative + + +class DydxV4PerpetualAPIOrderBookDataSource(PerpetualAPIOrderBookDataSource): + FULL_ORDER_BOOK_RESET_DELTA_SECONDS = sys.maxsize + + def __init__( + self, + trading_pairs: List[str], + connector: "DydxV4PerpetualDerivative", + api_factory: WebAssistantsFactory, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + super().__init__(trading_pairs) + self._connector = connector + self._api_factory = api_factory + self._domain = domain + self._nonce_provider = NonceCreator.for_microseconds() + + def _time(self): + return time.time() + + async def get_last_traded_prices(self, trading_pairs: List[str], domain: Optional[str] = None) -> Dict[str, float]: + return await self._connector.get_last_traded_prices(trading_pairs=trading_pairs) + + async def get_funding_info(self, trading_pair: str) -> FundingInfo: + funding_info_response = await self._request_complete_funding_info(trading_pair) + market_info: Dict[str, Any] = funding_info_response["markets"][trading_pair] + funding_info = FundingInfo( + trading_pair=trading_pair, + index_price=Decimal(str(market_info["oraclePrice"])), + mark_price=Decimal(str(market_info["oraclePrice"])), + next_funding_utc_timestamp=self._next_funding_time(), + rate=Decimal(str(market_info["nextFundingRate"])), + ) + return funding_info + + async def _subscribe_channels(self, ws: WSAssistant): + try: + for trading_pair in self._trading_pairs: + subscribe_orderbook_request: WSJSONRequest = WSJSONRequest( + payload={ + "type": CONSTANTS.WS_TYPE_SUBSCRIBE, + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "id": trading_pair, + }, + is_auth_required=False, + ) + subscribe_trades_request: WSJSONRequest = WSJSONRequest( + payload={ + "type": CONSTANTS.WS_TYPE_SUBSCRIBE, + "channel": CONSTANTS.WS_CHANNEL_TRADES, + "id": trading_pair, + }, + is_auth_required=False, + ) + subscribe_markets_request: WSJSONRequest = WSJSONRequest( + payload={ + 
"type": CONSTANTS.WS_TYPE_SUBSCRIBE, + "channel": CONSTANTS.WS_CHANNEL_MARKETS, + "id": trading_pair, + }, + is_auth_required=False, + ) + await ws.send(subscribe_orderbook_request) + await ws.send(subscribe_trades_request) + await ws.send(subscribe_markets_request) + self.logger().info("Subscribed to public orderbook and trade channels...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception("Unexpected error occurred subscribing to order book trading and delta streams...") + raise + + def _channel_originating_message(self, event_message: Dict[str, Any]) -> str: + channel = "" + if "channel" in event_message: + event_channel = event_message["channel"] + event_type = event_message["type"] + if event_channel == CONSTANTS.WS_CHANNEL_TRADES: + channel = self._trade_messages_queue_key + elif event_channel == CONSTANTS.WS_CHANNEL_ORDERBOOK: + if event_type == CONSTANTS.WS_TYPE_SUBSCRIBED: + channel = self._snapshot_messages_queue_key + if event_type == CONSTANTS.WS_TYPE_CHANNEL_DATA: + channel = self._diff_messages_queue_key + elif event_channel == CONSTANTS.WS_CHANNEL_MARKETS: + channel = self._funding_info_messages_queue_key + return channel + + async def _make_order_book_message( + self, + raw_message: Dict[str, Any], + message_queue: asyncio.Queue, + bids: List[Tuple[float, float]], + asks: List[Tuple[float, float]], + message_type: OrderBookMessageType, + ): + symbol = raw_message["id"] + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol) + + timestamp_s = self._time() + update_id = self._nonce_provider.get_tracking_nonce(timestamp=timestamp_s) + + order_book_message_content = { + "trading_pair": trading_pair, + "update_id": update_id, + "bids": bids, + "asks": asks, + } + message = OrderBookMessage( + message_type=message_type, + content=order_book_message_content, + timestamp=timestamp_s, + ) + message_queue.put_nowait(message) + + async def _parse_order_book_snapshot_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + if raw_message["type"] in ["subscribed", "channel_data"]: + bids, asks = self._get_bids_and_asks_from_snapshot(raw_message["contents"]) + await self._make_order_book_message( + raw_message=raw_message, + message_queue=message_queue, + bids=bids, + asks=asks, + message_type=OrderBookMessageType.SNAPSHOT, + ) + + async def _parse_order_book_diff_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + if raw_message["type"] in ["subscribed", "channel_data"]: + bids, asks = self._get_bids_and_asks_from_diff(raw_message["contents"]) + await self._make_order_book_message( + raw_message=raw_message, + message_queue=message_queue, + bids=bids, + asks=asks, + message_type=OrderBookMessageType.DIFF, + ) + + async def _parse_trade_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + if raw_message["type"] == "channel_data": + symbol = raw_message["id"] + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol) + + trade_updates = raw_message["contents"]["trades"] + + for trade_data in trade_updates: + ts_ms = dp.parse(trade_data["createdAt"]).timestamp() * 1e3 + trade_type = float(TradeType.BUY.value) if trade_data["side"] == "BUY" else float(TradeType.SELL.value) + message_content = { + "trade_id": ts_ms, + "trading_pair": trading_pair, + "trade_type": trade_type, + "amount": trade_data["size"], + "price": trade_data["price"], + } + trade_message = OrderBookMessage( + message_type=OrderBookMessageType.TRADE, + 
content=message_content, + timestamp=ts_ms * 1e-3, + ) + message_queue.put_nowait(trade_message) + + async def _parse_funding_info_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + if raw_message["type"] == "channel_data": + for trading_pair in raw_message["contents"]["markets"].keys(): + if trading_pair in self._trading_pairs: + market_info = raw_message["contents"]["markets"][trading_pair] + + if any( + info in ["oraclePrice", "nextFundingRate", "nextFundingAt"] + for info in market_info.keys() + ): + + info_update = FundingInfoUpdate(trading_pair) + if "oraclePrice" in market_info.keys(): + info_update.index_price = Decimal(market_info["oraclePrice"]) + info_update.mark_price = Decimal(market_info["oraclePrice"]) + if "nextFundingRate" in market_info.keys(): + info_update.rate = Decimal(market_info["nextFundingRate"]) + info_update.next_funding_utc_timestamp = self._next_funding_time() + + message_queue.put_nowait(info_update) + + async def _request_complete_funding_info(self, trading_pair: str) -> Dict[str, Any]: + ex_symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + + params = { + "limit": 1, + "ticker": ex_symbol + } + + rest_assistant = await self._api_factory.get_rest_assistant() + endpoint = CONSTANTS.PATH_MARKETS + url = web_utils.public_rest_url(path_url=endpoint) + data = await rest_assistant.execute_request( + url=url, + throttler_limit_id=endpoint, + params=params, + method=RESTMethod.GET, + ) + return data + + async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: + snapshot_response = await self._request_order_book_snapshot(trading_pair) + + timestamp = self._time() + update_id = self._nonce_provider.get_tracking_nonce(timestamp=timestamp) + + bids, asks = self._get_bids_and_asks_from_snapshot(snapshot_response) + order_book_message_content = { + "trading_pair": trading_pair, + "update_id": update_id, + "bids": bids, + "asks": asks, + } + snapshot_msg: OrderBookMessage = OrderBookMessage( + message_type=OrderBookMessageType.SNAPSHOT, + content=order_book_message_content, + timestamp=timestamp, + ) + + return snapshot_msg + + async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any]: + rest_assistant = await self._api_factory.get_rest_assistant() + endpoint = CONSTANTS.PATH_SNAPSHOT + ex_symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + url = web_utils.public_rest_url(path_url=endpoint + "/" + ex_symbol) + data = await rest_assistant.execute_request( + url=url, + throttler_limit_id=endpoint, + method=RESTMethod.GET, + ) + + return data + + @staticmethod + def _get_bids_and_asks_from_snapshot( + snapshot: Dict[str, List[Dict[str, Union[str, int, float]]]] + ) -> Tuple[List[Tuple[float, float]], List[Tuple[float, float]]]: + + bids = [(Decimal(bid["price"]), Decimal(bid["size"])) for bid in snapshot["bids"]] + asks = [(Decimal(ask["price"]), Decimal(ask["size"])) for ask in snapshot["asks"]] + + return bids, asks + + @staticmethod + def _get_bids_and_asks_from_diff( + diff: Dict[str, List[Dict[str, Union[str, int, float]]]] + ) -> Tuple[List[Tuple[float, float]], List[Tuple[float, float]]]: + + bids = [(Decimal(bid[0]), Decimal(bid[1])) for bid in diff.get("bids", [])] + asks = [(Decimal(ask[0]), Decimal(ask[1])) for ask in diff.get("asks", [])] + + return bids, asks + + async def _connected_websocket_assistant(self) -> WSAssistant: + ws: WSAssistant = await self._api_factory.get_ws_assistant() + 
await ws.connect(ws_url=CONSTANTS.DYDX_V4_WS_URL, ping_timeout=CONSTANTS.HEARTBEAT_INTERVAL) + return ws + + async def _request_order_book_snapshots(self, output: asyncio.Queue): + pass # unused + + def _next_funding_time(self) -> int: + """ + Funding on dYdX v4 is settled every hour, so the next funding time is the start of the next hour. + """ + return ((time.time() // 3600) + 1) * 3600 diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_constants.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_constants.py new file mode 100644 index 0000000000..98c045bc3e --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_constants.py @@ -0,0 +1,206 @@ +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit +from hummingbot.core.data_type.common import OrderType +from hummingbot.core.data_type.in_flight_order import OrderState + +# A single source of truth for constant variables related to the exchange + +EXCHANGE_NAME = "dydx_v4_perpetual" +DEFAULT_DOMAIN = "com" + +API_VERSION = "v4" +CURRENCY = "USD" + +HBOT_BROKER_ID = "Hummingbot" +MAX_ID_LEN = 40 +HEARTBEAT_INTERVAL = 30.0 +ORDER_EXPIRATION = 2419200  # 28 days + +# height limited to 2147483647 for 32-bit OS, equivalent to 2038-01-19T03:14Z +TX_MAX_HEIGHT = 2147483647 +LIMIT_FEE = 0.015 + +# API Base URLs +MAX_ID_BIT_COUNT = 31 + +# data_source grpc +DYDX_V4_AERIAL_GRPC_OR_REST_PREFIX = "grpc" +DYDX_V4_AERIAL_CONFIG_URL = 'dydx-grpc.publicnode.com:443' +DYDX_V4_QUERY_AERIAL_CONFIG_URL = 'dydx-grpc.publicnode.com:443' +CHAIN_ID = 'dydx-mainnet-1' +FEE_DENOMINATION = "afet" +TX_FEE = 0 +TX_GAS_LIMIT = 0 + +DYDX_V4_VALIDATOR_REST_BASE_URL = "https://dydx-grpc.publicnode.com:443" + +DYDX_V4_INDEXER_REST_BASE_URL = "https://indexer.dydx.trade" + +DYDX_V4_REST_URL = "{}/{}".format(DYDX_V4_INDEXER_REST_BASE_URL, API_VERSION) + +DYDX_V4_WS_URL = "wss://indexer.dydx.trade/{}/ws".format(API_VERSION) + +# Public REST Endpoints + +PATH_MARKETS = "/perpetualMarkets" + +PATH_HISTORY_FUNDING = "/historicalFunding" +PATH_TICKER = "/stats" + +PATH_SNAPSHOT = "/orderbooks/perpetualMarket" +PATH_TIME = "/time" + +PATH_ORDERS = "/orders" + +PATH_FILLS = "/fills" +PATH_POSITIONS = "/perpetualPositions" + +PATH_ACCOUNTS = "/accounts" +PATH_CONFIG = "/config" + +PATH_FUNDING = "/historical-pnl" + +PATH_SUBACCOUNT = "/addresses" + +# WS Endpoints +WS_PATH_ACCOUNTS = "/ws/accounts" + +# WS Channels + +WS_CHANNEL_TRADES = "v4_trades" +WS_CHANNEL_ORDERBOOK = "v4_orderbook" +WS_CHANNEL_MARKETS = "v4_markets" +WS_CHANNEL_ACCOUNTS = "v4_subaccounts" + +WS_TYPE_SUBSCRIBE = "subscribe" +WS_TYPE_SUBSCRIBED = "subscribed" +WS_TYPE_CHANNEL_DATA = "channel_data" + +TIF_GOOD_TIL_TIME = "GTT" +TIF_FILL_OR_KILL = "FOK" +TIF_IMMEDIATE_OR_CANCEL = "IOC" + +FEES_KEY = "*" +FEE_MAKER_KEY = "maker" +FEE_TAKER_KEY = "taker" + +ORDER_TYPE_MAP = { + OrderType.LIMIT: "LIMIT", + OrderType.LIMIT_MAKER: "LIMIT", + OrderType.MARKET: "MARKET", +} + +ORDER_STATE = { + "PENDING": OrderState.OPEN, + "OPEN": OrderState.OPEN, + "BEST_EFFORT_OPENED": OrderState.OPEN, + "FILLED": OrderState.FILLED, + "CANCELED": OrderState.CANCELED, + "BEST_EFFORT_CANCELED": OrderState.PENDING_CANCEL, +} + +WS_CHANNEL_TO_PATH = {WS_CHANNEL_ACCOUNTS: WS_PATH_ACCOUNTS} + +LAST_FEE_PAYMENTS_MAX = 1 +LAST_FILLS_MAX = 100 + +LIMIT_ID_GET = "LIMIT_ID_GET" +LIMIT_ID_ORDER_CANCEL = "LIMIT_ID_ORDER_CANCEL" +LIMIT_ID_LONG_TERM_ORDER_PLACE = "LIMIT_ID_LONG_TERM_ORDER_PLACE" + 
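+# NOTE: the LIMIT_* and MARKET_* strings in this block are client-side throttler bucket ids consumed by the
+# RATE_LIMITS list further down: long-term (stateful) order placement is limited to 20 requests per 100 seconds
+# and short-term (market) order placement to 200 requests per second.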
+LIMIT_LONG_TERM_ORDER_PLACE = "LIMIT_LONG_TERM_ORDER_PLACE" +MARKET_SHORT_TERM_ORDER_PLACE = "LIMIT_LONG_TERM_ORDER_PLACE" + +NO_LIMIT = 1000 +ONE_SECOND = 1 +ONE_HUNDRED_SECOND = 100 + +QUOTE_QUANTUMS_ATOMIC_RESOLUTION = -6 +ORDER_FLAGS_SHORT_TERM = 0 +ORDER_FLAGS_LONG_TERM = 64 + +TIME_IN_FORCE_IOC = 1 +TIME_IN_FORCE_POST_ONLY = 2 +TIME_IN_FORCE_UNSPECIFIED = 0 + +RATE_LIMITS = [ + # Pools + RateLimit(limit_id=LIMIT_ID_GET, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=LIMIT_LONG_TERM_ORDER_PLACE, limit=20, time_interval=ONE_HUNDRED_SECOND), + RateLimit(limit_id=MARKET_SHORT_TERM_ORDER_PLACE, limit=200, time_interval=ONE_SECOND), + # Weighted limits + RateLimit( + limit_id=PATH_CONFIG, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_FILLS, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_ORDERS, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_FUNDING, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_HISTORY_FUNDING, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_ACCOUNTS, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_SUBACCOUNT, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_MARKETS, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_TIME, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=PATH_SNAPSHOT, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_ID_GET)], + ), + RateLimit( + limit_id=LIMIT_ID_LONG_TERM_ORDER_PLACE, + limit=2, + time_interval=ONE_SECOND, + linked_limits=[LinkedLimitWeightPair(LIMIT_LONG_TERM_ORDER_PLACE)], + ), + RateLimit( + limit_id=LIMIT_ID_ORDER_CANCEL, + limit=NO_LIMIT, + time_interval=ONE_SECOND, + + ), +] + +ACCOUNT_SEQUENCE_MISMATCH_ERROR = "account sequence mismatch" +ERR_MSG_NO_ORDER_FOUND = "Stateful order does not exist" diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_derivative.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_derivative.py new file mode 100644 index 0000000000..e3efe64d44 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_derivative.py @@ -0,0 +1,858 @@ +import asyncio +import time +from decimal import Decimal +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +from bidict import bidict + +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_constants as CONSTANTS +from hummingbot.connector.constants import s_decimal_0, s_decimal_NaN +from hummingbot.connector.derivative.dydx_v4_perpetual import dydx_v4_perpetual_web_utils as web_utils +from hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.dydx_v4_data_source import DydxPerpetualV4Client +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source import ( + 
DydxV4PerpetualAPIOrderBookDataSource, +) +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_user_stream_data_source import ( + DydxV4PerpetualUserStreamDataSource, +) +from hummingbot.connector.derivative.position import Position +from hummingbot.connector.perpetual_derivative_py_base import PerpetualDerivativePyBase +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair, get_new_numeric_client_order_id +from hummingbot.core.api_throttler.data_types import RateLimit +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, PositionSide, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.event.events import AccountEvent, PositionModeChangeEvent +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.utils.estimate_fee import build_perpetual_trade_fee +from hummingbot.core.utils.tracking_nonce import NonceCreator +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + + +class DydxV4PerpetualDerivative(PerpetualDerivativePyBase): + web_utils = web_utils + + def __init__( + self, + client_config_map: "ClientConfigAdapter", + dydx_v4_perpetual_secret_phrase: str, + dydx_v4_perpetual_chain_address: str, + trading_pairs: Optional[List[str]] = None, + trading_required: bool = True, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + self._dydx_v4_perpetual_secret_phrase = dydx_v4_perpetual_secret_phrase + self._dydx_v4_perpetual_chain_address = dydx_v4_perpetual_chain_address + self._trading_pairs = trading_pairs + self._trading_required = trading_required + self._domain = domain + self._client_order_id_nonce_provider = NonceCreator.for_microseconds() + + self._tx_client: DydxPerpetualV4Client = self._create_tx_client() + + self._margin_fractions = {} + self._position_id = None + + self._allocated_collateral = {} + self.subaccount_id = 0 + + super().__init__(client_config_map=client_config_map) + + @property + def name(self) -> str: + return CONSTANTS.EXCHANGE_NAME + + @property + def authenticator(self) -> AuthBase: + return None + + @property + def rate_limits_rules(self) -> List[RateLimit]: + return CONSTANTS.RATE_LIMITS + + @property + def domain(self) -> str: + return self._domain + + @property + def client_order_id_max_length(self) -> int: + return CONSTANTS.MAX_ID_LEN + + @property + def client_order_id_prefix(self) -> str: + return CONSTANTS.HBOT_BROKER_ID + + @property + def trading_rules_request_path(self) -> str: + return CONSTANTS.PATH_MARKETS + + @property + def trading_pairs_request_path(self) -> str: + return CONSTANTS.PATH_MARKETS + + @property + def check_network_request_path(self) -> str: + return CONSTANTS.PATH_TIME + + @property + def trading_pairs(self) -> List[str]: + return self._trading_pairs + + @property + def is_cancel_request_in_exchange_synchronous(self) -> bool: + return True + + @property + def is_trading_required(self) -> bool: + return self._trading_required + + @property + def funding_fee_poll_interval(self) -> int: + return 120 + + def supported_order_types(self) -> 
List[OrderType]: + return [OrderType.LIMIT, OrderType.LIMIT_MAKER, OrderType.MARKET] + + def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception) -> bool: + return False + + def _is_request_result_an_error_related_to_time_synchronizer(self, request_result: Dict[str, Any]) -> bool: + if "errors" in request_result and "msg" in request_result["errors"]: + if "Timestamp must be within" in request_result["errors"]["msg"]: + return True + return False + + async def _make_trading_rules_request(self) -> Any: + exchange_info = await self._api_get(path_url=self.trading_rules_request_path, + params={}) + return exchange_info + + async def _make_trading_pairs_request(self) -> Any: + exchange_info = await self._api_get(path_url=self.trading_pairs_request_path, + params={}) + return exchange_info + + def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: + # TODO: implement this method correctly for the connector + # The default implementation was added when the functionality to detect not found orders was introduced in the + # ExchangePyBase class. Also fix the unit test test_lost_order_removed_if_not_found_during_order_status_update + # when replacing the dummy implementation + return False + + def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool: + # The default implementation was added when the functionality to detect not found orders was introduced in the + # ExchangePyBase class. Also fix the unit test test_cancel_order_not_found_in_the_exchange when replacing the + # dummy implementation + return False + + async def start_network(self): + await super().start_network() + await self._update_trading_rules() + await self._tx_client.initialize_trading_account() + + async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): + async with self._throttler.execute_task(limit_id=CONSTANTS.LIMIT_ID_ORDER_CANCEL): + if not self._margin_fractions: + await self._update_trading_rules() + for i in range(3): + resp = await self._tx_client.cancel_order( + client_id=int(tracked_order.client_order_id), + clob_pair_id=self._margin_fractions[tracked_order.trading_pair]["clob_pair_id"], + order_flags=CONSTANTS.ORDER_FLAGS_LONG_TERM, + good_til_block_time=int(time.time()) + CONSTANTS.ORDER_EXPIRATION + ) + if CONSTANTS.ACCOUNT_SEQUENCE_MISMATCH_ERROR in resp['raw_log']: + self.logger().warning( + f"Failed to cancel order {tracked_order.client_order_id} (retry {i + 1}), {resp['raw_log']}") + await asyncio.sleep(1) + continue + else: + break + if resp["raw_log"] != "[]" and CONSTANTS.ERR_MSG_NO_ORDER_FOUND not in resp['raw_log']: + raise ValueError(f"Error sending the order cancel transaction ({resp['raw_log']})") + else: + return True + + def buy(self, + trading_pair: str, + amount: Decimal, + order_type=OrderType.LIMIT, + price: Decimal = s_decimal_NaN, + **kwargs) -> str: + """ + Creates a promise to create a buy order using the parameters + + :param trading_pair: the token pair to operate with + :param amount: the order amount + :param order_type: the type of order to create (MARKET, LIMIT, LIMIT_MAKER) + :param price: the order price + + :return: the id assigned by the connector to the order (the client id) + """ + order_id = str(get_new_numeric_client_order_id( + nonce_creator=self._client_order_id_nonce_provider, + max_id_bit_count=CONSTANTS.MAX_ID_BIT_COUNT, + )) + safe_ensure_future(self._create_order( + trade_type=TradeType.BUY, + order_id=order_id, + 
trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + **kwargs)) + return order_id + + def sell(self, + trading_pair: str, + amount: Decimal, + order_type: OrderType = OrderType.LIMIT, + price: Decimal = s_decimal_NaN, + **kwargs) -> str: + """ + Creates a promise to create a sell order using the parameters. + :param trading_pair: the token pair to operate with + :param amount: the order amount + :param order_type: the type of order to create (MARKET, LIMIT, LIMIT_MAKER) + :param price: the order price + :return: the id assigned by the connector to the order (the client id) + """ + order_id = str(get_new_numeric_client_order_id( + nonce_creator=self._client_order_id_nonce_provider, + max_id_bit_count=CONSTANTS.MAX_ID_BIT_COUNT, + )) + safe_ensure_future(self._create_order( + trade_type=TradeType.SELL, + order_id=order_id, + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + **kwargs)) + return order_id + + async def _place_order( + self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Decimal, + position_action: PositionAction = PositionAction.NIL, + **kwargs, + ): + if not self._margin_fractions: + await self._update_trading_rules() + if order_type.is_limit_type(): + _order_type = "LIMIT" + limit_id = CONSTANTS.LIMIT_LONG_TERM_ORDER_PLACE + else: + limit_id = CONSTANTS.MARKET_SHORT_TERM_ORDER_PLACE + _order_type = "MARKET" + if trade_type.name.lower() == 'buy': + # Pad the submitted price aggressively (1.5x the book price for buys, 0.75x for sells) so the order can execute; otherwise the transaction may be cancelled without filling + price = Decimal("1.5") * self.get_price_for_volume( + trading_pair, + True, + amount + ).result_price + else: + price = Decimal("0.75") * self.get_price_for_volume( + trading_pair, + False, + amount + ).result_price + price = self.quantize_order_price(trading_pair, price) + side = "BUY" if trade_type == TradeType.BUY else "SELL" + expiration = CONSTANTS.ORDER_EXPIRATION + reduce_only = False + + post_only = order_type is OrderType.LIMIT_MAKER + market = await self.exchange_symbol_associated_to_pair(trading_pair) + try: + async with self._throttler.execute_task(limit_id=limit_id): + for i in range(3): + resp = await self._tx_client.place_order( + market=market, + type=_order_type, + side=side, + price=price, + size=amount, + client_id=int(order_id), + post_only=post_only, + reduce_only=reduce_only, + good_til_time_in_seconds=expiration, + ) + if CONSTANTS.ACCOUNT_SEQUENCE_MISMATCH_ERROR in resp['raw_log']: + self.logger().warning(f"Failed to submit order {order_id} (retry {i + 1}), {resp['raw_log']}") + await asyncio.sleep(1) + continue + else: + break + if resp["raw_log"] != "[]" or resp["txhash"] in [None, ""]: + raise ValueError(f"Error sending the order creation transaction ({resp['raw_log']})") + except asyncio.CancelledError: + raise + except Exception: + raise + # no exchange order id + return None, self._time() + + async def _place_order_and_process_update(self, order: InFlightOrder, **kwargs) -> str: + exchange_order_id, update_timestamp = await self._place_order( + order_id=order.client_order_id, + trading_pair=order.trading_pair, + amount=order.amount, + trade_type=order.trade_type, + order_type=order.order_type, + price=order.price, + **kwargs, + ) + + order_update: OrderUpdate = OrderUpdate( + client_order_id=order.client_order_id, + exchange_order_id=exchange_order_id, + trading_pair=order.trading_pair, + update_timestamp=update_timestamp, + new_state=OrderState.OPEN, + ) + 
self._order_tracker.process_order_update(order_update) + + return exchange_order_id + + def _on_order_failure( + self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Optional[Decimal], + exception: Exception, + **kwargs, + ): + self.logger().network( + f"Error submitting {trade_type.name.lower()} {order_type.name.upper()} order to {self.name_cap} for " + f"{amount} {trading_pair} {price}.", + exc_info=exception, + app_warning_msg=f"Failed to submit {trade_type.name.upper()} order to {self.name_cap}. Check API key and network connection." + ) + self._update_order_after_failure(order_id=order_id, trading_pair=trading_pair) + + def _get_fee( + self, + base_currency: str, + quote_currency: str, + order_type: OrderType, + order_side: TradeType, + position_action: PositionAction, + amount: Decimal, + price: Decimal = s_decimal_NaN, + is_maker: Optional[bool] = None, + ) -> TradeFeeBase: + is_maker = is_maker or False + fee = build_perpetual_trade_fee( + self.name, + is_maker, + position_action=position_action, + base_currency=base_currency, + quote_currency=quote_currency, + order_type=order_type, + order_side=order_side, + amount=amount, + price=price, + ) + return fee + + async def _update_trading_fees(self): + pass + + async def _user_stream_event_listener(self): + + async for event_message in self._iter_user_event_queue(): + try: + event: Dict[str, Any] = event_message + data: Dict[str, Any] = event["contents"] + quote = "USD" + if "subaccount" in data.keys() and len(data["subaccount"]) > 0: + self._account_balances[quote] = Decimal(data["subaccount"]["equity"]) + self._account_available_balances[quote] = Decimal(data["subaccount"]["freeCollateral"]) + if "openPerpetualPositions" in data["subaccount"]: + await self._process_open_positions(data["subaccount"]["openPerpetualPositions"]) + if "orders" in data.keys() and len(data["orders"]) > 0: + for order in data["orders"]: + client_order_id: str = order["clientId"] + exchange_order_id: str = order["id"] + tracked_order = self._order_tracker.all_updatable_orders.get(client_order_id) + trading_pair = await self.trading_pair_associated_to_exchange_symbol(order["ticker"]) + if tracked_order is not None: + state = CONSTANTS.ORDER_STATE[order["status"]] + new_order_update: OrderUpdate = OrderUpdate( + trading_pair=tracked_order.trading_pair, + update_timestamp=self.current_timestamp, + new_state=state, + client_order_id=tracked_order.client_order_id, + exchange_order_id=exchange_order_id, + ) + self._order_tracker.process_order_update(new_order_update) + # Processing all orders of the account, not just the client's + if order["status"] in ["OPEN"]: + initial_margin_requirement = ( + Decimal(order["price"]) + * Decimal(order["size"]) + * self._margin_fractions[trading_pair]["initial"] + ) + initial_margin_requirement = abs(initial_margin_requirement) + self._allocated_collateral[order["id"]] = initial_margin_requirement + # self._allocated_collateral_sum += initial_margin_requirement + self._account_available_balances[quote] -= initial_margin_requirement + if order["status"] in ["FILLED", "CANCELED"]: + if order["id"] in self._allocated_collateral: + # Only deduct orders that were previously in the OPEN state + # Some orders are filled instantly and reach only the PENDING state + # self._allocated_collateral_sum -= self._allocated_collateral[order["id"]] + self._account_available_balances[quote] += self._allocated_collateral[order["id"]] + del self._allocated_collateral[order["id"]] 
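+                    # Fill events arrive in the same subaccounts channel payload; _process_ws_fills matches each fill to a tracked order by its exchange order id and emits a TradeUpdate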
+ if "fills" in data.keys() and len(data["fills"]) > 0: + trade_updates = await self._process_ws_fills(data["fills"]) + for trade_update in trade_updates: + self._order_tracker.process_trade_update(trade_update) + + if "perpetualPositions" in data.keys() and len(data["perpetualPositions"]) > 0: + # this is hit when a position is closed + positions = data["perpetualPositions"] + for position in positions: + trading_pair = position["market"] + if trading_pair not in self._trading_pairs: + continue + position_side = PositionSide[position["side"]] + pos_key = self._perpetual_trading.position_key(trading_pair, position_side) + amount = Decimal(position.get("size")) + + if amount != s_decimal_0: + position = Position( + trading_pair=trading_pair, + position_side=position_side, + unrealized_pnl=Decimal(position.get("unrealizedPnl", 0)), + entry_price=Decimal(position.get("entryPrice")), + amount=amount, + leverage=self.get_leverage(trading_pair), + ) + self._perpetual_trading.set_position(pos_key, position) + else: + if self._perpetual_trading.get_position(pos_key): + self._perpetual_trading.remove_position(pos_key) + + # if "fundingPayments" in data: + # if event["type"] != CONSTANTS.WS_TYPE_SUBSCRIBED: # Only subsequent funding payments + # funding_payments = await self._process_funding_payments(data["fundingPayments"]) + # for trading_pair in funding_payments: + # timestamp = funding_payments[trading_pair]["timestamp"] + # funding_rate = funding_payments[trading_pair]["funding_rate"] + # payment = funding_payments[trading_pair]["payment"] + # self._emit_funding_payment_event(trading_pair, timestamp, funding_rate, payment, False) + + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error in user stream listener loop.", exc_info=True) + + async def _format_trading_rules(self, exchange_info_dict: Dict[str, Any]) -> List[TradingRule]: + trading_rules = [] + markets_info = exchange_info_dict["markets"] + for market_name, market_info in markets_info.items(): + if web_utils.is_exchange_information_valid(market_info): + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=market_name) + market = markets_info[market_name] + try: + collateral_token = CONSTANTS.CURRENCY + trading_rules += [ + TradingRule( + trading_pair=trading_pair, + min_price_increment=Decimal(market["tickSize"]), + min_base_amount_increment=Decimal(market["stepSize"]), + supports_limit_orders=True, + supports_market_orders=True, + buy_order_collateral_token=collateral_token, + sell_order_collateral_token=collateral_token, + ) + ] + self._margin_fractions[trading_pair] = { + "initial": Decimal(market["initialMarginFraction"]), + "maintenance": Decimal(market["maintenanceMarginFraction"]), + "clob_pair_id": market["clobPairId"], + "atomicResolution": market["atomicResolution"], + "stepBaseQuantums": market["stepBaseQuantums"], + "quantumConversionExponent": market["quantumConversionExponent"], + "subticksPerTick": market["subticksPerTick"], + } + except Exception: + self.logger().exception("Error updating trading rules") + return trading_rules + + async def _update_balances(self): + path = f"{CONSTANTS.PATH_SUBACCOUNT}/{self._dydx_v4_perpetual_chain_address}/subaccountNumber/{self.subaccount_id}" + response: Dict[str, Dict[str, Any]] = await self._api_get( + path_url=path, params={}, limit_id=CONSTANTS.PATH_SUBACCOUNT + ) + quote = CONSTANTS.CURRENCY + self._account_available_balances.clear() + self._account_balances.clear() + + self._account_balances[quote] = 
Decimal(response["subaccount"]["equity"]) + self._account_available_balances[quote] = Decimal(response["subaccount"]["freeCollateral"]) + + async def _process_ws_fills(self, fills_data: List) -> List[TradeUpdate]: + trade_updates = [] + + for fill_data in fills_data: + exchange_order_id: str = fill_data["orderId"] + all_orders = self._order_tracker.all_fillable_orders + try: + for k, v in all_orders.items(): + await v.get_exchange_order_id() + except Exception as e: + self.logger().info( + f"Unable to locate order {exchange_order_id} on exchange. Pending update from blockchain {e}") + _cli_tracked_orders = [o for o in all_orders.values() if exchange_order_id == o.exchange_order_id] + if len(_cli_tracked_orders) == 0 or _cli_tracked_orders[0] is None: + order_update: OrderUpdate = await self._request_order_status(tracked_order=None, + exchange_order_id=exchange_order_id) + # NOTE: Untracked order + if order_update is None: + self.logger().debug(f"Received untracked order with exchange order id of {exchange_order_id}") + return trade_updates + client_order_id = order_update.client_order_id + tracked_order = self._order_tracker.all_updatable_orders.get(client_order_id) + else: + tracked_order = _cli_tracked_orders[0] + trade_update = self._process_order_fills(fill_data=fill_data, order=tracked_order) + if trade_update is not None: + trade_updates.append(trade_update) + return trade_updates + + async def _process_open_positions(self, open_positions: Dict): + for market, position in open_positions.items(): + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=market) + position_side = PositionSide[position["side"]] + pos_key = self._perpetual_trading.position_key(trading_pair, position_side) + amount = Decimal(position.get("size")) + if amount != s_decimal_0: + entry_price = Decimal(position.get("entryPrice")) + unrealized_pnl = Decimal(position.get("unrealizedPnl")) + position = Position( + trading_pair=trading_pair, + position_side=position_side, + unrealized_pnl=unrealized_pnl, + entry_price=entry_price, + amount=amount, + leverage=self.get_leverage(trading_pair), + ) + self._perpetual_trading.set_position(pos_key, position) + else: + self._perpetual_trading.remove_position(pos_key) + + async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: + trade_updates = [] + + if order.exchange_order_id is not None: + try: + all_fills_response = await self._request_order_fills(order=order) + fills_data = all_fills_response["fills"] + trade_updates_tmp = self._process_rest_fills(fills_data) + trade_updates += trade_updates_tmp + + except IOError as ex: + if not self._is_request_exception_related_to_time_synchronizer(request_exception=ex): + raise + return trade_updates + + def _process_rest_fills(self, fills_data: List) -> List[TradeUpdate]: + trade_updates = [] + all_fillable_orders_by_exchange_order_id = { + order.exchange_order_id: order for order in self._order_tracker.all_fillable_orders.values() + } + for fill_data in fills_data: + exchange_order_id: str = fill_data["orderId"] + order = all_fillable_orders_by_exchange_order_id.get(exchange_order_id) + trade_update = self._process_order_fills(fill_data=fill_data, order=order) + if trade_update is not None: + trade_updates.append(trade_update) + return trade_updates + + def _process_order_fills(self, fill_data: Dict, order: InFlightOrder) -> Optional[TradeUpdate]: + trade_update = None + if order is not None: + fee_asset = order.quote_asset + fee_amount = Decimal(fill_data["fee"]) + flat_fees 
= [] if fee_amount == Decimal("0") else [TokenAmount(amount=fee_amount, token=fee_asset)] + position_side = fill_data["side"] + + position_action = (PositionAction.OPEN + if (order.trade_type is TradeType.BUY and position_side == "BUY" + or order.trade_type is TradeType.SELL and position_side == "SELL") + else PositionAction.CLOSE) + + fee = TradeFeeBase.new_perpetual_fee( + fee_schema=self.trade_fee_schema(), + position_action=position_action, + percent_token=fee_asset, + flat_fees=flat_fees, + ) + + trade_update = TradeUpdate( + trade_id=fill_data["id"], + client_order_id=order.client_order_id, + exchange_order_id=order.exchange_order_id, + trading_pair=order.trading_pair, + fill_timestamp=fill_data["createdAt"], + fill_price=Decimal(fill_data["price"]), + fill_base_amount=Decimal(fill_data["size"]), + fill_quote_amount=Decimal(fill_data["price"]) * Decimal(fill_data["size"]), + fee=fee, + ) + return trade_update + + async def _request_order_fills(self, order: InFlightOrder) -> Dict[str, Any]: + + body_params = { + 'address': self._dydx_v4_perpetual_chain_address, + 'subaccountNumber': self.subaccount_id, + 'marketType': 'PERPETUAL', + 'market': order.trading_pair, + 'limit': CONSTANTS.LAST_FILLS_MAX, + } + + res = await self._api_get( + path_url=CONSTANTS.PATH_FILLS, + params=body_params, + ) + return res + + async def _request_order_status(self, tracked_order: InFlightOrder, exchange_order_id=None) -> OrderUpdate: + try: + orders_rsp = await self._api_get( + path_url=CONSTANTS.PATH_ORDERS, + limit_id=CONSTANTS.PATH_ORDERS, + params={ + 'address': self._dydx_v4_perpetual_chain_address, + 'subaccountNumber': self.subaccount_id, + 'goodTilBlockBeforeOrAt': CONSTANTS.TX_MAX_HEIGHT, + 'limit': CONSTANTS.LAST_FILLS_MAX, + } + ) + if exchange_order_id: + updated_order_data = next( + (order for order in orders_rsp if + order["id"] == exchange_order_id), None + ) + if updated_order_data is None: + return None + tracked_order = self._order_tracker.all_updatable_orders.get(str(updated_order_data["clientId"])) + else: + updated_order_data = next( + (order for order in orders_rsp if + int(order["clientId"]) == int(tracked_order.client_order_id)), None + ) + + if updated_order_data is None: + # If the order is not found in the response, return an OrderUpdate with the same status as before + return OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=tracked_order.exchange_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=self._time_synchronizer.time(), + new_state=tracked_order.current_state, + ) + client_order_id = str(updated_order_data["clientId"]) + + order_update: OrderUpdate = OrderUpdate( + trading_pair=tracked_order.trading_pair, + update_timestamp=self.current_timestamp, + new_state=CONSTANTS.ORDER_STATE[updated_order_data["status"]], + client_order_id=client_order_id, + exchange_order_id=updated_order_data["id"], + ) + except IOError as ex: + if self._is_request_exception_related_to_time_synchronizer(request_exception=ex): + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=self.current_timestamp, + new_state=tracked_order.current_state, + ) + else: + raise + return order_update + + def _create_web_assistants_factory(self) -> WebAssistantsFactory: + return web_utils.build_api_factory( + throttler=self._throttler, + ) + + def _create_tx_client(self) -> DydxPerpetualV4Client: + return DydxPerpetualV4Client( + self._dydx_v4_perpetual_secret_phrase, + 
self._dydx_v4_perpetual_chain_address, + connector=self + ) + + def _create_order_book_data_source(self) -> DydxV4PerpetualAPIOrderBookDataSource: + return DydxV4PerpetualAPIOrderBookDataSource( + self.trading_pairs, + connector=self, + api_factory=self._web_assistants_factory, + domain=self._domain, + ) + + def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource: + return DydxV4PerpetualUserStreamDataSource(api_factory=self._web_assistants_factory, + connector=self) + + def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]): + markets = exchange_info["markets"] + + mapping = bidict() + for key, val in markets.items(): + if web_utils.is_exchange_information_valid(val): + exchange_symbol = val["ticker"] + base = exchange_symbol.split("-")[0] + quote = CONSTANTS.CURRENCY + trading_pair = combine_to_hb_trading_pair(base, quote) + if trading_pair in mapping.inverse: + self._resolve_trading_pair_symbols_duplicate(mapping, exchange_symbol, base, quote) + else: + mapping[exchange_symbol] = trading_pair + + self._set_trading_pair_symbol_map(mapping) + + def _resolve_trading_pair_symbols_duplicate(self, mapping: bidict, new_exchange_symbol: str, base: str, quote: str): + """Resolves name conflicts provoked by futures contracts. + If the expected BASEQUOTE combination matches one of the exchange symbols, it is the one taken, otherwise, + the trading pair is removed from the map and an error is logged. + """ + expected_exchange_symbol = f"{base}{quote}" + trading_pair = combine_to_hb_trading_pair(base, quote) + current_exchange_symbol = mapping.inverse[trading_pair] + if current_exchange_symbol == expected_exchange_symbol: + pass + elif new_exchange_symbol == expected_exchange_symbol: + mapping.pop(current_exchange_symbol) + mapping[new_exchange_symbol] = trading_pair + else: + self.logger().error( + f"Could not resolve the exchange symbols {new_exchange_symbol} and {current_exchange_symbol}" + ) + mapping.pop(current_exchange_symbol) + + async def _get_last_traded_price(self, trading_pair: str) -> float: + exchange_symbol = await self.exchange_symbol_associated_to_pair(trading_pair) + params = {} + + response: Dict[str, Dict[str, Any]] = await self._api_get( + path_url=CONSTANTS.PATH_MARKETS, params=params, is_auth_required=False + ) + price = float(response["markets"][exchange_symbol]["oraclePrice"]) + return price + + def supported_position_modes(self) -> List[PositionMode]: + return [PositionMode.ONEWAY] + + def get_buy_collateral_token(self, trading_pair: str) -> str: + trading_rule: TradingRule = self._trading_rules[trading_pair] + return trading_rule.buy_order_collateral_token + + def get_sell_collateral_token(self, trading_pair: str) -> str: + trading_rule: TradingRule = self._trading_rules[trading_pair] + return trading_rule.sell_order_collateral_token + + async def _update_positions(self): + params = {} + path = f"{CONSTANTS.PATH_SUBACCOUNT}/{self._dydx_v4_perpetual_chain_address}/subaccountNumber/{self.subaccount_id}" + response: Dict[str, Dict[str, Any]] = await self._api_get( + path_url=path, params=params, limit_id=CONSTANTS.PATH_SUBACCOUNT + ) + + # account = await self._get_account() + await self._process_open_positions(response["subaccount"]["openPerpetualPositions"]) + + async def _trading_pair_position_mode_set(self, mode: PositionMode, trading_pair: str) -> Tuple[bool, str]: + """ + :return: A tuple of boolean (true if success) and error message if the exchange returns one on failure. 
+ """ + if mode != PositionMode.ONEWAY: + self.trigger_event( + AccountEvent.PositionModeChangeFailed, + PositionModeChangeEvent( + self.current_timestamp, trading_pair, mode, "dydx_v4 only supports the ONEWAY position mode." + ), + ) + self.logger().debug( + f"dydx_v4 encountered a problem switching position mode to " + f"{mode} for {trading_pair}" + f" (dydx_v4 only supports the ONEWAY position mode)" + ) + else: + self._position_mode = PositionMode.ONEWAY + super().set_position_mode(PositionMode.ONEWAY) + self.trigger_event( + AccountEvent.PositionModeChangeSucceeded, + PositionModeChangeEvent(self.current_timestamp, trading_pair, mode), + ) + self.logger().debug(f"dydx_v4 switching position mode to " f"{mode} for {trading_pair} succeeded.") + + async def _set_trading_pair_leverage(self, trading_pair: str, leverage: int) -> Tuple[bool, str]: + success = True + msg = "" + + response: Dict[str, Dict[str, Any]] = await self._api_get( + path_url=CONSTANTS.PATH_MARKETS, + is_auth_required=False, + ) + + if "markets" not in response: + msg = "Failed to obtain markets information." + success = False + + if success: + markets_info = response["markets"][trading_pair] + self._margin_fractions[trading_pair] = { + "initial": Decimal(markets_info["initialMarginFraction"]), + "maintenance": Decimal(markets_info["maintenanceMarginFraction"]), + "clob_pair_id": markets_info["clobPairId"], + "atomicResolution": markets_info["atomicResolution"], + "stepBaseQuantums": markets_info["stepBaseQuantums"], + "quantumConversionExponent": markets_info["quantumConversionExponent"], + "subticksPerTick": markets_info["subticksPerTick"], + } + + max_leverage = int(Decimal("1") / self._margin_fractions[trading_pair]["initial"]) + if leverage > max_leverage: + self._perpetual_trading.set_leverage(trading_pair=trading_pair, leverage=max_leverage) + self.logger().warning(f"Exceeded max leverage allowed." 
+ f" Leverage for {trading_pair} has been reduced to {max_leverage}") + else: + self._perpetual_trading.set_leverage(trading_pair=trading_pair, leverage=leverage) + self.logger().info(f"Leverage for {trading_pair} successfully set to {leverage}.") + return success, msg + + async def _execute_set_leverage(self, trading_pair: str, leverage: int): + try: + success, msg = await self._set_trading_pair_leverage(trading_pair, leverage) + if not success: + self.logger().network(f"Error setting leverage {leverage} for {trading_pair}: {msg}") + except Exception: + self.logger().network(f"Error setting leverage {leverage} for {trading_pair}") + + async def _fetch_last_fee_payment(self, trading_pair: str) -> Tuple[int, Decimal, Decimal]: + pass + + async def _update_funding_payment(self, trading_pair: str, fire_event_on_new: bool) -> bool: + return True diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_user_stream_data_source.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_user_stream_data_source.py new file mode 100644 index 0000000000..a77d4ce679 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_user_stream_data_source.py @@ -0,0 +1,61 @@ +import asyncio +import logging +from typing import Any, Dict, Optional + +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_constants as CONSTANTS +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + + +class DydxV4PerpetualUserStreamDataSource(UserStreamTrackerDataSource): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, api_factory: Optional[WebAssistantsFactory], connector): + self._api_factory: WebAssistantsFactory = api_factory + self._ws_assistant: Optional[WSAssistant] = None + self._connector = connector + + super().__init__() + + @property + def last_recv_time(self): + if self._ws_assistant: + return self._ws_assistant.last_recv_time + return -1 + + async def _subscribe_channels(self, websocket_assistant: WSAssistant): + pass + + # TODO: the websocket ping callback is not implemented yet; without it the connection will eventually drop + async def _connected_websocket_assistant(self) -> WSAssistant: + if self._ws_assistant is None: + self.logger().info(f"Connecting to {CONSTANTS.DYDX_V4_WS_URL}") + self._ws_assistant = await self._api_factory.get_ws_assistant() + await self._ws_assistant.connect(ws_url=CONSTANTS.DYDX_V4_WS_URL, ping_timeout=CONSTANTS.HEARTBEAT_INTERVAL) + + subaccount_id = f"{self._connector._dydx_v4_perpetual_chain_address}/{self._connector.subaccount_id}" + + subscribe_account_request: WSJSONRequest = WSJSONRequest( + payload={ + "type": "subscribe", + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "id": subaccount_id, + }, + is_auth_required=False, + ) + await self._ws_assistant.send(subscribe_account_request) + self.logger().info("Subscribed to the user stream...") + return self._ws_assistant + + async def _process_event_message(self, event_message: Dict[str, Any], queue: asyncio.Queue): + if event_message.get("type", "") in [CONSTANTS.WS_TYPE_SUBSCRIBED, CONSTANTS.WS_TYPE_CHANNEL_DATA]: + await 
super()._process_event_message(event_message=event_message, queue=queue) diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_utils.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_utils.py new file mode 100644 index 0000000000..178e8094fb --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_utils.py @@ -0,0 +1,47 @@ +from decimal import Decimal + +from pydantic import Field, SecretStr + +from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData +from hummingbot.core.data_type.trade_fee import TradeFeeSchema + +CENTRALIZED = True + +EXAMPLE_PAIR = "BTC-USD" + +DEFAULT_FEES = TradeFeeSchema( + maker_percent_fee_decimal=Decimal("0.0001"), + taker_percent_fee_decimal=Decimal("0.0005"), +) + + +def clamp(value, minvalue, maxvalue): + return max(minvalue, min(value, maxvalue)) + + +class DydxV4PerpetualConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="dydx_v4_perpetual", client_data=None) + dydx_v4_perpetual_secret_phrase: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your dydx v4 secret_phrase(24 words)", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + dydx_v4_perpetual_chain_address: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your dydx_v4 chain address ( starts with 'dydx' )", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + + class Config: + title = "dydx_v4_perpetual" + + +KEYS = DydxV4PerpetualConfigMap.construct() diff --git a/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_web_utils.py b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_web_utils.py new file mode 100644 index 0000000000..4cc8ac0311 --- /dev/null +++ b/hummingbot/connector/derivative/dydx_v4_perpetual/dydx_v4_perpetual_web_utils.py @@ -0,0 +1,90 @@ +from typing import Any, Dict + +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_constants as CONSTANTS +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest +from hummingbot.core.web_assistant.rest_pre_processors import RESTPreProcessorBase +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + + +class DydxV4PerpetualRESTPreProcessor(RESTPreProcessorBase): + + async def pre_process(self, request: RESTRequest) -> RESTRequest: + if request.headers is None: + request.headers = {} + request.headers["Accept"] = ( + "application/json" + ) + return request + + +def public_rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: + """ + Creates a full URL for provided public REST endpoint + :param path_url: a public REST endpoint + :param domain: the dydx_v4 domain to connect to ("exchange" or "us"). The default value is "exchange" + :return: the full URL to the endpoint + """ + return CONSTANTS.DYDX_V4_REST_URL + path_url + + +def private_rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: + """ + Creates a full URL for provided private REST endpoint + :param path_url: a private REST endpoint + :param domain: the dYdX domain to connect to ("exchange" or "us"). 
The default value is "exchange" + :return: the full URL to the endpoint + """ + return CONSTANTS.DYDX_V4_REST_URL + path_url + + +def build_api_factory( + throttler: AsyncThrottler = None, +) -> WebAssistantsFactory: + throttler = throttler or create_throttler() + api_factory = WebAssistantsFactory( + throttler=throttler, + rest_pre_processors=[ + DydxV4PerpetualRESTPreProcessor(), + ], + ) + return api_factory + + +def create_throttler() -> AsyncThrottler: + return AsyncThrottler(CONSTANTS.RATE_LIMITS) + + +async def get_current_server_time(throttler: AsyncThrottler, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> float: + api_factory = build_api_factory_without_time_synchronizer_pre_processor(throttler=throttler) + rest_assistant = await api_factory.get_rest_assistant() + url = public_rest_url(CONSTANTS.PATH_TIME) + limit_id = CONSTANTS.LIMIT_ID_GET + response = await rest_assistant.execute_request( + url=url, + throttler_limit_id=limit_id, + method=RESTMethod.GET, + ) + server_time = float(response["epoch"]) + + return server_time + + +def build_api_factory_without_time_synchronizer_pre_processor(throttler: AsyncThrottler) -> WebAssistantsFactory: + api_factory = WebAssistantsFactory(throttler=throttler) + return api_factory + + +def is_exchange_information_valid(rule: Dict[str, Any]) -> bool: + """ + Verifies if a trading pair is enabled to operate with based on its exchange information + + :param exchange_info: the exchange information for a trading pair + + :return: True if the trading pair is enabled, False otherwise + """ + if rule["status"] == "ACTIVE": + valid = True + else: + valid = False + return valid diff --git a/test/connector/__init__.py b/hummingbot/connector/derivative/hashkey_perpetual/__init__.py similarity index 100% rename from test/connector/__init__.py rename to hummingbot/connector/derivative/hashkey_perpetual/__init__.py diff --git a/hummingbot/connector/derivative/hashkey_perpetual/dummy.pxd b/hummingbot/connector/derivative/hashkey_perpetual/dummy.pxd new file mode 100644 index 0000000000..4b098d6f59 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/dummy.pxd @@ -0,0 +1,2 @@ +cdef class dummy(): + pass diff --git a/hummingbot/connector/derivative/hashkey_perpetual/dummy.pyx b/hummingbot/connector/derivative/hashkey_perpetual/dummy.pyx new file mode 100644 index 0000000000..4b098d6f59 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/dummy.pyx @@ -0,0 +1,2 @@ +cdef class dummy(): + pass diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_api_order_book_data_source.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_api_order_book_data_source.py new file mode 100644 index 0000000000..5c955baae5 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_api_order_book_data_source.py @@ -0,0 +1,338 @@ +import asyncio +import time +from collections import defaultdict +from decimal import Decimal +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional + +from hummingbot.connector.derivative.hashkey_perpetual import ( + hashkey_perpetual_constants as CONSTANTS, + hashkey_perpetual_web_utils as web_utils, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_order_book import HashkeyPerpetualsOrderBook +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.data_type.funding_info import FundingInfo, 
FundingInfoUpdate +from hummingbot.core.data_type.order_book_message import OrderBookMessage +from hummingbot.core.data_type.perpetual_api_order_book_data_source import PerpetualAPIOrderBookDataSource +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_derivative import ( + HashkeyPerpetualDerivative, + ) + + +class HashkeyPerpetualAPIOrderBookDataSource(PerpetualAPIOrderBookDataSource): + HEARTBEAT_TIME_INTERVAL = 30.0 + ONE_HOUR = 60 * 60 + FIVE_MINUTE = 60 * 5 + EXCEPTION_INTERVAL = 5 + + _logger: Optional[HummingbotLogger] = None + _trading_pair_symbol_map: Dict[str, Mapping[str, str]] = {} + _mapping_initialization_lock = asyncio.Lock() + + def __init__(self, + trading_pairs: List[str], + connector: 'HashkeyPerpetualDerivative', + api_factory: Optional[WebAssistantsFactory] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None): + super().__init__(trading_pairs) + self._connector = connector + self._domain = domain + self._snapshot_messages_queue_key = CONSTANTS.SNAPSHOT_EVENT_TYPE + self._trade_messages_queue_key = CONSTANTS.TRADE_EVENT_TYPE + self._time_synchronizer = time_synchronizer + self._throttler = throttler + self._api_factory = api_factory or web_utils.build_api_factory( + throttler=self._throttler, + time_synchronizer=self._time_synchronizer, + domain=self._domain, + ) + self._message_queue: Dict[str, asyncio.Queue] = defaultdict(asyncio.Queue) + self._last_ws_message_sent_timestamp = 0 + + async def get_last_traded_prices(self, + trading_pairs: List[str], + domain: Optional[str] = None) -> Dict[str, float]: + return await self._connector.get_last_traded_prices(trading_pairs=trading_pairs) + + async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any]: + """ + Retrieves a copy of the full order book from the exchange, for a particular trading pair. 
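The snapshot request this data source performs is a plain REST GET with the exchange symbol and a depth limit. As a self-contained sketch of the equivalent call (base URL and path are the public values listed later in hashkey_perpetual_constants; the symbol is a sample value, and the real connector routes the request through its throttled web assistant rather than a bare aiohttp session):

```python
import asyncio
import aiohttp

BASE_URL = "https://api-glb.hashkey.com"   # PERPETUAL_BASE_URL from the constants module
SNAPSHOT_PATH = "/quote/v1/depth"          # SNAPSHOT_PATH_URL from the constants module

async def fetch_snapshot(symbol: str = "BTCUSDT-PERPETUAL") -> dict:
    # Same query parameters the data source sends: symbol plus a depth limit of 1000 levels.
    params = {"symbol": symbol, "limit": "1000"}
    async with aiohttp.ClientSession() as session:
        async with session.get(BASE_URL + SNAPSHOT_PATH, params=params) as resp:
            return await resp.json()

# asyncio.run(fetch_snapshot())  # the "t", "b" and "a" fields are parsed as shown in the code below
```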
+ + :param trading_pair: the trading pair for which the order book will be retrieved + + :return: the response from the exchange (JSON dictionary) + """ + params = { + "symbol": await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair), + "limit": "1000" + } + data = await self._connector._api_request(path_url=CONSTANTS.SNAPSHOT_PATH_URL, + method=RESTMethod.GET, + params=params) + return data + + async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: + snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair) + snapshot_timestamp: float = float(snapshot["t"]) * 1e-3 + snapshot_msg: OrderBookMessage = HashkeyPerpetualsOrderBook.snapshot_message_from_exchange_rest( + snapshot, + snapshot_timestamp, + metadata={"trading_pair": trading_pair} + ) + return snapshot_msg + + async def _parse_trade_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol=raw_message["symbol"]) + for trades in raw_message["data"]: + trades["q"] = self._connector.get_amount_of_contracts(trading_pair, int(trades["q"])) + trade_message: OrderBookMessage = HashkeyPerpetualsOrderBook.trade_message_from_exchange( + trades, {"trading_pair": trading_pair}) + message_queue.put_nowait(trade_message) + + async def _parse_funding_info_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + # Hashkey not support funding info in websocket + pass + + async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLoop, output: asyncio.Queue): + """ + This method runs continuously and request the full order book content from the exchange every hour. + The method uses the REST API from the exchange because it does not provide an endpoint to get the full order + book through websocket. With the information creates a snapshot messages that is added to the output queue + :param ev_loop: the event loop the method will run in + :param output: a queue to add the created snapshot messages + """ + while True: + try: + await asyncio.wait_for(self._process_ob_snapshot(snapshot_queue=output), timeout=self.ONE_HOUR) + except asyncio.TimeoutError: + await self._take_full_order_book_snapshot(trading_pairs=self._trading_pairs, snapshot_queue=output) + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error.", exc_info=True) + await self._take_full_order_book_snapshot(trading_pairs=self._trading_pairs, snapshot_queue=output) + await self._sleep(self.EXCEPTION_INTERVAL) + + async def listen_for_funding_info(self, output: asyncio.Queue): + """ + Reads the funding info events queue and updates the local funding info information. + """ + while True: + try: + # hashkey global not support funding rate event + await self._update_funding_info_by_api(self._trading_pairs, message_queue=output) + await self._sleep(self.FIVE_MINUTE) + except Exception as e: + self.logger().exception(f"Unexpected error when processing public funding info updates from exchange: {e}") + await self._sleep(self.EXCEPTION_INTERVAL) + + async def listen_for_subscriptions(self): + """ + Connects to the trade events and order diffs websocket endpoints and listens to the messages sent by the + exchange. Each message is stored in its own queue. 
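The listen_for_order_book_snapshots loop above leans on asyncio.wait_for to force a full REST snapshot once the refresh interval elapses, even while streaming updates keep arriving. A distilled, self-contained sketch of that pattern is shown here; the shortened intervals and the dummy worker are assumptions for illustration only.

```python
import asyncio

REFRESH_INTERVAL = 5.0   # the connector uses ONE_HOUR; shortened here for illustration
RETRY_INTERVAL = 1.0     # stands in for EXCEPTION_INTERVAL

async def process_stream():
    # Placeholder for draining websocket snapshot messages; runs until cancelled.
    while True:
        await asyncio.sleep(0.5)

async def snapshot_loop():
    while True:
        try:
            # Process streaming updates, but never for longer than the refresh interval.
            await asyncio.wait_for(process_stream(), timeout=REFRESH_INTERVAL)
        except asyncio.TimeoutError:
            print("interval elapsed -> request a full REST snapshot here")
        except asyncio.CancelledError:
            raise
        except Exception:
            print("unexpected error -> take a snapshot and back off")
            await asyncio.sleep(RETRY_INTERVAL)

# asyncio.run(snapshot_loop())  # loops forever; shown for structure only
```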
+ """ + ws = None + while True: + try: + ws: WSAssistant = await self._api_factory.get_ws_assistant() + await ws.connect(ws_url=CONSTANTS.WSS_PUBLIC_URL[self._domain]) + await self._subscribe_channels(ws) + self._last_ws_message_sent_timestamp = self._time() + + while True: + try: + seconds_until_next_ping = (CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL - ( + self._time() - self._last_ws_message_sent_timestamp)) + await asyncio.wait_for(self._process_ws_messages(ws=ws), timeout=seconds_until_next_ping) + except asyncio.TimeoutError: + ping_time = self._time() + payload = { + "ping": int(ping_time * 1e3) + } + ping_request = WSJSONRequest(payload=payload) + await ws.send(request=ping_request) + self._last_ws_message_sent_timestamp = ping_time + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred when listening to order book streams. Retrying in 5 seconds...", + exc_info=True, + ) + await self._sleep(self.EXCEPTION_INTERVAL) + finally: + ws and await ws.disconnect() + + async def _subscribe_channels(self, ws: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. + :param ws: the websocket assistant used to connect to the exchange + """ + try: + for trading_pair in self._trading_pairs: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + trade_payload = { + "topic": "trade", + "event": "sub", + "symbol": symbol, + "params": { + "binary": False + } + } + subscribe_trade_request: WSJSONRequest = WSJSONRequest(payload=trade_payload) + + depth_payload = { + "topic": "depth", + "event": "sub", + "symbol": symbol, + "params": { + "binary": False + } + } + subscribe_orderbook_request: WSJSONRequest = WSJSONRequest(payload=depth_payload) + + await ws.send(subscribe_trade_request) + await ws.send(subscribe_orderbook_request) + + self.logger().info(f"Subscribed to public order book and trade channels of {trading_pair}...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to order book trading and delta streams...", + exc_info=True + ) + raise + + async def _process_ws_messages(self, ws: WSAssistant): + async for ws_response in ws.iter_messages(): + data = ws_response.data + if data.get("msg") == "Success": + continue + event_type = data.get("topic") + if event_type == CONSTANTS.SNAPSHOT_EVENT_TYPE: + self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE].put_nowait(data) + elif event_type == CONSTANTS.TRADE_EVENT_TYPE: + self._message_queue[CONSTANTS.TRADE_EVENT_TYPE].put_nowait(data) + + async def _process_ob_snapshot(self, snapshot_queue: asyncio.Queue): + message_queue = self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] + while True: + try: + json_msg = await message_queue.get() + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol( + symbol=json_msg["symbol"]) + for snapshot_data in json_msg["data"]: + snapshot = self.convert_snapshot_amounts(snapshot_data, trading_pair) + order_book_message: OrderBookMessage = HashkeyPerpetualsOrderBook.snapshot_message_from_exchange_websocket( + snapshot, snapshot["t"], {"trading_pair": trading_pair}) + snapshot_queue.put_nowait(order_book_message) + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error when processing public order book updates from exchange") + raise + + def convert_snapshot_amounts(self, snapshot_data, trading_pair): + msg = {"a": [], "b": [], 
"t": snapshot_data["t"]} + for ask_order_book in snapshot_data["a"]: + msg["a"].append([ask_order_book[0], self._connector.get_amount_of_contracts(trading_pair, int(ask_order_book[1]))]) + for bid_order_book in snapshot_data["b"]: + msg["b"].append([bid_order_book[0], self._connector.get_amount_of_contracts(trading_pair, int(bid_order_book[1]))]) + + return msg + + async def _take_full_order_book_snapshot(self, trading_pairs: List[str], snapshot_queue: asyncio.Queue): + for trading_pair in trading_pairs: + try: + snapshot_data: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair=trading_pair) + snapshot = self.convert_snapshot_amounts(snapshot_data, trading_pair) + snapshot_timestamp: float = float(snapshot["t"]) * 1e-3 + snapshot_msg: OrderBookMessage = HashkeyPerpetualsOrderBook.snapshot_message_from_exchange_rest( + snapshot, + snapshot_timestamp, + metadata={"trading_pair": trading_pair} + ) + snapshot_queue.put_nowait(snapshot_msg) + self.logger().debug(f"Saved order book snapshot for {trading_pair}") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error(f"Unexpected error fetching order book snapshot for {trading_pair}.", + exc_info=True) + await self._sleep(self.EXCEPTION_INTERVAL) + + async def _update_funding_info_by_api(self, trading_pairs: list, message_queue: asyncio.Queue) -> None: + funding_rate_list = await self._request_funding_rate() + funding_infos = {item["symbol"]: item for item in funding_rate_list} + for trading_pair in trading_pairs: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + index_symbol = await self._connector.exchange_index_symbol_associated_to_pair(trading_pair=trading_pair) + funding_rate_info = funding_infos[symbol] + mark_info, index_info = await asyncio.gather( + self._request_mark_price(symbol), + self._request_index_price(index_symbol), + ) + + funding_info = FundingInfoUpdate( + trading_pair=trading_pair, + index_price=Decimal(index_info["index"][index_symbol]), + mark_price=Decimal(mark_info["price"]), + next_funding_utc_timestamp=int(float(funding_rate_info["nextSettleTime"]) * 1e-3), + rate=Decimal(funding_rate_info["rate"]), + ) + + message_queue.put_nowait(funding_info) + + async def get_funding_info(self, trading_pair: str) -> FundingInfo: + funding_rate_info, mark_info, index_info = await self._request_complete_funding_info(trading_pair) + index_symbol = await self._connector.exchange_index_symbol_associated_to_pair(trading_pair=trading_pair) + funding_info = FundingInfo( + trading_pair=trading_pair, + index_price=Decimal(index_info["index"][index_symbol]), + mark_price=Decimal(mark_info["price"]), + next_funding_utc_timestamp=int(float(funding_rate_info["nextSettleTime"]) * 1e-3), + rate=Decimal(funding_rate_info["rate"]), + ) + return funding_info + + async def _request_complete_funding_info(self, trading_pair: str): + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + index_symbol = await self._connector.exchange_index_symbol_associated_to_pair(trading_pair=trading_pair) + + funding_rate_info, mark_info, index_info = await asyncio.gather( + self._request_funding_rate(symbol), + self._request_mark_price(symbol), + self._request_index_price(index_symbol), + ) + funding_rate_dict = {item["symbol"]: item for item in funding_rate_info} + return funding_rate_dict[symbol], mark_info, index_info + + async def _request_funding_rate(self, symbol: str = None): + params = {"timestamp": 
int(self._time_synchronizer.time() * 1e3)} + if symbol: + params["symbol"] = symbol + return await self._connector._api_request(path_url=CONSTANTS.FUNDING_INFO_URL, + method=RESTMethod.GET, + params=params) + + async def _request_mark_price(self, symbol: str): + return await self._connector._api_request(path_url=CONSTANTS.MARK_PRICE_URL, + method=RESTMethod.GET, + params={"symbol": symbol}) + + async def _request_index_price(self, symbol: str): + return await self._connector._api_request(path_url=CONSTANTS.INDEX_PRICE_URL, + method=RESTMethod.GET, + params={"symbol": symbol}) + + def _time(self): + return time.time() diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_auth.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_auth.py new file mode 100644 index 0000000000..de7344f1a8 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_auth.py @@ -0,0 +1,74 @@ +import hashlib +import hmac +from collections import OrderedDict +from typing import Any, Dict +from urllib.parse import urlencode + +import hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_constants as CONSTANTS +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.connections.data_types import RESTRequest, WSRequest + + +class HashkeyPerpetualAuth(AuthBase): + """ + Auth class required by Hashkey Perpetual API + """ + + def __init__(self, api_key: str, secret_key: str, time_provider: TimeSynchronizer): + self.api_key = api_key + self.secret_key = secret_key + self.time_provider = time_provider + + @staticmethod + def keysort(dictionary: Dict[str, str]) -> Dict[str, str]: + return OrderedDict(sorted(dictionary.items(), key=lambda t: t[0])) + + def _generate_signature(self, params: Dict[str, Any]) -> str: + encoded_params_str = urlencode(params) + digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() + return digest + + async def rest_authenticate(self, request: RESTRequest) -> RESTRequest: + """ + Adds the server time and the signature to the request, required for authenticated interactions. It also adds + the required parameter in the request header.
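The scheme implemented by _generate_signature above is a plain HMAC-SHA256 over the url-encoded, key-sorted request parameters. A standalone sketch of that computation, using made-up credentials and sample parameter values:

```python
import hashlib
import hmac
from collections import OrderedDict
from urllib.parse import urlencode

secret_key = "my-secret"  # placeholder credential
params = {"symbol": "BTCUSDT-PERPETUAL", "timestamp": 1703696385826}  # sample values

# Sort keys (the keysort helper above), url-encode, then HMAC-SHA256 with the API secret.
sorted_params = OrderedDict(sorted(params.items(), key=lambda t: t[0]))
payload = urlencode(sorted_params)
signature = hmac.new(secret_key.encode("utf8"), payload.encode("utf8"), hashlib.sha256).hexdigest()

print(payload)    # symbol=BTCUSDT-PERPETUAL&timestamp=1703696385826
print(signature)  # appended to the request as the "signature" parameter
```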
+ :param request: the request to be configured for authenticated interaction + """ + request.params = self.add_auth_to_params(params=request.params) + headers = { + "X-HK-APIKEY": self.api_key, + "INPUT-SOURCE": CONSTANTS.HBOT_BROKER_ID, + } + if request.headers is not None: + headers.update(request.headers) + request.headers = headers + return request + + async def ws_authenticate(self, request: WSRequest) -> WSRequest: + return request # pass-through + + def add_auth_to_params(self, + params: Dict[str, Any]): + timestamp = int(self.time_provider.time() * 1e3) + request_params = params or {} + request_params["timestamp"] = timestamp + request_params = self.keysort(request_params) + signature = self._generate_signature(params=request_params) + request_params["signature"] = signature + return request_params + + def generate_ws_authentication_message(self): + """ + Generates the authentication message to start receiving messages from + the 3 private ws channels + """ + expires = int((self.time_provider.time() + 10) * 1e3) + _val = f'GET/realtime{expires}' + signature = hmac.new(self.secret_key.encode("utf8"), + _val.encode("utf8"), hashlib.sha256).hexdigest() + auth_message = { + "op": "auth", + "args": [self.api_key, expires, signature] + } + return auth_message diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_constants.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_constants.py new file mode 100644 index 0000000000..10bf5eb10e --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_constants.py @@ -0,0 +1,128 @@ +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit +from hummingbot.core.data_type.in_flight_order import OrderState + +EXCHANGE_NAME = "hashkey_perpetual" +DEFAULT_DOMAIN = "hashkey_perpetual" +HBOT_BROKER_ID = "10000800001" +BROKER_ID = "HASHKEY-" +MAX_ORDER_ID_LEN = 32 + +TESTNET_DOMAIN = "hashkey_perpetual_testnet" + +PERPETUAL_BASE_URL = "https://api-glb.hashkey.com" +TESTNET_BASE_URL = "https://api-glb.sim.hashkeydev.com" + +WSS_PUBLIC_URL = {"hashkey_perpetual": "wss://stream-glb.hashkey.com/quote/ws/v1", + "hashkey_perpetual_testnet": "wss://stream.sim.bmuxdc.com/quote/ws/v1"} + +WSS_PRIVATE_URL = {"hashkey_perpetual": "wss://stream-glb.hashkey.com/api/v1/ws/{listenKey}", + "hashkey_perpetual_testnet": "wss://stream.sim.bmuxdc.com/api/v1/ws/{listenKey}"} + +# Websocket event types +TRADE_EVENT_TYPE = "trade" +SNAPSHOT_EVENT_TYPE = "depth" + +TIME_IN_FORCE_GTC = "GTC" # Good till cancelled +TIME_IN_FORCE_MAKER = "LIMIT_MAKER" # Maker +TIME_IN_FORCE_IOC = "IOC" # Immediate or cancel +TIME_IN_FORCE_FOK = "FOK" # Fill or kill + +# Public API Endpoints +SNAPSHOT_PATH_URL = "/quote/v1/depth" +TICKER_PRICE_URL = "/quote/v1/ticker/price" +TICKER_PRICE_CHANGE_URL = "/quote/v1/ticker/24hr" +EXCHANGE_INFO_URL = "/api/v1/exchangeInfo" +RECENT_TRADES_URL = "/quote/v1/trades" +PING_URL = "/api/v1/ping" +SERVER_TIME_PATH_URL = "/api/v1/time" + +# Public funding info +FUNDING_INFO_URL = "/api/v1/futures/fundingRate" +MARK_PRICE_URL = "/quote/v1/markPrice" +INDEX_PRICE_URL = "/quote/v1/index" + +# Private API Endpoints +ACCOUNT_INFO_URL = "/api/v1/futures/balance" +POSITION_INFORMATION_URL = "/api/v1/futures/positions" +ORDER_URL = "/api/v1/futures/order" +CANCEL_ALL_OPEN_ORDERS_URL = "/api/v1/futures/batchOrders" +ACCOUNT_TRADE_LIST_URL = "/api/v1/futures/userTrades" +SET_LEVERAGE_URL = "/api/v1/futures/leverage" +USER_STREAM_PATH_URL = "/api/v1/userDataStream" + +# 
Funding Settlement Time Span +FUNDING_SETTLEMENT_DURATION = (0, 30) # seconds before snapshot, seconds after snapshot + +# Order States +ORDER_STATE = { + "PENDING": OrderState.PENDING_CREATE, + "NEW": OrderState.OPEN, + "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, + "FILLED": OrderState.FILLED, + "PENDING_CANCEL": OrderState.PENDING_CANCEL, + "CANCELED": OrderState.CANCELED, + "REJECTED": OrderState.FAILED, + "PARTIALLY_CANCELED": OrderState.CANCELED, +} + +# Rate Limit Type +REQUEST_WEIGHT = "REQUEST_WEIGHT" +ORDERS_1MIN = "ORDERS_1MIN" +ORDERS_1SEC = "ORDERS_1SEC" + +WS_HEARTBEAT_TIME_INTERVAL = 30.0 + +# Rate Limit time intervals +ONE_HOUR = 3600 +ONE_MINUTE = 60 +ONE_SECOND = 1 +ONE_DAY = 86400 + +MAX_REQUEST = 2400 + +RATE_LIMITS = [ + # Pool Limits + RateLimit(limit_id=REQUEST_WEIGHT, limit=2400, time_interval=ONE_MINUTE), + RateLimit(limit_id=ORDERS_1MIN, limit=1200, time_interval=ONE_MINUTE), + RateLimit(limit_id=ORDERS_1SEC, limit=300, time_interval=10), + # Weight Limits for individual endpoints + RateLimit(limit_id=SNAPSHOT_PATH_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=20)]), + RateLimit(limit_id=TICKER_PRICE_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=2)]), + RateLimit(limit_id=TICKER_PRICE_CHANGE_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=EXCHANGE_INFO_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=40)]), + RateLimit(limit_id=RECENT_TRADES_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=USER_STREAM_PATH_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=PING_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=SERVER_TIME_PATH_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=ORDER_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1), + LinkedLimitWeightPair(ORDERS_1MIN, weight=1), + LinkedLimitWeightPair(ORDERS_1SEC, weight=1)]), + RateLimit(limit_id=CANCEL_ALL_OPEN_ORDERS_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=ACCOUNT_TRADE_LIST_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=5)]), + RateLimit(limit_id=SET_LEVERAGE_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=ACCOUNT_INFO_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=5)]), + RateLimit(limit_id=POSITION_INFORMATION_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, weight=5, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=5)]), + RateLimit(limit_id=MARK_PRICE_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, weight=1, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=INDEX_PRICE_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, weight=1, + 
linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), + RateLimit(limit_id=FUNDING_INFO_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, weight=1, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=1)]), +] + +ORDER_NOT_EXIST_ERROR_CODE = -1143 +ORDER_NOT_EXIST_MESSAGE = "Order not found" diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_derivative.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_derivative.py new file mode 100644 index 0000000000..2cfbd08723 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_derivative.py @@ -0,0 +1,844 @@ +import asyncio +import time +from collections import defaultdict +from decimal import Decimal +from typing import TYPE_CHECKING, Any, AsyncIterable, Dict, List, Optional, Tuple + +import pandas as pd +from bidict import bidict + +from hummingbot.connector.constants import s_decimal_0, s_decimal_NaN +from hummingbot.connector.derivative.hashkey_perpetual import ( + hashkey_perpetual_constants as CONSTANTS, + hashkey_perpetual_utils as hashkey_utils, + hashkey_perpetual_web_utils as web_utils, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_api_order_book_data_source import ( + HashkeyPerpetualAPIOrderBookDataSource, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_auth import HashkeyPerpetualAuth +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_user_stream_data_source import ( + HashkeyPerpetualUserStreamDataSource, +) +from hummingbot.connector.derivative.position import Position +from hummingbot.connector.perpetual_derivative_py_base import PerpetualDerivativePyBase +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair +from hummingbot.core.api_throttler.data_types import RateLimit +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, PositionSide, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.utils.async_utils import safe_gather +from hummingbot.core.utils.estimate_fee import build_perpetual_trade_fee +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + +bpm_logger = None + + +class HashkeyPerpetualDerivative(PerpetualDerivativePyBase): + web_utils = web_utils + SHORT_POLL_INTERVAL = 5.0 + UPDATE_ORDER_STATUS_MIN_INTERVAL = 10.0 + LONG_POLL_INTERVAL = 120.0 + + def __init__( + self, + client_config_map: "ClientConfigAdapter", + hashkey_perpetual_api_key: str = None, + hashkey_perpetual_secret_key: str = None, + trading_pairs: Optional[List[str]] = None, + trading_required: bool = True, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + self.hashkey_perpetual_api_key = hashkey_perpetual_api_key + self.hashkey_perpetual_secret_key = hashkey_perpetual_secret_key + self._trading_required = trading_required + self._trading_pairs = trading_pairs + self._domain = domain + self._position_mode = PositionMode.HEDGE 
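Most endpoint limits above are linked to the shared REQUEST_WEIGHT pool (2400 per minute), so a heavy call such as EXCHANGE_INFO_URL (weight 40) consumes far more of the budget than a ticker request (weight 2). A small sketch of how such linked limits are declared and consumed follows; it assumes Hummingbot's AsyncThrottler context-manager usage seen elsewhere in this diff, and shows only a subset of the limits.

```python
from hummingbot.core.api_throttler.async_throttler import AsyncThrottler
from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit

REQUEST_WEIGHT = "REQUEST_WEIGHT"

RATE_LIMITS = [
    # Shared pool: 2400 weight units per minute.
    RateLimit(limit_id=REQUEST_WEIGHT, limit=2400, time_interval=60),
    # Each /api/v1/exchangeInfo call also counts 40 units against the shared pool.
    RateLimit(limit_id="/api/v1/exchangeInfo", limit=2400, time_interval=60,
              linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=40)]),
]

async def fetch_exchange_info(throttler: AsyncThrottler):
    # Entering the task context blocks until both the endpoint limit and the
    # linked REQUEST_WEIGHT pool have capacity for this call.
    async with throttler.execute_task(limit_id="/api/v1/exchangeInfo"):
        ...  # perform the REST call here
```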
+ self._last_trade_history_timestamp = None + super().__init__(client_config_map) + self._perpetual_trading.set_position_mode(PositionMode.HEDGE) + + @property + def name(self) -> str: + return CONSTANTS.EXCHANGE_NAME + + @property + def authenticator(self) -> HashkeyPerpetualAuth: + return HashkeyPerpetualAuth(self.hashkey_perpetual_api_key, self.hashkey_perpetual_secret_key, + self._time_synchronizer) + + @property + def rate_limits_rules(self) -> List[RateLimit]: + return CONSTANTS.RATE_LIMITS + + @property + def domain(self) -> str: + return self._domain + + @property + def client_order_id_max_length(self) -> int: + return CONSTANTS.MAX_ORDER_ID_LEN + + @property + def client_order_id_prefix(self) -> str: + return CONSTANTS.BROKER_ID + + @property + def trading_rules_request_path(self) -> str: + return CONSTANTS.EXCHANGE_INFO_URL + + @property + def trading_pairs_request_path(self) -> str: + return CONSTANTS.EXCHANGE_INFO_URL + + @property + def check_network_request_path(self) -> str: + return CONSTANTS.PING_URL + + @property + def trading_pairs(self): + return self._trading_pairs + + @property + def is_cancel_request_in_exchange_synchronous(self) -> bool: + return True + + @property + def is_trading_required(self) -> bool: + return self._trading_required + + @property + def funding_fee_poll_interval(self) -> int: + return 600 + + def supported_order_types(self) -> List[OrderType]: + """ + :return a list of OrderType supported by this connector + """ + return [OrderType.LIMIT, OrderType.MARKET, OrderType.LIMIT_MAKER] + + def supported_position_modes(self): + """ + This method needs to be overridden to provide the accurate information depending on the exchange. + """ + return [PositionMode.HEDGE] + + def get_buy_collateral_token(self, trading_pair: str) -> str: + trading_rule: TradingRule = self._trading_rules[trading_pair] + return trading_rule.buy_order_collateral_token + + def get_sell_collateral_token(self, trading_pair: str) -> str: + trading_rule: TradingRule = self._trading_rules[trading_pair] + return trading_rule.sell_order_collateral_token + + def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): + error_description = str(request_exception) + is_time_synchronizer_related = ("-1021" in error_description + and "Timestamp for this request" in error_description) + return is_time_synchronizer_related + + def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: + return str(CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE) in str( + status_update_exception + ) and CONSTANTS.ORDER_NOT_EXIST_MESSAGE in str(status_update_exception) + + def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool: + return False + + def _create_web_assistants_factory(self) -> WebAssistantsFactory: + return web_utils.build_api_factory( + throttler=self._throttler, + time_synchronizer=self._time_synchronizer, + domain=self._domain, + auth=self._auth) + + def _create_order_book_data_source(self) -> OrderBookTrackerDataSource: + return HashkeyPerpetualAPIOrderBookDataSource( + trading_pairs=self._trading_pairs, + connector=self, + domain=self.domain, + api_factory=self._web_assistants_factory, + throttler=self._throttler, + time_synchronizer=self._time_synchronizer) + + def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource: + return HashkeyPerpetualUserStreamDataSource( + auth=self._auth, + trading_pairs=self._trading_pairs, + connector=self, + 
api_factory=self._web_assistants_factory, + domain=self.domain, + ) + + def _get_fee(self, + base_currency: str, + quote_currency: str, + order_type: OrderType, + order_side: TradeType, + position_action: PositionAction, + amount: Decimal, + price: Decimal = s_decimal_NaN, + is_maker: Optional[bool] = None) -> TradeFeeBase: + is_maker = is_maker or False + fee = build_perpetual_trade_fee( + self.name, + is_maker, + position_action=position_action, + base_currency=base_currency, + quote_currency=quote_currency, + order_type=order_type, + order_side=order_side, + amount=amount, + price=price, + ) + return fee + + async def _update_trading_fees(self): + """ + Update fees information from the exchange + """ + pass + + async def _status_polling_loop_fetch_updates(self): + await safe_gather( + self._update_order_fills_from_trades(), + self._update_order_status(), + self._update_balances(), + self._update_positions(), + ) + + async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): + api_params = {"type": "LIMIT"} + if tracked_order.exchange_order_id: + api_params["orderId"] = tracked_order.exchange_order_id + else: + api_params["clientOrderId"] = tracked_order.client_order_id + cancel_result = await self._api_delete( + path_url=CONSTANTS.ORDER_URL, + params=api_params, + is_auth_required=True) + if cancel_result.get("code") == -2011 and "Unknown order sent." == cancel_result.get("msg", ""): + self.logger().debug(f"The order {order_id} does not exist on Hashkey Perpetuals. " + f"No cancelation needed.") + await self._order_tracker.process_order_not_found(order_id) + raise IOError(f"{cancel_result.get('code')} - {cancel_result['msg']}") + if cancel_result.get("status") == "CANCELED": + return True + return False + + async def _place_order( + self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Decimal, + position_action: PositionAction = PositionAction.NIL, + **kwargs, + ) -> Tuple[str, float]: + + price_str = f"{price:f}" + symbol = await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + side = f"BUY_{position_action.value}" if trade_type is TradeType.BUY else f"SELL_{position_action.value}" + api_params = {"symbol": symbol, + "side": side, + "quantity": self.get_quantity_of_contracts(trading_pair, amount), + "type": "LIMIT", + "priceType": "MARKET" if order_type is OrderType.MARKET else "INPUT", + "clientOrderId": order_id + } + if order_type.is_limit_type(): + api_params["price"] = price_str + if order_type == OrderType.LIMIT: + api_params["timeInForce"] = CONSTANTS.TIME_IN_FORCE_GTC + if order_type == OrderType.LIMIT_MAKER: + api_params["timeInForce"] = CONSTANTS.TIME_IN_FORCE_MAKER + try: + order_result = await self._api_post( + path_url=CONSTANTS.ORDER_URL, + params=api_params, + is_auth_required=True) + o_id = str(order_result["orderId"]) + transact_time = int(order_result["time"]) * 1e-3 + except IOError as e: + error_description = str(e) + is_server_overloaded = ("status is 503" in error_description + and "Unknown error, please check your request or try again later." 
in error_description) + if is_server_overloaded: + o_id = "UNKNOWN" + transact_time = time.time() + else: + raise + return o_id, transact_time + + async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: + trade_updates = [] + + if order.exchange_order_id is not None: + exchange_order_id = int(order.exchange_order_id) + fills_data = await self._api_get( + path_url=CONSTANTS.ACCOUNT_TRADE_LIST_URL, + params={ + "clientOrderId": order.client_order_id, + }, + is_auth_required=True, + limit_id=CONSTANTS.ACCOUNT_TRADE_LIST_URL) + if fills_data is not None: + for trade in fills_data: + exchange_order_id = str(trade["orderId"]) + if exchange_order_id != str(order.exchange_order_id): + continue + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=order.trade_type, + percent_token=trade["commissionAsset"], + flat_fees=[TokenAmount(amount=Decimal(trade["commission"]), token=trade["commissionAsset"])] + ) + amount = self.get_amount_of_contracts(order.trading_pair, int(trade["quantity"])) + trade_update = TradeUpdate( + trade_id=str(trade["tradeId"]), + client_order_id=order.client_order_id, + exchange_order_id=exchange_order_id, + trading_pair=order.trading_pair, + fee=fee, + fill_base_amount=Decimal(amount), + fill_quote_amount=Decimal(trade["price"]) * amount, + fill_price=Decimal(trade["price"]), + fill_timestamp=int(trade["time"]) * 1e-3, + ) + trade_updates.append(trade_update) + + return trade_updates + + async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate: + updated_order_data = await self._api_get( + path_url=CONSTANTS.ORDER_URL, + params={ + "clientOrderId": tracked_order.client_order_id}, + is_auth_required=True) + + new_state = CONSTANTS.ORDER_STATE[updated_order_data["status"]] + + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=str(updated_order_data["orderId"]), + trading_pair=tracked_order.trading_pair, + update_timestamp=int(updated_order_data["updateTime"]) * 1e-3, + new_state=new_state, + ) + + return order_update + + async def _iter_user_event_queue(self) -> AsyncIterable[Dict[str, any]]: + while True: + try: + yield await self._user_stream_tracker.user_stream.get() + except asyncio.CancelledError: + raise + except Exception: + self.logger().network( + "Unknown error. Retrying after 1 seconds.", + exc_info=True, + app_warning_msg="Could not fetch user events from Hashkey. Check API key and network connection.", + ) + await self._sleep(1.0) + + async def _user_stream_event_listener(self): + """ + This functions runs in background continuously processing the events received from the exchange by the user + stream data source. It keeps reading events from the queue until the task is interrupted. + The events received are balance updates, order updates and trade events. 
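The listener below keys off the "e" field of each event; for a contractExecutionReport it reads the client order id ("c"), exchange order id ("i"), execution status ("X"), last filled quantity in contracts ("l"), last filled price ("L") and event time ("E"). A stripped-down sketch of that field mapping, using a fabricated sample event and an assumed contract multiplier (the connector derives the multiplier from the trading rules instead):

```python
from decimal import Decimal

# Fabricated sample event; field meanings follow the handler in this file.
event = {
    "e": "contractExecutionReport",
    "c": "HASHKEY-123",          # client order id
    "i": "1649292498437183232",  # exchange order id
    "X": "PARTIALLY_FILLED",     # execution / order status
    "l": "5",                    # last filled quantity, in contracts
    "L": "42000.5",              # last filled price
    "E": "1703696385826",        # event time in ms
}

CONTRACT_MULTIPLIER = Decimal("0.001")  # assumption for illustration

if event.get("e") == "contractExecutionReport":
    filled_base = Decimal(event["l"]) * CONTRACT_MULTIPLIER
    fill_price = Decimal(event["L"])
    print(event["c"], event["X"], filled_base, fill_price, int(event["E"]) / 1000)
```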
+ """ + async for event_messages in self._iter_user_event_queue(): + if isinstance(event_messages, dict) and "ping" in event_messages: + continue + + for event_message in event_messages: + try: + event_type = event_message.get("e") + if event_type == "contractExecutionReport": + execution_type = event_message.get("X") + client_order_id = event_message.get("c") + updatable_order = self._order_tracker.all_updatable_orders.get(client_order_id) + if updatable_order is not None: + if execution_type in ["PARTIALLY_FILLED", "FILLED"]: + fee = TradeFeeBase.new_perpetual_fee( + fee_schema=self.trade_fee_schema(), + position_action=PositionAction.CLOSE if event_message["C"] else PositionAction.OPEN, + percent_token=event_message["N"], + flat_fees=[TokenAmount(amount=Decimal(event_message["n"]), token=event_message["N"])] + ) + base_amount = Decimal(self.get_amount_of_contracts(updatable_order.trading_pair, int(event_message["l"]))) + trade_update = TradeUpdate( + trade_id=str(event_message["d"]), + client_order_id=client_order_id, + exchange_order_id=str(event_message["i"]), + trading_pair=updatable_order.trading_pair, + fee=fee, + fill_base_amount=base_amount, + fill_quote_amount=base_amount * Decimal(event_message["L"] or event_message["p"]), + fill_price=Decimal(event_message["L"]), + fill_timestamp=int(event_message["E"]) * 1e-3, + ) + self._order_tracker.process_trade_update(trade_update) + + order_update = OrderUpdate( + trading_pair=updatable_order.trading_pair, + update_timestamp=int(event_message["E"]) * 1e-3, + new_state=CONSTANTS.ORDER_STATE[event_message["X"]], + client_order_id=client_order_id, + exchange_order_id=str(event_message["i"]), + ) + self._order_tracker.process_order_update(order_update=order_update) + + elif event_type == "outboundContractAccountInfo": + balances = event_message["B"] + for balance_entry in balances: + asset_name = balance_entry["a"] + free_balance = Decimal(balance_entry["f"]) + total_balance = Decimal(balance_entry["f"]) + Decimal(balance_entry["l"]) + self._account_available_balances[asset_name] = free_balance + self._account_balances[asset_name] = total_balance + + elif event_type == "outboundContractPositionInfo": + ex_trading_pair = event_message["s"] + hb_trading_pair = await self.trading_pair_associated_to_exchange_symbol(ex_trading_pair) + position_side = PositionSide(event_message["S"]) + unrealized_pnl = Decimal(str(event_message["up"])) + entry_price = Decimal(str(event_message["p"])) + amount = Decimal(self.get_amount_of_contracts(hb_trading_pair, int(event_message["P"]))) + leverage = Decimal(event_message["v"]) + pos_key = self._perpetual_trading.position_key(hb_trading_pair, position_side) + if amount != s_decimal_0: + position = Position( + trading_pair=hb_trading_pair, + position_side=position_side, + unrealized_pnl=unrealized_pnl, + entry_price=entry_price, + amount=amount * (Decimal("-1.0") if position_side == PositionSide.SHORT else Decimal("1.0")), + leverage=leverage, + ) + self._perpetual_trading.set_position(pos_key, position) + else: + self._perpetual_trading.remove_position(pos_key) + + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error in user stream listener loop.", exc_info=True) + await self._sleep(5.0) + + async def _format_trading_rules(self, exchange_info_dict: Dict[str, Any]) -> List[TradingRule]: + """ + Example: + { + "timezone": "UTC", + "serverTime": "1703696385826", + "brokerFilters": [], + "symbols": [], + "options": [], + "contracts": [ + { + "filters": [ + { + 
"minPrice": "0.1", + "maxPrice": "100000.00000000", + "tickSize": "0.1", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.001", + "maxQty": "10", + "stepSize": "0.001", + "marketOrderMinQty": "0", + "marketOrderMaxQty": "0", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "0", + "filterType": "MIN_NOTIONAL" + }, + { + "maxSellPrice": "999999", + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "maxEntrustNum": 200, + "maxConditionNum": 200, + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ], + "exchangeId": "301", + "symbol": "BTCUSDT-PERPETUAL", + "symbolName": "BTCUSDT-PERPETUAL", + "status": "TRADING", + "baseAsset": "BTCUSDT-PERPETUAL", + "baseAssetPrecision": "0.001", + "quoteAsset": "USDT", + "quoteAssetPrecision": "0.1", + "icebergAllowed": false, + "inverse": false, + "index": "USDT", + "marginToken": "USDT", + "marginPrecision": "0.0001", + "contractMultiplier": "0.001", + "underlying": "BTC", + "riskLimits": [ + { + "riskLimitId": "200000722", + "quantity": "1000.00", + "initialMargin": "0.10", + "maintMargin": "0.005", + "isWhite": false + } + ] + } + ], + "coins": [] + } + """ + trading_pair_rules = exchange_info_dict.get("contracts", []) + retval = [] + for rule in trading_pair_rules: + try: + if not hashkey_utils.is_exchange_information_valid(rule): + continue + + trading_pair = f"{rule['underlying']}-{rule['quoteAsset']}" + + trading_filter_info = {item["filterType"]: item for item in rule.get("filters", [])} + + min_order_size = trading_filter_info.get("LOT_SIZE", {}).get("minQty") + min_price_increment = trading_filter_info.get("PRICE_FILTER", {}).get("minPrice") + min_base_amount_increment = rule.get("baseAssetPrecision") + min_notional_size = trading_filter_info.get("MIN_NOTIONAL", {}).get("minNotional") + + retval.append( + TradingRule(trading_pair, + min_order_size=Decimal(min_order_size), + min_price_increment=Decimal(min_price_increment), + min_base_amount_increment=Decimal(min_base_amount_increment), + min_notional_size=Decimal(min_notional_size))) + + except Exception: + self.logger().exception(f"Error parsing the trading pair rule {rule.get('symbol')}. 
Skipping.") + return retval + + def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]): + mapping = bidict() + for symbol_data in filter(hashkey_utils.is_exchange_information_valid, exchange_info["contracts"]): + mapping[symbol_data["symbol"]] = combine_to_hb_trading_pair(base=symbol_data["underlying"], + quote=symbol_data["quoteAsset"]) + self._set_trading_pair_symbol_map(mapping) + + async def exchange_index_symbol_associated_to_pair(self, trading_pair: str): + symbol = await self.exchange_symbol_associated_to_pair(trading_pair) + return symbol[:-10] + + async def _get_last_traded_price(self, trading_pair: str) -> float: + params = { + "symbol": await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair), + } + resp_json = await self._api_request( + method=RESTMethod.GET, + path_url=CONSTANTS.TICKER_PRICE_URL, + params=params, + ) + + return float(resp_json[0]["p"]) + + async def _update_balances(self): + local_asset_names = set(self._account_balances.keys()) + remote_asset_names = set() + + balances = await self._api_request( + method=RESTMethod.GET, + path_url=CONSTANTS.ACCOUNT_INFO_URL, + is_auth_required=True) + + for balance_entry in balances: + asset_name = balance_entry["asset"] + total_balance = Decimal(balance_entry["balance"]) + free_balance = Decimal(balance_entry["availableBalance"]) + self._account_available_balances[asset_name] = free_balance + self._account_balances[asset_name] = total_balance + remote_asset_names.add(asset_name) + + asset_names_to_remove = local_asset_names.difference(remote_asset_names) + for asset_name in asset_names_to_remove: + del self._account_available_balances[asset_name] + del self._account_balances[asset_name] + + async def _update_positions(self): + position_tasks = [] + + for trading_pair in self._trading_pairs: + ex_trading_pair = await self.exchange_symbol_associated_to_pair(trading_pair) + body_params = {"symbol": ex_trading_pair} + position_tasks.append( + asyncio.create_task(self._api_get( + path_url=CONSTANTS.POSITION_INFORMATION_URL, + params=body_params, + is_auth_required=True, + trading_pair=trading_pair, + )) + ) + + raw_responses: List[Dict[str, Any]] = await safe_gather(*position_tasks, return_exceptions=True) + + # Initial parsing of responses. Joining all the responses + parsed_resps: List[Dict[str, Any]] = [] + for resp, trading_pair in zip(raw_responses, self._trading_pairs): + if not isinstance(resp, Exception): + if resp: + position_entries = resp if isinstance(resp, list) else [resp] + parsed_resps.extend(position_entries) + else: + self.logger().error(f"Error fetching positions for {trading_pair}. 
Response: {resp}") + + for position in parsed_resps: + ex_trading_pair = position["symbol"] + hb_trading_pair = await self.trading_pair_associated_to_exchange_symbol(ex_trading_pair) + position_side = PositionSide(position["side"]) + unrealized_pnl = Decimal(str(position["unrealizedPnL"])) + entry_price = Decimal(str(position["avgPrice"])) + amount = Decimal(self.get_amount_of_contracts(hb_trading_pair, int(position["position"]))) + leverage = Decimal(position["leverage"]) + pos_key = self._perpetual_trading.position_key(hb_trading_pair, position_side) + if amount != s_decimal_0: + position = Position( + trading_pair=hb_trading_pair, + position_side=position_side, + unrealized_pnl=unrealized_pnl, + entry_price=entry_price, + amount=amount * (Decimal("-1.0") if position_side == PositionSide.SHORT else Decimal("1.0")), + leverage=leverage, + ) + self._perpetual_trading.set_position(pos_key, position) + else: + self._perpetual_trading.remove_position(pos_key) + + async def _update_order_fills_from_trades(self): + last_tick = int(self._last_poll_timestamp / self.UPDATE_ORDER_STATUS_MIN_INTERVAL) + current_tick = int(self.current_timestamp / self.UPDATE_ORDER_STATUS_MIN_INTERVAL) + if current_tick > last_tick and len(self._order_tracker.active_orders) > 0: + trading_pairs_to_order_map: Dict[str, Dict[str, Any]] = defaultdict(lambda: {}) + for order in self._order_tracker.active_orders.values(): + trading_pairs_to_order_map[order.trading_pair][order.exchange_order_id] = order + trading_pairs = list(trading_pairs_to_order_map.keys()) + tasks = [ + self._api_get( + path_url=CONSTANTS.ACCOUNT_TRADE_LIST_URL, + params={"symbol": await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair)}, + is_auth_required=True, + ) + for trading_pair in trading_pairs + ] + self.logger().debug(f"Polling for order fills of {len(tasks)} trading_pairs.") + results = await safe_gather(*tasks, return_exceptions=True) + for trades, trading_pair in zip(results, trading_pairs): + order_map = trading_pairs_to_order_map.get(trading_pair) + if isinstance(trades, Exception): + self.logger().network( + f"Error fetching trades update for the order {trading_pair}: {trades}.", + app_warning_msg=f"Failed to fetch trade update for {trading_pair}." 
+ ) + continue + for trade in trades: + order_id = str(trade.get("orderId")) + if order_id in order_map: + tracked_order: InFlightOrder = order_map.get(order_id) + position_side = trade["side"] + position_action = (PositionAction.OPEN + if (tracked_order.trade_type is TradeType.BUY and position_side == "LONG" + or tracked_order.trade_type is TradeType.SELL and position_side == "SHORT") + else PositionAction.CLOSE) + fee = TradeFeeBase.new_perpetual_fee( + fee_schema=self.trade_fee_schema(), + position_action=position_action, + percent_token=trade["commissionAsset"], + flat_fees=[TokenAmount(amount=Decimal(trade["commission"]), token=trade["commissionAsset"])] + ) + amount = self.get_amount_of_contracts(trading_pair, int(trade["quantity"])) + trade_update: TradeUpdate = TradeUpdate( + trade_id=str(trade["tradeId"]), + client_order_id=tracked_order.client_order_id, + exchange_order_id=trade["orderId"], + trading_pair=tracked_order.trading_pair, + fill_timestamp=int(trade["time"]) * 1e-3, + fill_price=Decimal(trade["price"]), + fill_base_amount=Decimal(amount), + fill_quote_amount=Decimal(trade["price"]) * amount, + fee=fee, + ) + self._order_tracker.process_trade_update(trade_update) + + async def _update_order_status(self): + """ + Calls the REST API to get order/trade updates for each in-flight order. + """ + last_tick = int(self._last_poll_timestamp / self.UPDATE_ORDER_STATUS_MIN_INTERVAL) + current_tick = int(self.current_timestamp / self.UPDATE_ORDER_STATUS_MIN_INTERVAL) + if current_tick > last_tick and len(self._order_tracker.active_orders) > 0: + tracked_orders = list(self._order_tracker.active_orders.values()) + tasks = [ + self._api_get( + path_url=CONSTANTS.ORDER_URL, + params={ + "symbol": await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair), + "clientOrderId": order.client_order_id + }, + is_auth_required=True, + return_err=True, + ) + for order in tracked_orders + ] + self.logger().debug(f"Polling for order status updates of {len(tasks)} orders.") + results = await safe_gather(*tasks, return_exceptions=True) + + for order_update, tracked_order in zip(results, tracked_orders): + client_order_id = tracked_order.client_order_id + if client_order_id not in self._order_tracker.all_orders: + continue + if isinstance(order_update, Exception) or order_update is None or "code" in order_update: + if not isinstance(order_update, Exception) and \ + (not order_update or (order_update["code"] == -2013 or order_update["msg"] == "Order does not exist.")): + await self._order_tracker.process_order_not_found(client_order_id) + else: + self.logger().network( + f"Error fetching status update for the order {client_order_id}: " f"{order_update}." 
+ ) + continue + + new_order_update: OrderUpdate = OrderUpdate( + trading_pair=await self.trading_pair_associated_to_exchange_symbol(order_update['symbol']), + update_timestamp=int(order_update["updateTime"]) * 1e-3, + new_state=CONSTANTS.ORDER_STATE[order_update["status"]], + client_order_id=order_update["clientOrderId"], + exchange_order_id=order_update["orderId"], + ) + + self._order_tracker.process_order_update(new_order_update) + + async def _get_position_mode(self) -> Optional[PositionMode]: + return self._position_mode + + async def _trading_pair_position_mode_set(self, mode: PositionMode, trading_pair: str) -> Tuple[bool, str]: + return False, "Not support to set position mode" + + def get_quantity_of_contracts(self, trading_pair: str, amount: float) -> int: + trading_rule: TradingRule = self._trading_rules[trading_pair] + num_contracts = int(amount / trading_rule.min_base_amount_increment) + return num_contracts + + def get_amount_of_contracts(self, trading_pair: str, number: int) -> Decimal: + if len(self._trading_rules) > 0: + trading_rule: TradingRule = self._trading_rules[trading_pair] + contract_value = Decimal(number * trading_rule.min_base_amount_increment) + else: + contract_value = Decimal(number * 0.001) + return contract_value + + async def _set_trading_pair_leverage(self, trading_pair: str, leverage: int) -> Tuple[bool, str]: + symbol = await self.exchange_symbol_associated_to_pair(trading_pair) + params = {'symbol': symbol, 'leverage': leverage} + resp = await self._api_post( + path_url=CONSTANTS.SET_LEVERAGE_URL, + params=params, + is_auth_required=True, + ) + success = False + msg = "" + if "leverage" in resp and int(resp["leverage"]) == leverage: + success = True + elif "msg" in resp: + msg = resp["msg"] + else: + msg = 'Unable to set leverage' + return success, msg + + async def _fetch_last_fee_payment(self, trading_pair: str) -> Tuple[int, Decimal, Decimal]: + exchange_symbol = await self.exchange_symbol_associated_to_pair(trading_pair) + + params = { + "symbol": exchange_symbol, + "timestamp": int(self._time_synchronizer.time() * 1e3) + } + result = (await self._api_get( + path_url=CONSTANTS.FUNDING_INFO_URL, + params=params, + is_auth_required=True, + trading_pair=trading_pair, + ))[0] + + if not result: + # An empty funding fee/payment is retrieved. 
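Order and position amounts on this exchange are expressed in integer contracts, and the get_quantity_of_contracts / get_amount_of_contracts helpers above convert between contracts and base units via the trading rule's min_base_amount_increment. A quick worked example with the 0.001 contract size from the BTCUSDT-PERPETUAL trading-rules example (an assumption for any other market):

```python
from decimal import Decimal

contract_size = Decimal("0.001")  # contractMultiplier for BTCUSDT-PERPETUAL in the example above

def to_contracts(base_amount: Decimal) -> int:
    # Mirrors get_quantity_of_contracts: base units -> integer contract count.
    return int(base_amount / contract_size)

def to_base_amount(contracts: int) -> Decimal:
    # Mirrors get_amount_of_contracts: integer contracts -> base units.
    return contracts * contract_size

print(to_contracts(Decimal("0.75")))  # 750 contracts
print(to_base_amount(750))            # 0.750 BTC
```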
+ timestamp, funding_rate, payment = 0, Decimal("-1"), Decimal("-1") + else: + funding_rate: Decimal = Decimal(str(result["rate"])) + position_size: Decimal = Decimal(0.0) + payment: Decimal = funding_rate * position_size + timestamp: int = int(pd.Timestamp(int(result["nextSettleTime"]), unit="ms", tz="UTC").timestamp()) + + return timestamp, funding_rate, payment + + async def _api_request(self, + path_url, + method: RESTMethod = RESTMethod.GET, + params: Optional[Dict[str, Any]] = None, + data: Optional[Dict[str, Any]] = None, + is_auth_required: bool = False, + return_err: bool = False, + limit_id: Optional[str] = None, + trading_pair: Optional[str] = None, + **kwargs) -> Dict[str, Any]: + last_exception = None + rest_assistant = await self._web_assistants_factory.get_rest_assistant() + url = web_utils.rest_url(path_url, domain=self.domain) + local_headers = { + "Content-Type": "application/x-www-form-urlencoded"} + for _ in range(2): + try: + request_result = await rest_assistant.execute_request( + url=url, + params=params, + data=data, + method=method, + is_auth_required=is_auth_required, + return_err=return_err, + headers=local_headers, + throttler_limit_id=limit_id if limit_id else path_url, + ) + return request_result + except IOError as request_exception: + last_exception = request_exception + if self._is_request_exception_related_to_time_synchronizer(request_exception=request_exception): + self._time_synchronizer.clear_time_offset_ms_samples() + await self._update_time_synchronizer() + else: + raise + + # Failed even after the last retry + raise last_exception diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_order_book.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_order_book.py new file mode 100644 index 0000000000..baba21d2a7 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_order_book.py @@ -0,0 +1,71 @@ +from typing import Dict, Optional + +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType + + +class HashkeyPerpetualsOrderBook(OrderBook): + @classmethod + def snapshot_message_from_exchange_websocket(cls, + msg: Dict[str, any], + timestamp: float, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a snapshot message with the order book snapshot message + :param msg: the response from the exchange when requesting the order book snapshot + :param timestamp: the snapshot timestamp + :param metadata: a dictionary with extra information to add to the snapshot data + :return: a snapshot message with the snapshot information received from the exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { + "trading_pair": msg["trading_pair"], + "update_id": ts, + "bids": msg["b"], + "asks": msg["a"] + }, timestamp=timestamp) + + @classmethod + def snapshot_message_from_exchange_rest(cls, + msg: Dict[str, any], + timestamp: float, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a snapshot message with the order book snapshot message + :param msg: the response from the exchange when requesting the order book snapshot + :param timestamp: the snapshot timestamp + :param metadata: a dictionary with extra information to add to the snapshot data + :return: a snapshot message with the snapshot information received from the 
exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { + "trading_pair": msg["trading_pair"], + "update_id": ts, + "bids": msg["b"], + "asks": msg["a"] + }, timestamp=timestamp) + + @classmethod + def trade_message_from_exchange(cls, msg: Dict[str, any], metadata: Optional[Dict] = None): + """ + Creates a trade message with the information from the trade event sent by the exchange + :param msg: the trade event details sent by the exchange + :param metadata: a dictionary with extra information to add to trade message + :return: a trade message with the details of the trade as provided by the exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.TRADE, { + "trading_pair": msg["trading_pair"], + "trade_type": float(TradeType.BUY.value) if msg["m"] else float(TradeType.SELL.value), + "trade_id": ts, + "update_id": ts, + "price": msg["p"], + "amount": msg["q"] + }, timestamp=ts * 1e-3) diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_user_stream_data_source.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_user_stream_data_source.py new file mode 100644 index 0000000000..1db3453261 --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_user_stream_data_source.py @@ -0,0 +1,144 @@ +import asyncio +import time +from typing import TYPE_CHECKING, Any, List, Optional + +import hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_constants as CONSTANTS +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_auth import HashkeyPerpetualAuth +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_derivative import ( + HashkeyPerpetualDerivative, + ) + + +class HashkeyPerpetualUserStreamDataSource(UserStreamTrackerDataSource): + + LISTEN_KEY_KEEP_ALIVE_INTERVAL = 1800 # Recommended to Ping/Update listen key to keep connection alive + HEARTBEAT_TIME_INTERVAL = 30.0 + + _logger: Optional[HummingbotLogger] = None + + def __init__(self, + auth: HashkeyPerpetualAuth, + trading_pairs: List[str], + connector: "HashkeyPerpetualDerivative", + api_factory: WebAssistantsFactory, + domain: str = CONSTANTS.DEFAULT_DOMAIN): + super().__init__() + self._auth: HashkeyPerpetualAuth = auth + self._current_listen_key = None + self._domain = domain + self._api_factory = api_factory + self._connector = connector + + self._listen_key_initialized_event: asyncio.Event = asyncio.Event() + self._last_listen_key_ping_ts = 0 + + async def _connected_websocket_assistant(self) -> WSAssistant: + """ + Creates an instance of WSAssistant connected to the exchange + """ + self._manage_listen_key_task = safe_ensure_future(self._manage_listen_key_task_loop()) + await self._listen_key_initialized_event.wait() + + ws: WSAssistant = await self._get_ws_assistant() + url = CONSTANTS.WSS_PRIVATE_URL[self._domain].format(listenKey=self._current_listen_key) + await ws.connect(ws_url=url, 
ping_timeout=CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL) + return ws + + async def _subscribe_channels(self, websocket_assistant: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. + + Hashkey does not require any channel subscription. + + :param websocket_assistant: the websocket assistant used to connect to the exchange + """ + pass + + async def _get_listen_key(self): + try: + data = await self._connector._api_request( + method=RESTMethod.POST, + path_url=CONSTANTS.USER_STREAM_PATH_URL, + is_auth_required=True, + ) + except asyncio.CancelledError: + raise + except Exception as exception: + raise IOError(f"Error fetching user stream listen key. Error: {exception}") + + return data["listenKey"] + + async def _ping_listen_key(self) -> bool: + try: + data = await self._connector._api_request( + method=RESTMethod.PUT, + path_url=CONSTANTS.USER_STREAM_PATH_URL, + params={"listenKey": self._current_listen_key}, + return_err=True, + ) + if "code" in data: + self.logger().warning(f"Failed to refresh the listen key {self._current_listen_key}: {data}") + return False + + except asyncio.CancelledError: + raise + except Exception as exception: + self.logger().warning(f"Failed to refresh the listen key {self._current_listen_key}: {exception}") + return False + + return True + + async def _manage_listen_key_task_loop(self): + try: + while True: + now = int(time.time()) + if self._current_listen_key is None: + self._current_listen_key = await self._get_listen_key() + self.logger().info(f"Successfully obtained listen key {self._current_listen_key}") + self._listen_key_initialized_event.set() + self._last_listen_key_ping_ts = int(time.time()) + + if now - self._last_listen_key_ping_ts >= self.LISTEN_KEY_KEEP_ALIVE_INTERVAL: + success: bool = await self._ping_listen_key() + if not success: + self.logger().error("Error occurred renewing listen key ...") + break + else: + self.logger().info(f"Refreshed listen key {self._current_listen_key}.") + self._last_listen_key_ping_ts = int(time.time()) + else: + await self._sleep(self.LISTEN_KEY_KEEP_ALIVE_INTERVAL) + finally: + self._current_listen_key = None + self._listen_key_initialized_event.clear() + + async def _process_event_message(self, event_message: Any, queue: asyncio.Queue): + if event_message == "ping" and self._pong_response_event: + websocket_assistant = await self._get_ws_assistant() + pong_request = WSJSONRequest(payload={"pong": event_message["ping"]}) + await websocket_assistant.send(request=pong_request) + else: + await super()._process_event_message(event_message=event_message, queue=queue) + + async def _get_ws_assistant(self) -> WSAssistant: + if self._ws_assistant is None: + self._ws_assistant = await self._api_factory.get_ws_assistant() + return self._ws_assistant + + async def _on_user_stream_interruption(self, websocket_assistant: Optional[WSAssistant]): + await super()._on_user_stream_interruption(websocket_assistant=websocket_assistant) + self._manage_listen_key_task and self._manage_listen_key_task.cancel() + self._current_listen_key = None + self._listen_key_initialized_event.clear() + await self._sleep(5) + + def _time(self): + return time.time() diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_utils.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_utils.py new file mode 100644 index 0000000000..f0b60f286c --- /dev/null +++ b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_utils.py @@ -0,0 +1,108 
@@ +from decimal import Decimal +from typing import Any, Dict + +from pydantic import Field, SecretStr + +from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData +from hummingbot.connector.utils import split_hb_trading_pair +from hummingbot.core.data_type.trade_fee import TradeFeeSchema + +DEFAULT_FEES = TradeFeeSchema( + maker_percent_fee_decimal=Decimal("0.0002"), + taker_percent_fee_decimal=Decimal("0.0004"), + buy_percent_fee_deducted_from_returns=True +) + +CENTRALIZED = True + +EXAMPLE_PAIR = "BTC-USDT" + + +def is_linear_perpetual(trading_pair: str) -> bool: + """ + Returns True if trading_pair is in USDT(Linear) Perpetual + """ + _, quote_asset = split_hb_trading_pair(trading_pair) + return quote_asset in ["USDT", "USDC"] + + +def get_next_funding_timestamp(current_timestamp: float) -> float: + # On Okx Perpetuals, funding occurs every 8 hours at 00:00UTC, 08:00UTC and 16:00UTC. + # Reference: https://help.okx.com/hc/en-us/articles/360039261134-Funding-fee-calculation + int_ts = int(current_timestamp) + eight_hours = 8 * 60 * 60 + mod = int_ts % eight_hours + return float(int_ts - mod + eight_hours) + + +def is_exchange_information_valid(rule: Dict[str, Any]) -> bool: + """ + Verifies if a trading pair is enabled to operate with based on its exchange information + + :param exchange_info: the exchange information for a trading pair + + :return: True if the trading pair is enabled, False otherwise + """ + if "status" in rule and rule["status"] == "TRADING": + valid = True + else: + valid = False + return valid + + +class HashkeyPerpetualConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="hashkey_perpetual", client_data=None) + hashkey_perpetual_api_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Perpetual API key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + hashkey_perpetual_secret_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Perpetual API secret", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + + +KEYS = HashkeyPerpetualConfigMap.construct() + +OTHER_DOMAINS = ["hashkey_perpetual_testnet"] +OTHER_DOMAINS_PARAMETER = {"hashkey_perpetual_testnet": "hashkey_perpetual_testnet"} +OTHER_DOMAINS_EXAMPLE_PAIR = {"hashkey_perpetual_testnet": "BTC-USDT"} +OTHER_DOMAINS_DEFAULT_FEES = {"hashkey_perpetual_testnet": [0.02, 0.04]} + + +class HashkeyPerpetualTestnetConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="hashkey_perpetual_testnet", client_data=None) + hashkey_perpetual_testnet_api_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Perpetual testnet API key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + hashkey_perpetual_testnet_secret_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Perpetual testnet API secret", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + + class Config: + title = "hashkey_perpetual" + + +OTHER_DOMAINS_KEYS = {"hashkey_perpetual_testnet": HashkeyPerpetualTestnetConfigMap.construct()} diff --git a/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_web_utils.py b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_web_utils.py new file mode 100644 index 0000000000..14d0311ecd --- /dev/null +++ 
b/hummingbot/connector/derivative/hashkey_perpetual/hashkey_perpetual_web_utils.py @@ -0,0 +1,80 @@ +from typing import Callable, Optional + +import hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_constants as CONSTANTS +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.connector.utils import TimeSynchronizerRESTPreProcessor +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest +from hummingbot.core.web_assistant.rest_pre_processors import RESTPreProcessorBase +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + + +class HashkeyPerpetualRESTPreProcessor(RESTPreProcessorBase): + + async def pre_process(self, request: RESTRequest) -> RESTRequest: + if request.headers is None: + request.headers = {} + request.headers["Content-Type"] = ( + "application/json" if request.method == RESTMethod.POST else "application/x-www-form-urlencoded" + ) + return request + + +def rest_url(path_url: str, domain: str = "hashkey_perpetual"): + base_url = CONSTANTS.PERPETUAL_BASE_URL if domain == "hashkey_perpetual" else CONSTANTS.TESTNET_BASE_URL + return base_url + path_url + + +def wss_url(endpoint: str, domain: str = "hashkey_perpetual"): + base_ws_url = CONSTANTS.PERPETUAL_WS_URL if domain == "hashkey_perpetual" else CONSTANTS.TESTNET_WS_URL + return base_ws_url + endpoint + + +def build_api_factory( + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + time_provider: Optional[Callable] = None, + auth: Optional[AuthBase] = None) -> WebAssistantsFactory: + throttler = throttler or create_throttler() + time_synchronizer = time_synchronizer or TimeSynchronizer() + time_provider = time_provider or (lambda: get_current_server_time( + throttler=throttler, + domain=domain, + )) + api_factory = WebAssistantsFactory( + throttler=throttler, + auth=auth, + rest_pre_processors=[ + TimeSynchronizerRESTPreProcessor(synchronizer=time_synchronizer, time_provider=time_provider), + HashkeyPerpetualRESTPreProcessor(), + ]) + return api_factory + + +def build_api_factory_without_time_synchronizer_pre_processor(throttler: AsyncThrottler) -> WebAssistantsFactory: + api_factory = WebAssistantsFactory( + throttler=throttler, + rest_pre_processors=[HashkeyPerpetualRESTPreProcessor()]) + return api_factory + + +def create_throttler() -> AsyncThrottler: + return AsyncThrottler(CONSTANTS.RATE_LIMITS) + + +async def get_current_server_time( + throttler: Optional[AsyncThrottler] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, +) -> float: + throttler = throttler or create_throttler() + api_factory = build_api_factory_without_time_synchronizer_pre_processor(throttler=throttler) + rest_assistant = await api_factory.get_rest_assistant() + response = await rest_assistant.execute_request( + url=rest_url(path_url=CONSTANTS.SERVER_TIME_PATH_URL, domain=domain), + method=RESTMethod.GET, + throttler_limit_id=CONSTANTS.SERVER_TIME_PATH_URL, + ) + server_time = response["serverTime"] + return server_time diff --git a/hummingbot/connector/derivative/hyperliquid_perpetual/hyperliquid_perpetual_api_order_book_data_source.py b/hummingbot/connector/derivative/hyperliquid_perpetual/hyperliquid_perpetual_api_order_book_data_source.py index 09c34f422b..ac37ac0b87 100644 --- 
a/hummingbot/connector/derivative/hyperliquid_perpetual/hyperliquid_perpetual_api_order_book_data_source.py +++ b/hummingbot/connector/derivative/hyperliquid_perpetual/hyperliquid_perpetual_api_order_book_data_source.py @@ -208,7 +208,6 @@ async def _parse_funding_info_message(self, raw_message: Dict[str, Any], message pass async def _request_complete_funding_info(self, trading_pair: str): - data = await self._connector._api_post(path_url=CONSTANTS.EXCHANGE_INFO_URL, data={"type": CONSTANTS.ASSET_CONTEXT_TYPE}) return data diff --git a/hummingbot/connector/derivative/kucoin_perpetual/kucoin_perpetual_derivative.py b/hummingbot/connector/derivative/kucoin_perpetual/kucoin_perpetual_derivative.py index 5cf27e9178..71121bf4c4 100644 --- a/hummingbot/connector/derivative/kucoin_perpetual/kucoin_perpetual_derivative.py +++ b/hummingbot/connector/derivative/kucoin_perpetual/kucoin_perpetual_derivative.py @@ -154,14 +154,7 @@ def start(self, clock: Clock, timestamp: float): def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): error_description = str(request_exception) - ts_error_target_str = self._format_ret_code_for_print(ret_code=CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR) - param_error_target_str = ( - "KC-API-TIMESTAMP Invalid -- Time differs from server time by more than 5 seconds" - ) - is_time_synchronizer_related = ( - ts_error_target_str in error_description or param_error_target_str in error_description - ) - return is_time_synchronizer_related + return CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR in error_description and "KC-API-TIMESTAMP" in error_description async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): cancel_result = await self._api_delete( diff --git a/hummingbot/connector/exchange/binance/binance_constants.py b/hummingbot/connector/exchange/binance/binance_constants.py index 786d18b86f..83228ead3a 100644 --- a/hummingbot/connector/exchange/binance/binance_constants.py +++ b/hummingbot/connector/exchange/binance/binance_constants.py @@ -61,6 +61,7 @@ "CANCELED": OrderState.CANCELED, "REJECTED": OrderState.FAILED, "EXPIRED": OrderState.FAILED, + "EXPIRED_IN_MATCH": OrderState.FAILED, } # Websocket event types @@ -70,9 +71,9 @@ RATE_LIMITS = [ # Pools RateLimit(limit_id=REQUEST_WEIGHT, limit=6000, time_interval=ONE_MINUTE), - RateLimit(limit_id=ORDERS, limit=50, time_interval=10 * ONE_SECOND), - RateLimit(limit_id=ORDERS_24HR, limit=160000, time_interval=ONE_DAY), - RateLimit(limit_id=RAW_REQUESTS, limit=61000, time_interval= 5 * ONE_MINUTE), + RateLimit(limit_id=ORDERS, limit=100, time_interval=10 * ONE_SECOND), + RateLimit(limit_id=ORDERS_24HR, limit=200000, time_interval=ONE_DAY), + RateLimit(limit_id=RAW_REQUESTS, limit=61000, time_interval=5 * ONE_MINUTE), # Weighted Limits RateLimit(limit_id=TICKER_PRICE_CHANGE_PATH_URL, limit=MAX_REQUEST, time_interval=ONE_MINUTE, linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, 2), diff --git a/hummingbot/connector/exchange/bitfinex/bitfinex_exchange.pyx b/hummingbot/connector/exchange/bitfinex/bitfinex_exchange.pyx index 1c945c296e..dcb26a3349 100644 --- a/hummingbot/connector/exchange/bitfinex/bitfinex_exchange.pyx +++ b/hummingbot/connector/exchange/bitfinex/bitfinex_exchange.pyx @@ -1127,7 +1127,7 @@ cdef class BitfinexExchange(ExchangeBase): cancellation_results = [] async for _response in ws.messages(waitFor=waitFor): cancelled_client_oids = [o[-1]['order_id'] for o in _response[2][4]] - self.logger().info(f"Succesfully canceled orders: {cancelled_client_oids}") + 
self.logger().info(f"Successfully canceled orders: {cancelled_client_oids}") for c_oid in cancelled_client_oids: cancellation_results.append(CancellationResult(c_oid, True)) break diff --git a/test/connector/derivative/__init__.py b/hummingbot/connector/exchange/bitstamp/__init__.py similarity index 100% rename from test/connector/derivative/__init__.py rename to hummingbot/connector/exchange/bitstamp/__init__.py diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_api_order_book_data_source.py b/hummingbot/connector/exchange/bitstamp/bitstamp_api_order_book_data_source.py new file mode 100644 index 0000000000..d3fcd9d8b4 --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_api_order_book_data_source.py @@ -0,0 +1,146 @@ +import asyncio +import time +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from hummingbot.connector.exchange.bitstamp import bitstamp_constants as CONSTANTS, bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_order_book import BitstampOrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.exchange.bitstamp.bitstamp_exchange import BitstampExchange + + +class BitstampAPIOrderBookDataSource(OrderBookTrackerDataSource): + _logger: Optional[HummingbotLogger] = None + + def __init__(self, + trading_pairs: List[str], + connector: 'BitstampExchange', + api_factory: WebAssistantsFactory, + domain: str = CONSTANTS.DEFAULT_DOMAIN): + super().__init__(trading_pairs) + self._connector = connector + self._trade_messages_queue_key = CONSTANTS.TRADE_EVENT_TYPE + self._diff_messages_queue_key = CONSTANTS.DIFF_EVENT_TYPE + self._domain = domain + self._api_factory = api_factory + self._channel_associated_to_tradingpair = {} + + async def get_last_traded_prices(self, + trading_pairs: List[str], + domain: Optional[str] = None) -> Dict[str, float]: + return await self._connector.get_last_traded_prices(trading_pairs=trading_pairs) + + async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any]: + """ + Retrieves a copy of the full order book from the exchange, for a particular trading pair. + + :param trading_pair: the trading pair for which the order book will be retrieved + + :return: the response from the exchange (JSON dictionary) + """ + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + + rest_assistant = await self._api_factory.get_rest_assistant() + order_book_data = await rest_assistant.execute_request( + url=web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(symbol), domain=self._domain), + method=RESTMethod.GET, + throttler_limit_id=CONSTANTS.ORDER_BOOK_URL_LIMIT_ID, + ) + + return order_book_data + + async def _subscribe_channels(self, ws: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. 
+ :param ws: the websocket assistant used to connect to the exchange + """ + try: + self._channel_associated_to_tradingpair.clear() + for trading_pair in self._trading_pairs: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + + channel = CONSTANTS.WS_PUBLIC_LIVE_TRADES.format(symbol) + payload = { + "event": "bts:subscribe", + "data": { + "channel": channel + } + } + subscribe_trade_request: WSJSONRequest = WSJSONRequest(payload=payload) + self._channel_associated_to_tradingpair[channel] = trading_pair + + channel = CONSTANTS.WS_PUBLIC_DIFF_ORDER_BOOK.format(symbol) + payload = { + "event": "bts:subscribe", + "data": { + "channel": channel + } + } + subscribe_orderbook_request: WSJSONRequest = WSJSONRequest(payload=payload) + self._channel_associated_to_tradingpair[channel] = trading_pair + + await ws.send(subscribe_trade_request) + await ws.send(subscribe_orderbook_request) + + self.logger().info("Subscribed to public order book and trade channels...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to order book trading and delta streams...", + exc_info=True + ) + raise + + async def _connected_websocket_assistant(self) -> WSAssistant: + """ + Creates an instance of WSAssistant connected to the exchange + """ + ws: WSAssistant = await self._api_factory.get_ws_assistant() + await ws.connect(ws_url=CONSTANTS.WSS_URL.format(self._domain), + ping_timeout=CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL) + return ws + + async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: + snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair) + snapshot_msg: OrderBookMessage = BitstampOrderBook.snapshot_message_from_exchange( + snapshot, + time.time(), + metadata={"trading_pair": trading_pair} + ) + return snapshot_msg + + async def _parse_trade_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + trading_pair = self._channel_associated_to_tradingpair.get(raw_message["channel"]) + + trade_message = BitstampOrderBook.trade_message_from_exchange( + raw_message, {"trading_pair": trading_pair}) + + message_queue.put_nowait(trade_message) + + async def _parse_order_book_diff_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): + trading_pair = self._channel_associated_to_tradingpair.get(raw_message["channel"]) + + order_book_message: OrderBookMessage = BitstampOrderBook.diff_message_from_exchange( + raw_message, time.time(), {"trading_pair": trading_pair}) + + message_queue.put_nowait(order_book_message) + + def _channel_originating_message(self, event_message: Dict[str, Any]) -> str: + return event_message.get("event", "") + + async def _process_message_for_unknown_channel(self, event_message: Dict[str, Any], websocket_assistant: WSAssistant): + event = event_message.get("event", "") + channel = event_message.get("channel") + if event == "bts:subscription_succeeded": + self.logger().info(f"Subscription succeeded for channel '{channel}'") + elif event == "bts:request_reconnect": + raise ConnectionError("Received request to reconnect. 
Reconnecting...") + else: + self.logger().debug(f"Received message from unknown channel: {event_message}") diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_api_user_stream_data_source.py b/hummingbot/connector/exchange/bitstamp/bitstamp_api_user_stream_data_source.py new file mode 100644 index 0000000000..db29e0726e --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_api_user_stream_data_source.py @@ -0,0 +1,124 @@ +import asyncio +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from hummingbot.connector.exchange.bitstamp import bitstamp_constants as CONSTANTS, bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_auth import BitstampAuth +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.exchange.bitstamp.bitstamp_exchange import BitstampExchange + + +class BitstampAPIUserStreamDataSource(UserStreamTrackerDataSource): + USER_STREAM_EVENTS = { + CONSTANTS.USER_ORDER_CREATED, + CONSTANTS.USER_ORDER_CHANGED, + CONSTANTS.USER_ORDER_DELETED, + CONSTANTS.USER_TRADE, + CONSTANTS.USER_SELF_TRADE, + } + + _logger: Optional[HummingbotLogger] = None + + def __init__(self, + auth: BitstampAuth, + trading_pairs: List[str], + connector: 'BitstampExchange', + api_factory: WebAssistantsFactory, + domain: str = CONSTANTS.DEFAULT_DOMAIN): + super().__init__() + self._auth: BitstampAuth = auth + self._trading_pairs = trading_pairs + self._connector = connector + self._current_listen_key = None + self._domain = domain + self._api_factory = api_factory + + async def _connected_websocket_assistant(self) -> WSAssistant: + """ + Creates an instance of WSAssistant connected to the exchange + """ + ws: WSAssistant = await self._api_factory.get_ws_assistant() + await ws.connect(ws_url=CONSTANTS.WSS_URL.format(self._domain), + ping_timeout=CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL) + return ws + + async def _subscribe_channels(self, websocket_assistant: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. + + Bitstamp does not require any channel subscription. 
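+        (A websocket token is still fetched over the REST API and used in the bts:subscribe requests for the private my_orders, my_trades and live_trades channels of each trading pair.)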
+ + :param websocket_assistant: the websocket assistant used to connect to the exchange + """ + try: + + rest_assistant = await self._api_factory.get_rest_assistant() + for trading_pair in self._trading_pairs: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + + resp = await rest_assistant.execute_request( + url=web_utils.private_rest_url(path_url=CONSTANTS.WEBSOCKET_TOKEN_URL, domain=self._domain), + method=RESTMethod.POST, + is_auth_required=True, + throttler_limit_id=CONSTANTS.WEBSOCKET_TOKEN_URL + ) + user_id = resp.get("user_id") + token = resp.get("token") + + payload = { + "event": "bts:subscribe", + "data": { + "channel": CONSTANTS.WS_PRIVATE_MY_TRADES.format(symbol, user_id), + "auth": token + } + } + my_trades_subscribe_request: WSJSONRequest = WSJSONRequest(payload=payload) + + payload = { + "event": "bts:subscribe", + "data": { + "channel": CONSTANTS.WS_PRIVATE_MY_SELF_TRADES.format(symbol, user_id), + "auth": token + } + } + my_self_trades_subscribe_request: WSJSONRequest = WSJSONRequest(payload=payload) + + payload = { + "event": "bts:subscribe", + "data": { + "channel": CONSTANTS.WS_PRIVATE_MY_ORDERS.format(symbol, user_id), + "auth": token + } + } + my_orders_subscribe_request: WSJSONRequest = WSJSONRequest(payload=payload) + + await websocket_assistant.send(my_trades_subscribe_request) + await websocket_assistant.send(my_self_trades_subscribe_request) + await websocket_assistant.send(my_orders_subscribe_request) + + self.logger().info("Subscribed to private account and orders channels...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception("Unexpected error occurred subscribing to order book trading...") + raise + + async def _process_event_message(self, event_message: Dict[str, Any], queue: asyncio.Queue): + if len(event_message) > 0: + event = event_message.get("event", "") + channel = event_message.get("channel", "") + + if event in self.USER_STREAM_EVENTS: + queue.put_nowait(event_message) + else: + if event == "bts:subscription_succeeded": + self.logger().info(f"Successfully subscribed to '{channel}'...") + elif event == "bts:request_reconnect": + raise ConnectionError("Received request to reconnect. Reconnecting...") + else: + self.logger().debug(f"Received unknown event message: {event_message}") diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_auth.py b/hummingbot/connector/exchange/bitstamp/bitstamp_auth.py new file mode 100644 index 0000000000..e9cf5e1767 --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_auth.py @@ -0,0 +1,73 @@ +import hashlib +import hmac +import uuid +from typing import Dict +from urllib.parse import urlencode, urlparse + +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, WSRequest + + +class BitstampAuth(AuthBase): + AUTH_VERSION = "v2" + + def __init__(self, api_key: str, secret_key: str, time_provider: TimeSynchronizer): + self.api_key = api_key + self.secret_key = secret_key + self.time_provider = time_provider + + async def rest_authenticate(self, request: RESTRequest) -> RESTRequest: + """ + Adds the server time and the signature to the request, required for authenticated interactions. It also adds + the required parameter in the request header. 
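+        The X-Auth, X-Auth-Signature, X-Auth-Nonce, X-Auth-Timestamp and X-Auth-Version headers are generated according to Bitstamp's v2 scheme, with the signature computed as an HMAC-SHA256 of the request details.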
+ :param request: the request to be configured for authenticated interaction + """ + local_headers = {} + if request.headers is not None: + local_headers.update(request.headers) + + auth_headers = self._generate_headers_for_authentication( + request_url=request.url, + content_type=local_headers.get("Content-Type"), + payload=request.data, + method=request.method, + ) + + local_headers.update(auth_headers) + request.headers = local_headers + + return request + + async def ws_authenticate(self, request: WSRequest) -> WSRequest: + """ + This method is intended to configure a websocket request to be authenticated. Bitstamp does not use this + functionality + """ + return request # pass-through + + def _generate_headers_for_authentication(self, method: RESTMethod, request_url: str, content_type: str, payload) -> Dict[str, str]: + nonce = str(uuid.uuid4()) + timestamp_str = str(int(self.time_provider.time() * 1e3)) + + headers = { + 'X-Auth': 'BITSTAMP ' + self.api_key, + 'X-Auth-Signature': self._generate_signature(self._generate_message(method, request_url, content_type, payload, nonce, timestamp_str)), + 'X-Auth-Nonce': nonce, + 'X-Auth-Timestamp': timestamp_str, + 'X-Auth-Version': self.AUTH_VERSION + } + + return headers + + def _generate_message(self, method: RESTMethod, request_url: str, content_type: str, payload, nonce: str, timestamp_str: str) -> str: + content_type = content_type or "" + payload_str = urlencode(payload) if payload else "" + url = urlparse(request_url) + message = f"BITSTAMP {self.api_key}{method}{url.hostname}{url.path}{content_type}{nonce}{timestamp_str}{self.AUTH_VERSION}{payload_str}" + + return message + + def _generate_signature(self, msg: str) -> str: + digest = hmac.new(self.secret_key.encode("utf8"), msg=msg.encode("utf8"), digestmod=hashlib.sha256).hexdigest() + return digest diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_constants.py b/hummingbot/connector/exchange/bitstamp/bitstamp_constants.py new file mode 100644 index 0000000000..e1f04bac33 --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_constants.py @@ -0,0 +1,113 @@ +from hummingbot.connector.constants import MINUTE, SECOND +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit +from hummingbot.core.data_type.in_flight_order import OrderState + +DEFAULT_DOMAIN = "" + +REST_URL = "https://www.bitstamp.net/api/" +WSS_URL = "wss://ws.bitstamp.net" + +API_VERSION = "v2" + +MAX_ORDER_ID_LEN = None +HBOT_ORDER_ID_PREFIX = "hbot" + +# Order States +ORDER_STATE = { + "Open": OrderState.OPEN, + "Finished": OrderState.FILLED, + "Expired": OrderState.CANCELED, + "Canceled": OrderState.CANCELED, +} + +# Error Codes +ORDER_NOT_EXIST_ERROR_CODE = "404.002" +ORDER_NOT_EXIST_MESSAGE = "Order not found" +TIMESTAMP_ERROR_CODE = "API0017" +TIMESTAMP_ERROR_MESSAGE = "X-Auth-Timestamp header" + +SIDE_BUY = "buy" +SIDE_SELL = "sell" + +# Public API endpoints +STATUS_URL = "/status/" +CURRENCIES_URL = "/currencies/" +EXCHANGE_INFO_PATH_URL = "/trading-pairs-info/" +ORDER_BOOK_URL = "/order_book/{}" +TICKER_URL = "/ticker/{}" + +# Private API endpoints +ACCOUNT_BALANCES_URL = "/account_balances/" +ORDER_CANCEL_URL = "/cancel_order/" +ORDER_STATUS_URL = "/order_status/" +TRADING_FEES_URL = "/fees/trading/" +WEBSOCKET_TOKEN_URL = "/websockets_token/" + +# WS Events +DIFF_EVENT_TYPE = "data" +TRADE_EVENT_TYPE = "trade" +USER_ORDER_CREATED = "order_created" +USER_ORDER_CHANGED = "order_changed" +USER_ORDER_DELETED = "order_deleted" +USER_TRADE = "trade" 
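+# Emitted when two orders belonging to the same account are matched against each other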
+USER_SELF_TRADE = "self_trade" + +# WS Public channels +WS_PUBLIC_DIFF_ORDER_BOOK = "diff_order_book_{}" +WS_PUBLIC_LIVE_TRADES = "live_trades_{}" + +# WS Private channels +WS_PRIVATE_MY_ORDERS = "private-my_orders_{}-{}" +WS_PRIVATE_MY_TRADES = "private-my_trades_{}-{}" +WS_PRIVATE_MY_SELF_TRADES = "private-live_trades_{}-{}" + +# WS Other +WS_HEARTBEAT_TIME_INTERVAL = 30.0 + +# Rate Limit +MAX_REQUEST = 10000 +MAX_REQUESTS_PER_SECOND = 400 + +RAW_REQUESTS_LIMIT_ID = "raw_requests" +REQUEST_WEIGHT_LIMIT_ID = "request_weight" +ORDER_BOOK_URL_LIMIT_ID = 'order_book' +ORDER_CREATE_URL_LIMIT_ID = 'order_create' +TICKER_URL_LIMIT_ID = 'ticker' + +RATE_LIMITS = [ + RateLimit(limit_id=RAW_REQUESTS_LIMIT_ID, limit=MAX_REQUEST, time_interval=10 * MINUTE), + RateLimit(limit_id=REQUEST_WEIGHT_LIMIT_ID, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, linked_limits=[LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=STATUS_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=CURRENCIES_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=EXCHANGE_INFO_PATH_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=ORDER_BOOK_URL_LIMIT_ID, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=TICKER_URL_LIMIT_ID, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=ACCOUNT_BALANCES_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=ORDER_CREATE_URL_LIMIT_ID, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=ORDER_CANCEL_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=ORDER_STATUS_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=TRADING_FEES_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), + RateLimit(limit_id=WEBSOCKET_TOKEN_URL, limit=MAX_REQUESTS_PER_SECOND, time_interval=SECOND, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT_LIMIT_ID), + LinkedLimitWeightPair(RAW_REQUESTS_LIMIT_ID)]), +] diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_exchange.py b/hummingbot/connector/exchange/bitstamp/bitstamp_exchange.py new file mode 100644 index 0000000000..bb1ee2e02b --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_exchange.py @@ -0,0 +1,530 @@ +import asyncio +from datetime import datetime 
+from decimal import Decimal +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple + +from bidict import bidict + +from hummingbot.connector.constants import s_decimal_NaN +from hummingbot.connector.exchange.bitstamp import ( + bitstamp_constants as CONSTANTS, + bitstamp_utils, + bitstamp_web_utils as web_utils, +) +from hummingbot.connector.exchange.bitstamp.bitstamp_api_order_book_data_source import BitstampAPIOrderBookDataSource +from hummingbot.connector.exchange.bitstamp.bitstamp_api_user_stream_data_source import BitstampAPIUserStreamDataSource +from hummingbot.connector.exchange.bitstamp.bitstamp_auth import BitstampAuth +from hummingbot.connector.exchange_py_base import ExchangePyBase +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase, TradeFeeSchema +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.utils.estimate_fee import build_trade_fee +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + + +class BitstampExchange(ExchangePyBase): + UPDATE_ORDER_STATUS_MIN_INTERVAL = 10.0 + + web_utils = web_utils + + def __init__(self, + client_config_map: "ClientConfigAdapter", + bitstamp_api_key: str, + bitstamp_api_secret: str, + trading_pairs: Optional[List[str]] = None, + trading_required: bool = True, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + time_provider: Optional[Callable] = None, + ): + self.api_key = bitstamp_api_key + self.secret_key = bitstamp_api_secret + self._trading_pairs = trading_pairs + self._trading_required = trading_required + self._domain = domain + self._time_provider = time_provider + self._last_trades_poll_bitstamp_timestamp = 1.0 + + super().__init__(client_config_map) + self._real_time_balance_update = False + self._trading_fees + + @staticmethod + def bitstamp_order_type(order_type: OrderType) -> str: + return order_type.name.upper() + + @staticmethod + def to_hb_order_type(bitstamp_type: str) -> OrderType: + return OrderType[bitstamp_type] + + @property + def authenticator(self): + return BitstampAuth( + api_key=self.api_key, + secret_key=self.secret_key, + time_provider=self._time_synchronizer) + + @property + def name(self) -> str: + return "bitstamp" + + @property + def rate_limits_rules(self): + return CONSTANTS.RATE_LIMITS + + @property + def domain(self): + return self._domain + + @property + def client_order_id_max_length(self): + return CONSTANTS.MAX_ORDER_ID_LEN + + @property + def client_order_id_prefix(self): + return CONSTANTS.HBOT_ORDER_ID_PREFIX + + @property + def trading_rules_request_path(self): + return CONSTANTS.EXCHANGE_INFO_PATH_URL + + @property + def trading_pairs_request_path(self): + return CONSTANTS.EXCHANGE_INFO_PATH_URL + + @property + def check_network_request_path(self): + return CONSTANTS.STATUS_URL + + @property + def trading_pairs(self): + return self._trading_pairs + + @property + def 
is_cancel_request_in_exchange_synchronous(self) -> bool: + return True + + @property + def is_trading_required(self) -> bool: + return self._trading_required + + def supported_order_types(self): + return [OrderType.LIMIT, OrderType.LIMIT_MAKER, OrderType.MARKET] + + async def get_all_pairs_prices(self) -> List[Dict[str, str]]: + pairs_prices = await self._api_get(path_url=CONSTANTS.CURRENCIES_URL) + return pairs_prices + + def convert_from_exchange_trading_pair(self, exchange_trading_pair: str) -> Optional[str]: + try: + base_asset, quote_asset = exchange_trading_pair.split("/") + except Exception as e: + raise ValueError(f"Error parsing the trading pair {exchange_trading_pair}: {e}") + + return f"{base_asset}-{quote_asset}" + + def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): + return CONSTANTS.TIMESTAMP_ERROR_CODE in str( + request_exception + ) and CONSTANTS.TIMESTAMP_ERROR_MESSAGE in str(request_exception) + + def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: + return CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE in str( + status_update_exception + ) and CONSTANTS.ORDER_NOT_EXIST_MESSAGE in str(status_update_exception) + + def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool: + return CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE in str( + cancelation_exception + ) and CONSTANTS.ORDER_NOT_EXIST_MESSAGE in str(cancelation_exception) + + def _create_web_assistants_factory(self) -> WebAssistantsFactory: + return web_utils.build_api_factory( + throttler=self._throttler, + time_synchronizer=self._time_synchronizer, + time_provider=self._time_provider, + domain=self._domain, + auth=self._auth) + + def _create_order_book_data_source(self) -> OrderBookTrackerDataSource: + return BitstampAPIOrderBookDataSource( + trading_pairs=self._trading_pairs, + connector=self, + domain=self.domain, + api_factory=self._web_assistants_factory) + + def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource: + return BitstampAPIUserStreamDataSource( + auth=self._auth, + trading_pairs=self._trading_pairs, + connector=self, + api_factory=self._web_assistants_factory, + domain=self.domain, + ) + + def _get_fee(self, + base_currency: str, + quote_currency: str, + order_type: OrderType, + order_side: TradeType, + amount: Decimal, + price: Decimal = s_decimal_NaN, + is_maker: Optional[bool] = None) -> TradeFeeBase: + + is_maker = is_maker or (order_type is OrderType.LIMIT_MAKER) + trading_pair = combine_to_hb_trading_pair(base=base_currency, quote=quote_currency) + + trade_fee_schema = self._trading_fees.get(trading_pair) + if trade_fee_schema: + fee_percent: Decimal = ( + trade_fee_schema.maker_percent_fee_decimal if is_maker else trade_fee_schema.taker_percent_fee_decimal + ) + fee = TradeFeeBase.new_spot_fee( + fee_schema=trade_fee_schema, + trade_type=order_side, + percent=fee_percent + ) + else: + fee = build_trade_fee( + self.name, + is_maker, + base_currency=base_currency, + quote_currency=quote_currency, + order_type=order_type, + order_side=order_side, + amount=amount, + price=price, + ) + return fee + + async def _place_order(self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Decimal, + **kwargs) -> Tuple[str, float]: + api_params = { + "amount": f"{amount:f}", + "client_order_id": order_id + } + + side_str = CONSTANTS.SIDE_BUY if trade_type is TradeType.BUY else CONSTANTS.SIDE_SELL + symbol = await 
self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + if order_type.is_limit_type(): + order_url = f"/{side_str}/{symbol}/" + api_params["price"] = f"{price:f}" + if order_type == OrderType.LIMIT_MAKER: + # Set Maker-Or-Cancel, this ensures that the order is not fully or partially filled when placed. + # In case it would be, the order is cancelled. + api_params["moc_order"] = True + else: + order_url = f"/{side_str}/market/{symbol}/" + + order_result = await self._api_post( + path_url=order_url, + data=api_params, + is_auth_required=True, + limit_id=CONSTANTS.ORDER_CREATE_URL_LIMIT_ID + ) + + if order_result.get("status", "") == "error": + raise IOError(f"Error placing order. Error: {order_result}") + + o_id = str(order_result["id"]) + transact_time = datetime.fromisoformat(order_result["datetime"]).timestamp() + + return o_id, transact_time + + async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): + exchange_order_id = await tracked_order.get_exchange_order_id() + + cancel_response = await self._api_post( + path_url=f"{CONSTANTS.ORDER_CANCEL_URL}", + data={"id": exchange_order_id}, + is_auth_required=True + ) + if cancel_response.get("status", "") == "error": + raise IOError(f"Error canceling order. Error: {cancel_response}") + + return str(cancel_response.get("id", "")) == exchange_order_id + + async def _format_trading_rules(self, exchange_info: List[Dict[str, Any]]) -> List[TradingRule]: + retval = [] + for info in filter(bitstamp_utils.is_exchange_information_valid, exchange_info): + try: + retval.append( + TradingRule( + trading_pair=self.convert_from_exchange_trading_pair(info["name"]), + min_price_increment=Decimal(f"1e-{info['counter_decimals']}"), + min_base_amount_increment=Decimal(f"1e-{info['base_decimals']}"), + min_quote_amount_increment=Decimal(f"1e-{info['counter_decimals']}"), + min_notional_size=Decimal(info["minimum_order"].split(" ")[0])) + ) + except Exception: + self.logger().exception(f"Error parsing the trading pair rule {info}. Skipping.") + return retval + + async def _update_trading_fees(self): + """ + Update fees information from the exchange + """ + trading_fees: List[Dict[str, Any]] = await self._api_post( + path_url=CONSTANTS.TRADING_FEES_URL, + is_auth_required=True + ) + + for fee_info in trading_fees: + try: + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=fee_info.get("market")) + except KeyError: + continue + + if trading_pair: + fees = fee_info["fees"] + self._trading_fees[trading_pair] = TradeFeeSchema( + maker_percent_fee_decimal=Decimal(fees["maker"]), + taker_percent_fee_decimal=Decimal(fees["taker"]) + ) + + async def _user_stream_event_listener(self): + """ + This functions runs in background continuously processing the events received from the exchange by the user + stream data source. It keeps reading events from the queue until the task is interrupted. + The events received are balance updates, order updates and trade events. 
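+        Trade and self-trade events are forwarded to the trade event handler; every other recognized event is processed as an order update.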
+ """ + trade_events = { + CONSTANTS.USER_TRADE, + CONSTANTS.USER_SELF_TRADE, + } + async for event_message in self._iter_user_event_queue(): + try: + event = event_message.get("event") + + if event in trade_events: + self._process_user_stream_trade_event(event, event_message) + else: + self._process_user_stream_order_event(event, event_message) + + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error in user stream listener loop.", exc_info=True) + await self._sleep(5.0) + + def _process_user_stream_trade_event(self, event: str, event_message: Dict[str, Any]): + try: + event_data = event_message.get("data", {}) + + if event == CONSTANTS.USER_TRADE: + client_order_id = str(event_data.get("client_order_id")) + order: InFlightOrder = self._order_tracker.all_fillable_orders.get(client_order_id) + if order is None: + self.logger().debug(f"Received event for unknown order ID: {event_message}") + return + + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=order.trade_type, + flat_fees=[TokenAmount(amount=Decimal(event_data["fee"]), token=order.quote_asset)] + ) + + amount = Decimal(event_data["amount"]) + price = Decimal(event_data["price"]) + trade_update = TradeUpdate( + trade_id=str(event_data["id"]), + client_order_id=order.client_order_id, + exchange_order_id=event_data["order_id"], + trading_pair=order.trading_pair, + fee=fee, + fill_base_amount=amount, + fill_quote_amount=price * amount, + fill_price=price, + fill_timestamp=float(event_data["microtimestamp"]) * 1e-3, + ) + self._order_tracker.process_trade_update(trade_update) + else: + # These trades don't incur any fees, but they do offset each other. + # We register them as regular fills so that the executed amount is updated correctly. 
+ + buy_order_id = str(event_data.get("buy_order_id")) + sell_order_id = str(event_data.get("sell_order_id")) + + amount = Decimal(event_data["amount"]) + price = Decimal(event_data["price"]) + + buy_order: InFlightOrder = self._order_tracker.all_fillable_orders_by_exchange_order_id.get(buy_order_id) + if buy_order: + buy_trade_update = TradeUpdate( + trade_id=f"{buy_order_id}-{sell_order_id}", + client_order_id=buy_order.client_order_id, + exchange_order_id=buy_order_id, + trading_pair=buy_order.trading_pair, + fee=TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=buy_order.trade_type, + flat_fees=TokenAmount(amount=Decimal(0), token=buy_order.quote_asset)), + fill_base_amount=amount, + fill_quote_amount=price * amount, + fill_price=price, + fill_timestamp=float(event_data["timestamp"]) + ) + self._order_tracker.process_trade_update(buy_trade_update) + + sell_order: InFlightOrder = self._order_tracker.all_fillable_orders_by_exchange_order_id.get(sell_order_id) + if sell_order: + sell_trade_update = TradeUpdate( + trade_id=f"{buy_order_id}-{sell_order_id}", + client_order_id=sell_order.client_order_id, + exchange_order_id=sell_order_id, + trading_pair=sell_order.trading_pair, + fee=TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=sell_order.trade_type, + flat_fees=TokenAmount(amount=Decimal(0), token=sell_order.quote_asset)), + fill_base_amount=amount, + fill_quote_amount=price * amount, + fill_price=price, + fill_timestamp=float(event_data["timestamp"]), + ) + self._order_tracker.process_trade_update(sell_trade_update) + + except Exception as e: + raise ValueError(f"Error parsing the user stream trade event {event_message}: {e}") + + def _process_user_stream_order_event(self, event: str, event_message: Dict[str, Any]): + try: + event_data = event_message.get("data", {}) + client_order_id = str(event_data.get("client_order_id")) + order: InFlightOrder = self._order_tracker.all_fillable_orders.get(client_order_id) + if order is None: + self.logger().debug(f"Received event for unknown order ID: {event_message}") + return + + # Determine the new state of the order + new_state = OrderState.OPEN + if event == CONSTANTS.USER_ORDER_CHANGED and Decimal(event_data["amount_traded"]) > 0: + new_state = OrderState.PARTIALLY_FILLED + elif event == CONSTANTS.USER_ORDER_DELETED: + new_state = OrderState.FILLED if event_data["amount"] == 0 else OrderState.CANCELED + + order_update = OrderUpdate( + client_order_id=order.client_order_id, + exchange_order_id=order.exchange_order_id, + trading_pair=order.trading_pair, + update_timestamp=float(event_data["datetime"]), + new_state=new_state, + ) + self._order_tracker.process_order_update(order_update) + except Exception as e: + raise ValueError(f"Error parsing the user stream order event {event_message}: {e}") + + async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: + all_fills_response = await self._api_post( + path_url=CONSTANTS.ORDER_STATUS_URL, + data={ + "client_order_id": order.client_order_id, + "omit_transactions": "false", + }, + is_auth_required=True + ) + + exchange_order_id = await order.get_exchange_order_id() + trade_updates = [] + for trade in all_fills_response.get("transactions", []): + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=order.trade_type, + flat_fees=[TokenAmount(amount=Decimal(trade["fee"]), token=order.quote_asset)] + ) + trade_update = TradeUpdate( + trade_id=str(trade["tid"]), + 
client_order_id=order.client_order_id, + exchange_order_id=exchange_order_id, + trading_pair=order.trading_pair, + fee=fee, + fill_base_amount=Decimal(trade[order.base_asset.lower()]), + fill_quote_amount=Decimal(trade[order.quote_asset.lower()]), + fill_price=Decimal(trade["price"]), + fill_timestamp=datetime.fromisoformat(trade["datetime"]).timestamp(), + ) + trade_updates.append(trade_update) + + return trade_updates + + async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate: + updated_order_data = await self._api_post( + path_url=CONSTANTS.ORDER_STATUS_URL, + data={ + "client_order_id": tracked_order.client_order_id, + "omit_transactions": "true" + }, + is_auth_required = True + ) + + if updated_order_data.get("status", "") == "error": + raise IOError(f"Error requesting order status. Error: {updated_order_data}") + + new_state = CONSTANTS.ORDER_STATE[updated_order_data["status"]] + amount_remaining = Decimal(updated_order_data["amount_remaining"]) + if new_state == OrderState.OPEN and amount_remaining < tracked_order.amount: + new_state = OrderState.PARTIALLY_FILLED + + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=updated_order_data["id"], + trading_pair=tracked_order.trading_pair, + update_timestamp=datetime.fromisoformat(updated_order_data["datetime"]).timestamp(), + new_state=new_state, + ) + + return order_update + + async def _update_balances(self): + local_asset_names = set(self._account_balances.keys()) + remote_asset_names = set() + + balances = await self._api_post( + path_url=CONSTANTS.ACCOUNT_BALANCES_URL, + is_auth_required=True + ) + + for balance_entry in balances: + asset_name = balance_entry["currency"].upper() + self._account_available_balances[asset_name] = Decimal(balance_entry["available"]) + self._account_balances[asset_name] = Decimal(balance_entry["total"]) + remote_asset_names.add(asset_name) + + asset_names_to_remove = local_asset_names.difference(remote_asset_names) + for asset_name in asset_names_to_remove: + del self._account_available_balances[asset_name] + del self._account_balances[asset_name] + + def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: List[Dict[str, Any]]): + mapping = bidict() + for info in filter(bitstamp_utils.is_exchange_information_valid, exchange_info): + try: + mapping[info["url_symbol"]] = self.convert_from_exchange_trading_pair(info["name"]) + except Exception: + self.logger().error(f"Error parsing trading pair symbol data {info}. 
Skipping.") + + self._set_trading_pair_symbol_map(mapping) + + async def _get_last_traded_price(self, trading_pair: str) -> float: + symbol = await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + + resp_json = await self._api_get( + method=RESTMethod.GET, + path_url=CONSTANTS.TICKER_URL.format(symbol), + limit_id=CONSTANTS.TICKER_URL_LIMIT_ID + ) + + return float(resp_json["last"]) diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_order_book.py b/hummingbot/connector/exchange/bitstamp/bitstamp_order_book.py new file mode 100644 index 0000000000..9288c45c0e --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_order_book.py @@ -0,0 +1,74 @@ +from typing import Dict, Optional + +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType + + +class BitstampOrderBook(OrderBook): + + @classmethod + def snapshot_message_from_exchange(cls, + msg: Dict[str, any], + timestamp: float, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a snapshot message with the order book snapshot message + :param msg: the response from the exchange when requesting the order book snapshot + :param timestamp: the snapshot timestamp + :param metadata: a dictionary with extra information to add to the snapshot data + :return: a snapshot message with the snapshot information received from the exchange + """ + if metadata: + msg.update(metadata) + + return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { + "trading_pair": msg["trading_pair"], + "update_id": float(msg["timestamp"]), + "bids": msg["bids"], + "asks": msg["asks"] + }, timestamp) + + @classmethod + def diff_message_from_exchange(cls, + msg: Dict[str, any], + timestamp: Optional[float] = None, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a diff message with the changes in the order book received from the exchange + :param msg: the changes in the order book + :param timestamp: the timestamp of the difference + :param metadata: a dictionary with extra information to add to the difference data + :return: a diff message with the changes in the order book notified by the exchange + """ + data = msg["data"] + if metadata: + data.update(metadata) + + return OrderBookMessage(OrderBookMessageType.DIFF, { + "trading_pair": data["trading_pair"], + "update_id": float(data["timestamp"]), + "bids": data["bids"], + "asks": data["asks"] + }, timestamp) + + @classmethod + def trade_message_from_exchange(cls, msg: Dict[str, any], metadata: Optional[Dict] = None): + """ + Creates a trade message with the information from the trade event sent by the exchange + :param msg: the trade event details sent by the exchange + :param metadata: a dictionary with extra information to add to trade message + :return: a trade message with the details of the trade as provided by the exchange + """ + data = msg["data"] + if metadata: + data.update(metadata) + + return OrderBookMessage(OrderBookMessageType.TRADE, { + "trading_pair": data["trading_pair"], + "trade_type": float(TradeType.SELL.value) if data["type"] else float(TradeType.BUY.value), + "trade_id": str(data["id"]), + "update_id": float(data["microtimestamp"]), + "price": data["price"], + "amount": data["amount"] + }) diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_utils.py b/hummingbot/connector/exchange/bitstamp/bitstamp_utils.py new file mode 100644 index 
0000000000..0f0d3c81d2 --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_utils.py @@ -0,0 +1,52 @@ +from decimal import Decimal +from typing import Any, Dict + +from pydantic import Field, SecretStr + +from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData +from hummingbot.core.data_type.trade_fee import TradeFeeSchema + +CENTRALIZED = True +EXAMPLE_PAIR = "ZRX-ETH" + +DEFAULT_FEES = TradeFeeSchema( + maker_percent_fee_decimal=Decimal("0.1"), + taker_percent_fee_decimal=Decimal("0.2") +) + + +def is_exchange_information_valid(exchange_info: Dict[str, Any]) -> bool: + """ + Verifies if a trading pair is enabled to operate with based on its exchange information + :param exchange_info: the exchange information for a trading pair + :return: True if the trading pair is enabled, False otherwise + """ + return exchange_info.get("trading") == "Enabled" + + +class BitstampConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="bitstamp", const=True, client_data=None) + bitstamp_api_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Bitstamp API key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + bitstamp_api_secret: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Bitstamp API secret", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ) + ) + + class Config: + title = "bitstamp" + + +KEYS = BitstampConfigMap.construct() diff --git a/hummingbot/connector/exchange/bitstamp/bitstamp_web_utils.py b/hummingbot/connector/exchange/bitstamp/bitstamp_web_utils.py new file mode 100644 index 0000000000..cbb5481163 --- /dev/null +++ b/hummingbot/connector/exchange/bitstamp/bitstamp_web_utils.py @@ -0,0 +1,95 @@ +import json +from typing import Callable, Optional + +import hummingbot.connector.exchange.bitstamp.bitstamp_constants as CONSTANTS +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.connector.utils import TimeSynchronizerRESTPreProcessor +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest +from hummingbot.core.web_assistant.rest_pre_processors import RESTPreProcessorBase +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + + +class BitstampRESTPreProcessor(RESTPreProcessorBase): + CONTENT_TYPE_HEADER = "Content-Type" + + async def pre_process(self, request: RESTRequest) -> RESTRequest: + + if not request.data and self.CONTENT_TYPE_HEADER in request.headers: + # aiohttp adds the Content-Type header which is not allowed by bitstamp when sending an empty body. + request.headers[self.CONTENT_TYPE_HEADER] = '' + return request + + if request.method != RESTMethod.GET: + request.headers[self.CONTENT_TYPE_HEADER] = "application/x-www-form-urlencoded" + + # rest_assistant converts the data dictionary to json but we need a urlencoded string instead + # the actual url encoding of this is done by aiohttp. + request.data = json.loads(request.data) + + return request + + +def public_rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: + """ + Creates a full URL for provided public REST endpoint + :param path_url: a public REST endpoint + :param domain: the Bitstamp domain to connect to ("com" or "us"). 
The default value is "com" + :return: the full URL to the endpoint + """ + return CONSTANTS.REST_URL.format(domain) + CONSTANTS.API_VERSION + path_url + + +def private_rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: + """ + Creates a full URL for provided private REST endpoint + :param path_url: a private REST endpoint + :param domain: the Bitstamp domain to connect to ("com" or "us"). The default value is "com" + :return: the full URL to the endpoint + """ + return CONSTANTS.REST_URL.format(domain) + CONSTANTS.API_VERSION + path_url + + +def build_api_factory( + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + time_provider: Optional[Callable] = None, + auth: Optional[AuthBase] = None, ) -> WebAssistantsFactory: + time_synchronizer = time_synchronizer or TimeSynchronizer() + time_provider = time_provider or (lambda: get_current_server_time(throttler=throttler)) + api_factory = WebAssistantsFactory( + throttler=throttler, + auth=auth, + rest_pre_processors=[ + BitstampRESTPreProcessor(), + TimeSynchronizerRESTPreProcessor(synchronizer=time_synchronizer, time_provider=time_provider), + ], + ) + return api_factory + + +def build_api_factory_without_time_synchronizer_pre_processor(throttler: AsyncThrottler) -> WebAssistantsFactory: + api_factory = WebAssistantsFactory(throttler=throttler) + return api_factory + + +def create_throttler() -> AsyncThrottler: + return AsyncThrottler(CONSTANTS.RATE_LIMITS) + + +async def get_current_server_time( + throttler: Optional[AsyncThrottler] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, +) -> float: + throttler = throttler or create_throttler() + api_factory = build_api_factory_without_time_synchronizer_pre_processor(throttler=throttler) + rest_assistant = await api_factory.get_rest_assistant() + response = await rest_assistant.execute_request( + url=public_rest_url(path_url=CONSTANTS.STATUS_URL, domain=domain), + method=RESTMethod.GET, + throttler_limit_id=CONSTANTS.STATUS_URL, + ) + server_time = response["server_time"] + return server_time diff --git a/test/connector/derivative/binance_perpetual/__init__.py b/hummingbot/connector/exchange/bitstamp/dummy.pxd similarity index 100% rename from test/connector/derivative/binance_perpetual/__init__.py rename to hummingbot/connector/exchange/bitstamp/dummy.pxd diff --git a/test/connector/exchange/__init__.py b/hummingbot/connector/exchange/bitstamp/dummy.pyx similarity index 100% rename from test/connector/exchange/__init__.py rename to hummingbot/connector/exchange/bitstamp/dummy.pyx diff --git a/hummingbot/connector/exchange/bybit/bybit_api_order_book_data_source.py b/hummingbot/connector/exchange/bybit/bybit_api_order_book_data_source.py index 1f24e16828..a2e52c3514 100644 --- a/hummingbot/connector/exchange/bybit/bybit_api_order_book_data_source.py +++ b/hummingbot/connector/exchange/bybit/bybit_api_order_book_data_source.py @@ -23,7 +23,6 @@ class BybitAPIOrderBookDataSource(OrderBookTrackerDataSource): HEARTBEAT_TIME_INTERVAL = 30.0 TRADE_STREAM_ID = 1 DIFF_STREAM_ID = 2 - ONE_HOUR = 60 * 60 _logger: Optional[HummingbotLogger] = None _trading_pair_symbol_map: Dict[str, Mapping[str, str]] = {} @@ -38,7 +37,6 @@ def __init__(self, time_synchronizer: Optional[TimeSynchronizer] = None): super().__init__(trading_pairs) self._connector = connector - self._diff_messages_queue_key = CONSTANTS.DIFF_EVENT_TYPE self._domain = domain self._time_synchronizer = time_synchronizer self._throttler = 
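A minimal usage sketch for the helpers above, fetching the exchange time via the factory without the time-synchronizer pre-processor. This assumes network access to Bitstamp; no credentials are needed for the status endpoint.

```python
import asyncio

from hummingbot.connector.exchange.bitstamp import bitstamp_web_utils as web_utils


async def main():
    # Calls the STATUS_URL endpoint defined in bitstamp_constants and returns
    # the "server_time" field, exactly as get_current_server_time() does above.
    server_time = await web_utils.get_current_server_time()
    print(server_time)


if __name__ == "__main__":
    asyncio.run(main())
```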
throttler @@ -49,6 +47,8 @@ def __init__(self, ) self._message_queue: Dict[str, asyncio.Queue] = defaultdict(asyncio.Queue) self._last_ws_message_sent_timestamp = 0 + self._category = "spot" + self._depth = CONSTANTS.SPOT_ORDER_BOOK_DEPTH async def get_last_traded_prices(self, trading_pairs: List[str], @@ -64,17 +64,20 @@ async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any :return: the response from the exchange (JSON dictionary) """ params = { + "category": self._category, "symbol": await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair), "limit": "1000" } - data = await self._connector._api_request(path_url=CONSTANTS.SNAPSHOT_PATH_URL, - method=RESTMethod.GET, - params=params) + data = await self._connector._api_request( + path_url=CONSTANTS.SNAPSHOT_PATH_URL, + method=RESTMethod.GET, + params=params + ) return data['result'] async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair) - snapshot_timestamp: float = float(snapshot["time"]) * 1e-3 + snapshot_timestamp: float = float(snapshot["ts"]) * 1e-3 snapshot_msg: OrderBookMessage = BybitOrderBook.snapshot_message_from_exchange_rest( snapshot, snapshot_timestamp, @@ -83,18 +86,25 @@ async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: return snapshot_msg async def _parse_trade_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): - trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol=raw_message["symbol"]) - for trades in raw_message["data"]: + data = raw_message["data"] + for trade in data: + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol=trade["s"]) trade_message: OrderBookMessage = BybitOrderBook.trade_message_from_exchange( - trades, {"trading_pair": trading_pair}) + trade, + {"trading_pair": trading_pair} + ) message_queue.put_nowait(trade_message) async def _parse_order_book_diff_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue): - trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol=raw_message["symbol"]) - for diff_message in raw_message["data"]: - order_book_message: OrderBookMessage = BybitOrderBook.diff_message_from_exchange( - diff_message, diff_message["t"], {"trading_pair": trading_pair}) - message_queue.put_nowait(order_book_message) + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol( + symbol=raw_message["data"]["s"] + ) + order_book_message: OrderBookMessage = BybitOrderBook.diff_message_from_exchange( + raw_message['data'], + raw_message["ts"] * 1e-3, + {"trading_pair": trading_pair} + ) + message_queue.put_nowait(order_book_message) async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLoop, output: asyncio.Queue): """ @@ -106,7 +116,7 @@ async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLo """ while True: try: - await asyncio.wait_for(self._process_ob_snapshot(snapshot_queue=output), timeout=self.ONE_HOUR) + await asyncio.wait_for(self._process_ob_snapshot(snapshot_queue=output), timeout=CONSTANTS.ONE_SECOND) except asyncio.TimeoutError: await self._take_full_order_book_snapshot(trading_pairs=self._trading_pairs, snapshot_queue=output) except asyncio.CancelledError: @@ -125,7 +135,7 @@ async def listen_for_subscriptions(self): while True: try: ws: WSAssistant = await self._api_factory.get_ws_assistant() - await 
ws.connect(ws_url=CONSTANTS.WSS_V1_PUBLIC_URL[self._domain]) + await ws.connect(ws_url=CONSTANTS.WSS_PUBLIC_URL[self._domain]) await self._subscribe_channels(ws) self._last_ws_message_sent_timestamp = self._time() @@ -137,7 +147,7 @@ async def listen_for_subscriptions(self): except asyncio.TimeoutError: ping_time = self._time() payload = { - "ping": int(ping_time * 1e3) + "op": "ping" } ping_request = WSJSONRequest(payload=payload) await ws.send(request=ping_request) @@ -161,30 +171,24 @@ async def _subscribe_channels(self, ws: WSAssistant): try: for trading_pair in self._trading_pairs: symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + trade_topic = self._get_trade_topic_from_symbol(symbol) trade_payload = { - "topic": "trade", - "event": "sub", - "symbol": symbol, - "params": { - "binary": False - } + "op": "subscribe", + "args": [trade_topic] } subscribe_trade_request: WSJSONRequest = WSJSONRequest(payload=trade_payload) - depth_payload = { - "topic": "diffDepth", - "event": "sub", - "symbol": symbol, - "params": { - "binary": False - } + orderbook_topic = self._get_ob_topic_from_symbol(symbol, self._depth) + orderbook_payload = { + "op": "subscribe", + "args": [orderbook_topic] } - subscribe_orderbook_request: WSJSONRequest = WSJSONRequest(payload=depth_payload) + subscribe_orderbook_request: WSJSONRequest = WSJSONRequest(payload=orderbook_payload) await ws.send(subscribe_trade_request) await ws.send(subscribe_orderbook_request) - self.logger().info(f"Subscribed to public order book and trade channels of {trading_pair}...") + self.logger().info("Subscribed to public order book and trade channels...") except asyncio.CancelledError: raise except Exception: @@ -197,26 +201,36 @@ async def _subscribe_channels(self, ws: WSAssistant): async def _process_ws_messages(self, ws: WSAssistant): async for ws_response in ws.iter_messages(): data = ws_response.data - if data.get("msg") == "Success": + if data.get("op") == "subscribe": + if data.get("success") is False: + self.logger().error( + "Unexpected error occurred subscribing to order book trading and delta streams...", + exc_info=True + ) continue - event_type = data.get("topic") - if event_type == CONSTANTS.DIFF_EVENT_TYPE: - if data.get("f"): - self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE].put_nowait(data) - else: - self._message_queue[CONSTANTS.DIFF_EVENT_TYPE].put_nowait(data) - elif event_type == CONSTANTS.TRADE_EVENT_TYPE: - self._message_queue[CONSTANTS.TRADE_EVENT_TYPE].put_nowait(data) + event_type = data.get("type") + topic = data.get("topic") + if event_type == CONSTANTS.TRADE_EVENT_TYPE and "publicTrade" in topic: + channel = self._trade_messages_queue_key + elif event_type == CONSTANTS.ORDERBOOK_SNAPSHOT_EVENT_TYPE and "orderbook" in topic: + channel = self._snapshot_messages_queue_key + elif event_type == CONSTANTS.ORDERBOOK_DIFF_EVENT_TYPE and "orderbook" in topic: + channel = self._diff_messages_queue_key + else: + channel = None + if channel: + self._message_queue[channel].put_nowait(data) async def _process_ob_snapshot(self, snapshot_queue: asyncio.Queue): message_queue = self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] while True: try: json_msg = await message_queue.get() + data = json_msg["data"] trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol( - symbol=json_msg["symbol"]) + symbol=data["s"]) order_book_message: OrderBookMessage = BybitOrderBook.snapshot_message_from_exchange_websocket( - json_msg["data"][0], json_msg["data"][0], 
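For clarity, the V5 public-stream messages built by the updated `_subscribe_channels` and the heartbeat handler reduce to the following payload shapes. Symbol and depth are illustrative; the depth default comes from `SPOT_ORDER_BOOK_DEPTH`.

```python
# Illustrative Bybit V5 public-stream requests assembled above.
symbol, depth = "BTCUSDT", 50  # example values; depth defaults to SPOT_ORDER_BOOK_DEPTH

trade_subscribe = {"op": "subscribe", "args": [f"publicTrade.{symbol}"]}
orderbook_subscribe = {"op": "subscribe", "args": [f"orderbook.{depth}.{symbol}"]}
keepalive_ping = {"op": "ping"}  # sent whenever the heartbeat timeout elapses
```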
{"trading_pair": trading_pair}) + data, json_msg["ts"], {"trading_pair": trading_pair}) snapshot_queue.put_nowait(order_book_message) except asyncio.CancelledError: raise @@ -228,7 +242,7 @@ async def _take_full_order_book_snapshot(self, trading_pairs: List[str], snapsho for trading_pair in trading_pairs: try: snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair=trading_pair) - snapshot_timestamp: float = float(snapshot["time"]) * 1e-3 + snapshot_timestamp: float = float(snapshot["ts"]) * 1e-3 snapshot_msg: OrderBookMessage = BybitOrderBook.snapshot_message_from_exchange_rest( snapshot, snapshot_timestamp, @@ -245,3 +259,9 @@ async def _take_full_order_book_snapshot(self, trading_pairs: List[str], snapsho def _time(self): return time.time() + + def _get_trade_topic_from_symbol(self, symbol: str) -> str: + return f"publicTrade.{symbol}" + + def _get_ob_topic_from_symbol(self, symbol: str, depth: int) -> str: + return f"orderbook.{depth}.{symbol}" diff --git a/hummingbot/connector/exchange/bybit/bybit_api_user_stream_data_source.py b/hummingbot/connector/exchange/bybit/bybit_api_user_stream_data_source.py index 58e893f82a..babddf7436 100644 --- a/hummingbot/connector/exchange/bybit/bybit_api_user_stream_data_source.py +++ b/hummingbot/connector/exchange/bybit/bybit_api_user_stream_data_source.py @@ -67,24 +67,19 @@ async def listen_for_user_stream(self, output: asyncio.Queue): ws = None while True: try: - ws: WSAssistant = await self._get_ws_assistant() - await ws.connect(ws_url=CONSTANTS.WSS_PRIVATE_URL[self._domain]) - await self._authenticate_connection(ws) + ws: WSAssistant = await self._connected_websocket_assistant(self._domain) + await self._subscribe_channels(ws) self._last_ws_message_sent_timestamp = self._time() while True: try: - seconds_until_next_ping = (CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL - - (self._time() - self._last_ws_message_sent_timestamp)) + seconds_until_next_ping = ( + CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL - + (self._time() - self._last_ws_message_sent_timestamp) + ) await asyncio.wait_for( self._process_ws_messages(ws=ws, output=output), timeout=seconds_until_next_ping) except asyncio.TimeoutError: - ping_time = self._time() - payload = { - "ping": int(ping_time * 1e3) - } - ping_request = WSJSONRequest(payload=payload) - await ws.send(request=ping_request) - self._last_ws_message_sent_timestamp = ping_time + await self._ping_server(ws) except asyncio.CancelledError: raise except Exception: @@ -94,28 +89,111 @@ async def listen_for_user_stream(self, output: asyncio.Queue): ws and await ws.disconnect() await self._sleep(5) + async def _ping_server(self, ws: WSAssistant): + ping_time = self._time() + payload = { + "op": "ping", + "args": int(ping_time * 1e3) + } + ping_request = WSJSONRequest(payload=payload) + await ws.send(request=ping_request) + self._last_ws_message_sent_timestamp = ping_time + + async def _subscribe_channels(self, ws: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. 
+ :param ws: the websocket assistant used to connect to the exchange + """ + try: + payload = { + "op": "subscribe", + "args": [f"{CONSTANTS.WS_SUBSCRIPTION_ORDERS_ENDPOINT_NAME}"], + } + subscribe_orders_request = WSJSONRequest(payload) + payload = { + "op": "subscribe", + "args": [f"{CONSTANTS.WS_SUBSCRIPTION_EXECUTIONS_ENDPOINT_NAME}"], + } + subscribe_executions_request = WSJSONRequest(payload) + payload = { + "op": "subscribe", + "args": [f"{CONSTANTS.WS_SUBSCRIPTION_WALLET_ENDPOINT_NAME}"], + } + subscribe_wallet_request = WSJSONRequest(payload) + + await ws.send(subscribe_orders_request) + await ws.send(subscribe_executions_request) + await ws.send(subscribe_wallet_request) + + self.logger().info("Subscribed to private orders, executions and wallet channels") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to private channels...", + exc_info=True + ) + raise + async def _authenticate_connection(self, ws: WSAssistant): """ Sends the authentication message. :param ws: the websocket assistant used to connect to the exchange """ - auth_message: WSJSONRequest = WSJSONRequest(payload=self._auth.generate_ws_authentication_message()) - await ws.send(auth_message) + request: WSJSONRequest = WSJSONRequest( + payload=self._auth.generate_ws_auth_message() + ) + await ws.send(request) async def _process_ws_messages(self, ws: WSAssistant, output: asyncio.Queue): async for ws_response in ws.iter_messages(): data = ws_response.data - if isinstance(data, list): - for message in data: - if message["e"] in ["executionReport", "outboundAccountInfo"]: - output.put_nowait(message) - elif data.get("auth") == "fail": - raise IOError("Private channel authentication failed.") + if "op" in data: + if data.get("op") == "auth": + await self._process_ws_auth_msg(data) + elif data.get("op") == "subscribe": + if data.get("success") is False: + self.logger().error( + "Unexpected error occurred subscribing to private channels...", + exc_info=True + ) + continue + topic = data.get("topic") + channel = "" + if topic == CONSTANTS.WS_SUBSCRIPTION_ORDERS_ENDPOINT_NAME: + channel = CONSTANTS.PRIVATE_ORDER_CHANNEL + elif topic == CONSTANTS.WS_SUBSCRIPTION_EXECUTIONS_ENDPOINT_NAME: + channel = CONSTANTS.PRIVATE_TRADE_CHANNEL + elif topic == CONSTANTS.WS_SUBSCRIPTION_WALLET_ENDPOINT_NAME: + channel = CONSTANTS.PRIVATE_WALLET_CHANNEL + else: + output.put_nowait(data) + if channel: + data["channel"] = channel + output.put_nowait(data) + + async def _process_ws_auth_msg(self, data: dict): + if not data.get("success"): + raise IOError(f"Private channel authentication failed - {data['ret_msg']}") + else: + self.logger().info("Private channel authentication success.") async def _get_ws_assistant(self) -> WSAssistant: if self._ws_assistant is None: self._ws_assistant = await self._api_factory.get_ws_assistant() return self._ws_assistant + async def _connected_websocket_assistant(self, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> WSAssistant: + ws: WSAssistant = await self._get_ws_assistant() + await ws.connect( + ws_url=CONSTANTS.WSS_PRIVATE_URL[domain], + ping_timeout=CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL + ) + await self._authenticate_connection(ws) + return ws + + def _get_server_timestamp(self): + return web_utils.get_current_server_time() + def _time(self): return time.time() diff --git a/hummingbot/connector/exchange/bybit/bybit_auth.py b/hummingbot/connector/exchange/bybit/bybit_auth.py index 6c4b82190d..6c43064795 100644 --- 
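The private-stream handshake above boils down to one auth message followed by three topic subscriptions. A sketch of the payload shapes, using the endpoint-name constants introduced later in this diff (placeholders in angle brackets):

```python
# Illustrative Bybit V5 private-stream payloads sent by the user stream data source above.
auth_message = {"op": "auth", "args": ["<api_key>", "<expires_ms>", "<signature>"]}

private_subscriptions = [
    {"op": "subscribe", "args": ["order"]},      # WS_SUBSCRIPTION_ORDERS_ENDPOINT_NAME
    {"op": "subscribe", "args": ["execution"]},  # WS_SUBSCRIPTION_EXECUTIONS_ENDPOINT_NAME
    {"op": "subscribe", "args": ["wallet"]},     # WS_SUBSCRIPTION_WALLET_ENDPOINT_NAME
]
```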
a/hummingbot/connector/exchange/bybit/bybit_auth.py +++ b/hummingbot/connector/exchange/bybit/bybit_auth.py @@ -1,14 +1,12 @@ -import hashlib import hmac import time -from collections import OrderedDict from typing import Any, Dict, Optional from urllib.parse import urlencode import hummingbot.connector.exchange.bybit.bybit_constants as CONSTANTS from hummingbot.connector.time_synchronizer import TimeSynchronizer from hummingbot.core.web_assistant.auth import AuthBase -from hummingbot.core.web_assistant.connections.data_types import RESTRequest, WSRequest +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, WSRequest class BybitAuth(AuthBase): @@ -18,33 +16,24 @@ def __init__(self, api_key: str, secret_key: str, time_provider: TimeSynchronize self.secret_key = secret_key self.time_provider = time_provider - @staticmethod - def keysort(dictionary: Dict[str, str]) -> Dict[str, str]: - return OrderedDict(sorted(dictionary.items(), key=lambda t: t[0])) - async def rest_authenticate(self, request: RESTRequest) -> RESTRequest: """ Adds the server time and the signature to the request, required for authenticated interactions. It also adds the required parameter in the request header. :param request: the request to be configured for authenticated interaction """ - request.params = self.add_auth_to_params(params=request.params) - headers = {} - if request.headers is not None: - headers.update(request.headers) - request.headers = headers - return request + return self.add_auth_headers(method=request.method, request=request) async def ws_authenticate(self, request: WSRequest) -> WSRequest: """ This method is intended to configure a websocket request to be authenticated. Bybit does not use this functionality """ - return request # pass-through + return self.generate_ws_auth_message() def get_referral_code_headers(self): """ - Generates authentication headers required by ByBit + Generates referral headers :return: a dictionary of auth headers """ headers = { @@ -52,31 +41,63 @@ def get_referral_code_headers(self): } return headers - def add_auth_to_params(self, - params: Optional[Dict[str, Any]]): - timestamp = int(self.time_provider.time() * 1e3) - request_params = params or {} - request_params["timestamp"] = timestamp - request_params["api_key"] = self.api_key - request_params = self.keysort(request_params) - signature = self._generate_signature(params=request_params) - request_params["sign"] = signature - return request_params - - def _generate_signature(self, params: Dict[str, Any]) -> str: - encoded_params_str = urlencode(params) - digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() - return digest - - def generate_ws_authentication_message(self): + def add_auth_headers(self, method: str, request: Optional[Dict[str, Any]]): + """ + Add authentication headers in request object + + :param method: HTTP method (POST, PUT, GET) + :param request: The request to be configured for authenticated interaction + + :return: request object updated with xauth headers + """ + ts = str(int(time.time() * 10 ** 3)) + + headers = {} + headers["X-BAPI-TIMESTAMP"] = str(ts) + headers["X-BAPI-API-KEY"] = self.api_key + + if method.value == "POST": + signature = self._generate_rest_signature( + timestamp=ts, method=method, payload=request.data) + else: + signature = self._generate_rest_signature( + timestamp=ts, method=method, payload=request.params) + + headers["X-BAPI-SIGN"] = signature + headers["X-BAPI-SIGN-TYPE"] = 
str(CONSTANTS.X_API_SIGN_TYPE) + headers["X-BAPI-RECV-WINDOW"] = str(CONSTANTS.X_API_RECV_WINDOW) + request.headers = {**request.headers, **headers} if request.headers is not None else headers + return request + + def _generate_rest_signature(self, timestamp, method: str, payload: Optional[Dict[str, Any]]) -> str: + if payload is None: + payload = {} + if method == RESTMethod.GET: + param_str = str(timestamp) + self.api_key + CONSTANTS.X_API_RECV_WINDOW + urlencode(payload) + elif method == RESTMethod.POST: + param_str = str(timestamp) + self.api_key + CONSTANTS.X_API_RECV_WINDOW + f"{payload}" + signature = hmac.new( + bytes(self.secret_key, "utf-8"), + param_str.encode("utf-8"), + digestmod="sha256" + ).hexdigest() + return signature + + def _generate_ws_signature(self, expires: int): + signature = str(hmac.new( + bytes(self.secret_key, "utf-8"), + bytes(f"GET/realtime{expires}", "utf-8"), + digestmod="sha256" + ).hexdigest()) + return signature + + def generate_ws_auth_message(self): """ Generates the authentication message to start receiving messages from the 3 private ws channels """ - expires = int((self.time_provider.time() + 10) * 1e3) - _val = f'GET/realtime{expires}' - signature = hmac.new(self.secret_key.encode("utf8"), - _val.encode("utf8"), hashlib.sha256).hexdigest() + expires = int((self._time() + 10000) * 1000) + signature = self._generate_ws_signature(expires) auth_message = { "op": "auth", "args": [self.api_key, expires, signature] diff --git a/hummingbot/connector/exchange/bybit/bybit_constants.py b/hummingbot/connector/exchange/bybit/bybit_constants.py index 8a6a62ecbb..a26f9883be 100644 --- a/hummingbot/connector/exchange/bybit/bybit_constants.py +++ b/hummingbot/connector/exchange/bybit/bybit_constants.py @@ -12,96 +12,216 @@ TIME_IN_FORCE_GTC = "GTC" # Base URL -REST_URLS = {"bybit_main": "https://api.bybit.com", - "bybit_testnet": "https://api-testnet.bybit.com"} +REST_URLS = { + "bybit_main": "https://api.bybit.com", + "bybit_testnet": "https://api-testnet.bybit.com" +} + +WSS_PUBLIC_URL = { + "bybit_main": "wss://stream.bybit.com/v5/public/spot", + "bybit_testnet": "wss://stream-testnet.bybit.com/v5/public/spot" +} + +WSS_PRIVATE_URL = { + "bybit_main": "wss://stream.bybit.com/v5/private", + "bybit_testnet": "wss://stream-testnet.bybit.com/v5/private" +} -WSS_V1_PUBLIC_URL = {"bybit_main": "wss://stream.bybit.com/spot/quote/ws/v1", - "bybit_testnet": "wss://stream-testnet.bybit.com/spot/quote/ws/v1"} +# unit in millisecond and default value is 5,000) to specify how long an HTTP request is valid. +# It is also used to prevent replay attacks. 
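A self-contained sketch of the V5 REST signing scheme that `add_auth_headers` and `_generate_rest_signature` implement above: the signature is an HMAC-SHA256 over timestamp + API key + recv window + the url-encoded query string (or the raw body for POST). The keys below are placeholders.

```python
import hmac
import time
from urllib.parse import urlencode

API_KEY, API_SECRET, RECV_WINDOW = "my_key", "my_secret", "50000"  # placeholders


def signed_get_headers(params: dict) -> dict:
    ts = str(int(time.time() * 1e3))
    param_str = ts + API_KEY + RECV_WINDOW + urlencode(params)
    signature = hmac.new(API_SECRET.encode("utf-8"),
                         param_str.encode("utf-8"),
                         digestmod="sha256").hexdigest()
    return {
        "X-BAPI-API-KEY": API_KEY,
        "X-BAPI-TIMESTAMP": ts,
        "X-BAPI-RECV-WINDOW": RECV_WINDOW,
        "X-BAPI-SIGN": signature,
    }


print(signed_get_headers({"category": "spot", "symbol": "BTCUSDT"}))
```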
+# https://bybit-exchange.github.io/docs/v5/guide#parameters-for-authenticated-endpoints +X_API_RECV_WINDOW = str(50000) -WSS_PRIVATE_URL = {"bybit_main": "wss://stream.bybit.com/spot/ws", - "bybit_testnet": "wss://stream-testnet.bybit.com/spot/ws"} +X_API_SIGN_TYPE = str(2) + +# https://bybit-exchange.github.io/docs/v5/websocket/public/orderbook +SPOT_ORDER_BOOK_DEPTH = 50 + +TRADE_CATEGORY = "spot" # Websocket event types -DIFF_EVENT_TYPE = "diffDepth" -TRADE_EVENT_TYPE = "trade" +# https://bybit-exchange.github.io/docs/v5/websocket/public/trade +TRADE_EVENT_TYPE = "snapshot" # Weird but true in V5 SNAPSHOT_EVENT_TYPE = "depth" +# V5: https://bybit-exchange.github.io/docs/v5/websocket/public/orderbook +ORDERBOOK_DIFF_EVENT_TYPE = "delta" +ORDERBOOK_SNAPSHOT_EVENT_TYPE = "snapshot" + +PRIVATE_ORDER_CHANNEL = "order" +PRIVATE_TRADE_CHANNEL = "trade" +PRIVATE_WALLET_CHANNEL = "wallet" + +WS_SUBSCRIPTION_ORDERS_ENDPOINT_NAME = "order" +WS_SUBSCRIPTION_EXECUTIONS_ENDPOINT_NAME = "execution" +WS_SUBSCRIPTION_WALLET_ENDPOINT_NAME = "wallet" # Public API endpoints -LAST_TRADED_PRICE_PATH = "/spot/quote/v1/ticker/price" -EXCHANGE_INFO_PATH_URL = "/spot/v1/symbols" -SNAPSHOT_PATH_URL = "/spot/quote/v1/depth" -SERVER_TIME_PATH_URL = "/spot/v1/time" +LAST_TRADED_PRICE_PATH = "/v5/market/tickers" +EXCHANGE_INFO_PATH_URL = "/v5/market/instruments-info" +SNAPSHOT_PATH_URL = "/v5/market/orderbook" +SERVER_TIME_PATH_URL = "/v5/market/time" # Private API endpoints -ACCOUNTS_PATH_URL = "/spot/v1/account" -MY_TRADES_PATH_URL = "/spot/v1/myTrades" -ORDER_PATH_URL = "/spot/v1/order" +ACCOUNT_INFO_PATH_URL = "/v5/account/info" +BALANCE_PATH_URL = "/v5/account/wallet-balance" +ORDER_PLACE_PATH_URL = "/v5/order/create" +ORDER_CANCEL_PATH_URL = "/v5/order/cancel" +GET_ORDERS_PATH_URL = "/v5/order/realtime" +TRADE_HISTORY_PATH_URL = "/v5/execution/list" +EXCHANGE_FEE_RATE_PATH_URL = "/v5/account/fee-rate" + # Order States +# https://bybit-exchange.github.io/docs/v5/enum#orderstatus ORDER_STATE = { - "PENDING": OrderState.PENDING_CREATE, - "NEW": OrderState.OPEN, - "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, - "FILLED": OrderState.FILLED, - "PENDING_CANCEL": OrderState.PENDING_CANCEL, - "CANCELED": OrderState.CANCELED, - "REJECTED": OrderState.FAILED, + "New": OrderState.OPEN, + "PartiallyFilled": OrderState.PARTIALLY_FILLED, + "Filled": OrderState.FILLED, + "Cancelled": OrderState.CANCELED, + "PartiallyFilledCanceled": OrderState.CANCELED, + "Rejected": OrderState.FAILED, +} + +ACCOUNT_TYPE = { + "REGULAR": 1, + "UNIFIED": 3, + "UTA_PRO": 4 } -WS_HEARTBEAT_TIME_INTERVAL = 30 +WS_HEARTBEAT_TIME_INTERVAL = 20 + +# Request error codes +RET_CODE_OK = 0 +RET_CODE_PARAMS_ERROR = 10001 +RET_CODE_API_KEY_INVALID = 10003 +RET_CODE_AUTH_TIMESTAMP_ERROR = 10021 +RET_CODE_ORDER_NOT_EXISTS = 20001 +RET_CODE_MODE_POSITION_NOT_EMPTY = 30082 +RET_CODE_MODE_NOT_MODIFIED = 110025 +RET_CODE_MODE_ORDER_NOT_EMPTY = 30086 +RET_CODE_API_KEY_EXPIRED = 33004 +RET_CODE_LEVERAGE_NOT_MODIFIED = 110043 +RET_CODE_POSITION_ZERO = 130125 + +API_REQUEST_RETRY = 2 # Rate Limit Type -REQUEST_GET = "GET" -REQUEST_GET_BURST = "GET_BURST" -REQUEST_GET_MIXED = "GET_MIXED" -REQUEST_POST = "POST" -REQUEST_POST_BURST = "POST_BURST" -REQUEST_POST_MIXED = "POST_MIXED" - -# Rate Limit Max request - -MAX_REQUEST_GET = 6000 -MAX_REQUEST_GET_BURST = 70 -MAX_REQUEST_GET_MIXED = 400 -MAX_REQUEST_POST = 2400 -MAX_REQUEST_POST_BURST = 50 -MAX_REQUEST_POST_MIXED = 270 +REQUEST_GET_POST_SHARED = "ALL" # Rate Limit time intervals TWO_MINUTES = 120 ONE_SECOND = 1 
SIX_SECONDS = 6 -ONE_DAY = 86400 +FIVE_SECONDS = 5 +ONE_DAY = 60 * 60 * 24 +ONE_HOUR = 60 * 60 + +# https://bybit-exchange.github.io/docs/v5/rate-limit#api-rate-limit-rules-for-vipspros +MAX_REQUEST_SECURE_DIVIDER = 2 +MAX_REQUEST_LIMIT_DEFAULT = 20 / MAX_REQUEST_SECURE_DIVIDER # 20/s is the max + +# No more than 600 requests are allowed in any 5-second window. +# https://bybit-exchange.github.io/docs/v5/rate-limit#ip-rate-limit +SHARED_RATE_LIMIT = 600 # per 5 second + +# WS_CONNECTIONS_RATE_LIMIT = "WS_CONNECTIONS_RATE_LIMIT" +# WS_CONNECTIONS_RATE_LIMIT = 500 # Per 5 seconds +# WS_CONNECTIONS_RATE_LIMIT_SEC = WS_CONNECTIONS_RATE_LIMIT / 5 RATE_LIMITS = { - # General - RateLimit(limit_id=REQUEST_GET, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES), - RateLimit(limit_id=REQUEST_GET_BURST, limit=MAX_REQUEST_GET_BURST, time_interval=ONE_SECOND), - RateLimit(limit_id=REQUEST_GET_MIXED, limit=MAX_REQUEST_GET_MIXED, time_interval=SIX_SECONDS), - RateLimit(limit_id=REQUEST_POST, limit=MAX_REQUEST_POST, time_interval=TWO_MINUTES), - RateLimit(limit_id=REQUEST_POST_BURST, limit=MAX_REQUEST_POST_BURST, time_interval=ONE_SECOND), - RateLimit(limit_id=REQUEST_POST_MIXED, limit=MAX_REQUEST_POST_MIXED, time_interval=SIX_SECONDS), + # General Limits on REST Verbs (GET/POST) + RateLimit( + limit_id=REQUEST_GET_POST_SHARED, + limit=SHARED_RATE_LIMIT, + time_interval=FIVE_SECONDS + ), # Linked limits - RateLimit(limit_id=LAST_TRADED_PRICE_PATH, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), - LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), - RateLimit(limit_id=EXCHANGE_INFO_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), - LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), - RateLimit(limit_id=SNAPSHOT_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), - LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), - RateLimit(limit_id=SERVER_TIME_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), - LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), - RateLimit(limit_id=ORDER_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), - LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), - RateLimit(limit_id=ACCOUNTS_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), - LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), - RateLimit(limit_id=MY_TRADES_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, - linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), - LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), - + RateLimit( + limit_id=LAST_TRADED_PRICE_PATH, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=EXCHANGE_INFO_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=SNAPSHOT_PATH_URL, + 
limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=SERVER_TIME_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=ORDER_PLACE_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=ORDER_CANCEL_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=GET_ORDERS_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=ACCOUNT_INFO_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=BALANCE_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=TRADE_HISTORY_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), + RateLimit( + limit_id=EXCHANGE_FEE_RATE_PATH_URL, + limit=MAX_REQUEST_LIMIT_DEFAULT, + time_interval=ONE_SECOND, + linked_limits=[ + LinkedLimitWeightPair(REQUEST_GET_POST_SHARED), + ] + ), } diff --git a/hummingbot/connector/exchange/bybit/bybit_exchange.py b/hummingbot/connector/exchange/bybit/bybit_exchange.py index 4812f687b4..504f0e9faa 100644 --- a/hummingbot/connector/exchange/bybit/bybit_exchange.py +++ b/hummingbot/connector/exchange/bybit/bybit_exchange.py @@ -2,10 +2,10 @@ from decimal import Decimal from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +import pandas as pd from bidict import bidict import hummingbot.connector.exchange.bybit.bybit_constants as CONSTANTS -import hummingbot.connector.exchange.bybit.bybit_utils as bybit_utils import hummingbot.connector.exchange.bybit.bybit_web_utils as web_utils from hummingbot.connector.exchange.bybit.bybit_api_order_book_data_source import BybitAPIOrderBookDataSource from hummingbot.connector.exchange.bybit.bybit_api_user_stream_data_source import BybitAPIUserStreamDataSource @@ -16,7 +16,7 @@ from hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderUpdate, TradeUpdate from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource -from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase +from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource from hummingbot.core.utils.estimate_fee import build_trade_fee from hummingbot.core.web_assistant.connections.data_types import RESTMethod @@ -46,11 +46,16 @@ def __init__(self, self._trading_required = trading_required self._trading_pairs = trading_pairs self._last_trades_poll_bybit_timestamp = 1.0 + self._account_type = None # To be update on firtst call to balances + self._category = CONSTANTS.TRADE_CATEGORY # Required by the V5 API super().__init__(client_config_map) @staticmethod def 
bybit_order_type(order_type: OrderType) -> str: - return order_type.name.upper() + if order_type.is_limit_type(): + return "Limit" + else: + return "Market" @staticmethod def to_hb_order_type(bybit_type: str) -> OrderType: @@ -142,7 +147,7 @@ def _create_web_assistants_factory(self) -> WebAssistantsFactory: def _create_order_book_data_source(self) -> OrderBookTrackerDataSource: return BybitAPIOrderBookDataSource( - trading_pairs=self._trading_pairs, + trading_pairs=self.trading_pairs, connector=self, domain=self.domain, api_factory=self._web_assistants_factory, @@ -167,17 +172,45 @@ def _get_fee(self, price: Decimal = s_decimal_NaN, is_maker: Optional[bool] = None) -> TradeFeeBase: is_maker = order_type is OrderType.LIMIT_MAKER - trade_base_fee = build_trade_fee( - exchange=self.name, - is_maker=is_maker, - order_side=order_side, - order_type=order_type, - amount=amount, - price=price, - base_currency=base_currency, - quote_currency=quote_currency + trading_pair = combine_to_hb_trading_pair(base=base_currency, quote=quote_currency) + if trading_pair in self._trading_fees: + fees_data = self._trading_fees[trading_pair] + fee_value = Decimal(fees_data["makerFeeRate"]) if is_maker else Decimal(fees_data["takerFeeRate"]) + fee = AddedToCostTradeFee(percent=fee_value) + else: + fee = build_trade_fee( + self.name, + is_maker, + base_currency=base_currency, + quote_currency=quote_currency, + order_type=order_type, + order_side=order_side, + amount=amount, + price=price, + ) + return fee + + async def _get_account_info(self): + account_info = await self._api_get( + path_url=CONSTANTS.ACCOUNT_INFO_PATH_URL, + params=None, + is_auth_required=True, + headers={ + "referer": CONSTANTS.HBOT_BROKER_ID + }, ) - return trade_base_fee + return account_info + + async def _get_account_type(self): + account_info = await self._get_account_info() + if account_info["retCode"] != 0: + raise ValueError(f"{account_info['retMsg']}") + account_type = 'SPOT' if account_info["result"]["unifiedMarginStatus"] == \ + CONSTANTS.ACCOUNT_TYPE["REGULAR"] else 'UNIFIED' + return account_type + + async def _update_account_type(self): + self._account_type = await self._get_account_type() async def _place_order(self, order_id: str, @@ -187,106 +220,81 @@ async def _place_order(self, order_type: OrderType, price: Decimal, **kwargs) -> Tuple[str, float]: - amount_str = f"{amount:f}" type_str = self.bybit_order_type(order_type) side_str = CONSTANTS.SIDE_BUY if trade_type is TradeType.BUY else CONSTANTS.SIDE_SELL symbol = await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) - api_params = {"symbol": symbol, - "side": side_str, - "qty": amount_str, - "type": type_str, - "orderLinkId": order_id} - if order_type != OrderType.MARKET: - api_params["price"] = f"{price:f}" + + api_params = { + "category": self._category, + "symbol": symbol, + "side": side_str, + "orderType": type_str, + "qty": f"{amount:f}", + "marketUnit": "baseCoin", + "price": f"{price:f}", + "orderLinkId": order_id + } if order_type == OrderType.LIMIT: api_params["timeInForce"] = CONSTANTS.TIME_IN_FORCE_GTC - order_result = await self._api_post( - path_url=CONSTANTS.ORDER_PATH_URL, - params=api_params, + response = await self._api_post( + path_url=CONSTANTS.ORDER_PLACE_PATH_URL, + data=api_params, is_auth_required=True, - trading_pair=trading_pair, - headers={"referer": CONSTANTS.HBOT_BROKER_ID}, + trading_pair=trading_pair ) - - o_id = str(order_result["result"]["orderId"]) - transact_time = int(order_result["result"]["transactTime"]) * 1e-3 + if 
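For reference, the request body that `_place_order` above submits to the V5 create-order endpoint looks roughly like the sketch below. Values are illustrative, and the `side` string comes from the `SIDE_BUY`/`SIDE_SELL` constants, whose exact values are defined outside this hunk.

```python
# Illustrative body for the V5 create-order call assembled in _place_order above.
order_payload = {
    "category": "spot",
    "symbol": "BTCUSDT",
    "side": "Buy",               # CONSTANTS.SIDE_BUY / SIDE_SELL (defined outside this hunk)
    "orderType": "Limit",        # "Limit" or "Market", from bybit_order_type()
    "qty": "0.001",
    "marketUnit": "baseCoin",
    "price": "27000",
    "orderLinkId": "hbot-123",   # the client order id
    "timeInForce": "GTC",        # added only for LIMIT orders
}
```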
response["retCode"] != 0: + raise ValueError(f"{response['retMsg']}") + order_result = response.get("result", {}) + o_id = str(order_result["orderId"]) + transact_time = int(response["time"]) * 1e-3 return (o_id, transact_time) async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): - api_params = {} - if tracked_order.exchange_order_id: - api_params["orderId"] = tracked_order.exchange_order_id + exchange_order_id = tracked_order.exchange_order_id + client_order_id = tracked_order.client_order_id + trading_pair = tracked_order.trading_pair + api_params = { + "category": self._category, + "symbol": trading_pair + } + if exchange_order_id: + api_params["orderId"] = exchange_order_id else: - api_params["orderLinkId"] = tracked_order.client_order_id - cancel_result = await self._api_delete( - path_url=CONSTANTS.ORDER_PATH_URL, - params=api_params, - is_auth_required=True) - - if isinstance(cancel_result, dict) and "orderLinkId" in cancel_result["result"]: + api_params["orderLinkId"] = client_order_id + api_params = dict(sorted(api_params.items())) + response = await self._api_post( + path_url=CONSTANTS.ORDER_CANCEL_PATH_URL, + data=api_params, + is_auth_required=True, + headers={"referer": CONSTANTS.HBOT_BROKER_ID}, + ) + if response["retCode"] != 0: + raise ValueError(f"{response['retMsg']}") + if isinstance(response, dict) and "orderLinkId" in response["result"]: return True return False async def _format_trading_rules(self, exchange_info_dict: Dict[str, Any]) -> List[TradingRule]: - """ - Example: - { - "ret_code": 0, - "ret_msg": "", - "ext_code": null, - "ext_info": null, - "result": [ - { - "name": "BTCUSDT", - "alias": "BTCUSDT", - "baseCurrency": "BTC", - "quoteCurrency": "USDT", - "basePrecision": "0.000001", - "quotePrecision": "0.01", - "minTradeQuantity": "0.0001", - "minTradeAmount": "10", - "minPricePrecision": "0.01", - "maxTradeQuantity": "2", - "maxTradeAmount": "200", - "category": 1 - }, - { - "name": "ETHUSDT", - "alias": "ETHUSDT", - "baseCurrency": "ETH", - "quoteCurrency": "USDT", - "basePrecision": "0.0001", - "quotePrecision": "0.01", - "minTradeQuantity": "0.0001", - "minTradeAmount": "10", - "minPricePrecision": "0.01", - "maxTradeQuantity": "2", - "maxTradeAmount": "200", - "category": 1 - } - ] - } - """ - trading_pair_rules = exchange_info_dict.get("result", []) + trading_pair_rules = exchange_info_dict.get("result", []).get("list", []) retval = [] for rule in trading_pair_rules: try: - trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=rule.get("name")) - - min_order_size = rule.get("minTradeQuantity") - min_price_increment = rule.get("minPricePrecision") - min_base_amount_increment = rule.get("basePrecision") - min_notional_size = rule.get("minTradeAmount") - + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=rule["symbol"]) + lot_size_filter = rule.get("lotSizeFilter", {}) + price_filter = rule.get("priceFilter", {}) retval.append( - TradingRule(trading_pair, - min_order_size=Decimal(min_order_size), - min_price_increment=Decimal(min_price_increment), - min_base_amount_increment=Decimal(min_base_amount_increment), - min_notional_size=Decimal(min_notional_size))) - + TradingRule( + trading_pair, + min_order_size=Decimal(lot_size_filter.get("minOrderQty")), + max_order_size=Decimal(lot_size_filter.get("maxOrderQty")), + min_price_increment=Decimal(price_filter.get("tickSize")), + min_base_amount_increment=Decimal(lot_size_filter.get("basePrecision")), + 
min_quote_amount_increment=Decimal(lot_size_filter.get('quotePrecision')), + min_notional_size=Decimal(lot_size_filter.get("minOrderAmt")) + ) + ) except Exception: self.logger().exception(f"Error parsing the trading pair rule {rule.get('name')}. Skipping.") return retval @@ -295,7 +303,25 @@ async def _update_trading_fees(self): """ Update fees information from the exchange """ - pass + # await self._update_exchange_fee_rates() + fee_rates = await self._get_exchange_fee_rates() + for tpfee in fee_rates: + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=tpfee["symbol"]) + self._trading_fees[trading_pair] = tpfee + + def _process_trade_event_message(self, trade_msg: Dict[str, Any]): + """ + Updates in-flight order and trigger order filled event for trade message received. Triggers order completed + event if the total executed amount equals to the specified order amount. + :param trade_msg: The trade event message payload + """ + + client_order_id = str(trade_msg["orderLinkId"]) + fillable_order = self._order_tracker.all_fillable_orders.get(client_order_id) + + if fillable_order is not None: + trade_update = self._parse_trade_update(trade_msg=trade_msg, tracked_order=fillable_order) + self._order_tracker.process_trade_update(trade_update) async def _user_stream_event_listener(self): """ @@ -305,49 +331,48 @@ async def _user_stream_event_listener(self): """ async for event_message in self._iter_user_event_queue(): try: - event_type = event_message.get("e") - if event_type == "executionReport": - execution_type = event_message.get("X") - client_order_id = event_message.get("c") - tracked_order = self._order_tracker.fetch_order(client_order_id=client_order_id) - if tracked_order is not None: - if execution_type in ["PARTIALLY_FILLED", "FILLED"]: - fee = TradeFeeBase.new_spot_fee( - fee_schema=self.trade_fee_schema(), - trade_type=tracked_order.trade_type, - flat_fees=[TokenAmount(amount=Decimal(event_message["n"]), token=event_message["N"])] - ) - trade_update = TradeUpdate( - trade_id=str(event_message["t"]), + channel = event_message.get("channel") + if channel == CONSTANTS.PRIVATE_TRADE_CHANNEL: + data = event_message.get("data") + for trade_msg in data: + # SPOT: "", UNIFIED: "Trade" + if trade_msg.get("execType") not in ("Trade", ""): # Not a trade event + continue + self._process_trade_event_message(trade_msg) + elif channel == CONSTANTS.PRIVATE_ORDER_CHANNEL: + data = event_message.get("data") + for order in data: + client_order_id = order.get("orderLinkId") + exchange_order_id = order.get("orderId") + updatable_order = self._order_tracker.all_updatable_orders.get(client_order_id) + if updatable_order is not None: + new_state = CONSTANTS.ORDER_STATE[order["orderStatus"]] + order_update = OrderUpdate( + trading_pair=updatable_order.trading_pair, + update_timestamp=int(order["updatedTime"]) * 1e-3, + new_state=new_state, client_order_id=client_order_id, - exchange_order_id=str(event_message["i"]), - trading_pair=tracked_order.trading_pair, - fee=fee, - fill_base_amount=Decimal(event_message["l"]), - fill_quote_amount=Decimal(event_message["l"]) * Decimal(event_message["L"]), - fill_price=Decimal(event_message["L"]), - fill_timestamp=int(event_message["E"]) * 1e-3, + exchange_order_id=str(exchange_order_id), ) - self._order_tracker.process_trade_update(trade_update) - - order_update = OrderUpdate( - trading_pair=tracked_order.trading_pair, - update_timestamp=int(event_message["E"]) * 1e-3, - new_state=CONSTANTS.ORDER_STATE[event_message["X"]], - 
client_order_id=client_order_id, - exchange_order_id=str(event_message["i"]), - ) - self._order_tracker.process_order_update(order_update=order_update) - - elif event_type == "outboundAccountInfo": - balances = event_message["B"] + self._order_tracker.process_order_update(order_update=order_update) + elif channel == CONSTANTS.PRIVATE_WALLET_CHANNEL: + accounts = event_message["data"] + account_type = self._account_type + balances = [] + for account in accounts: + if account["accountType"] == account_type: + balances = account["coin"] + break for balance_entry in balances: - asset_name = balance_entry["a"] - free_balance = Decimal(balance_entry["f"]) - total_balance = Decimal(balance_entry["f"]) + Decimal(balance_entry["l"]) + asset_name = balance_entry["coin"] + free_balance = Decimal( + balance_entry.get("free") or + balance_entry.get("availableToWithdraw") or + balance_entry.get("availableToBorrow") + ) + total_balance = Decimal(balance_entry["walletBalance"]) self._account_available_balances[asset_name] = free_balance self._account_balances[asset_name] = total_balance - except asyncio.CancelledError: raise except Exception: @@ -356,93 +381,169 @@ async def _user_stream_event_listener(self): async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: trade_updates = [] - if order.exchange_order_id is not None: - exchange_order_id = int(order.exchange_order_id) - trading_pair = await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair) - all_fills_response = await self._api_get( - path_url=CONSTANTS.MY_TRADES_PATH_URL, - params={ - "symbol": trading_pair, - "orderId": exchange_order_id - }, - is_auth_required=True, - limit_id=CONSTANTS.MY_TRADES_PATH_URL) - fills_data = all_fills_response.get("result", []) - if fills_data is not None: - for trade in fills_data: - exchange_order_id = str(trade["orderId"]) - fee = TradeFeeBase.new_spot_fee( - fee_schema=self.trade_fee_schema(), - trade_type=order.trade_type, - percent_token=trade["commissionAsset"], - flat_fees=[TokenAmount(amount=Decimal(trade["commission"]), token=trade["commissionAsset"])] - ) - trade_update = TradeUpdate( - trade_id=str(trade["ticketId"]), - client_order_id=order.client_order_id, - exchange_order_id=exchange_order_id, - trading_pair=trading_pair, - fee=fee, - fill_base_amount=Decimal(trade["qty"]), - fill_quote_amount=Decimal(trade["price"]) * Decimal(trade["qty"]), - fill_price=Decimal(trade["price"]), - fill_timestamp=int(trade["executionTime"]) * 1e-3, - ) - trade_updates.append(trade_update) - + try: + all_fills_response = await self._request_order_fills(order=order) + fills_data = all_fills_response["list"] + + if fills_data is not None: + for fill_data in fills_data: + trade_update = self._parse_trade_update(trade_msg=fill_data, tracked_order=order) + trade_updates.append(trade_update) + except IOError as ex: + if not self._is_request_exception_related_to_time_synchronizer(request_exception=ex): + raise return trade_updates + async def _request_order_fills(self, order: InFlightOrder) -> Dict[str, Any]: + exchange_symbol = await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair) + exchange_order_id = str(order.exchange_order_id) + client_order_id = str(order.client_order_id) + api_params = { + "category": self._category, + "symbol": exchange_symbol, + "execType": "Trade" + } + if exchange_order_id: + api_params["orderId"] = exchange_order_id + else: + api_params["orderLinkId"] = client_order_id + response = await self._api_request( + 
method=RESTMethod.GET, + path_url=CONSTANTS.TRADE_HISTORY_PATH_URL, + params=api_params, + is_auth_required=True, + ) + result = response["result"] + return result + + def _parse_trade_update(self, trade_msg: Dict, tracked_order: InFlightOrder) -> TradeUpdate: + trade_id: str = str(trade_msg["execId"]) + is_maker: bool = trade_msg["isMaker"] + try: + maker_fee_rate: Decimal = Decimal(self._trading_fees[tracked_order.trading_pair]["makerFeeRate"]) + except KeyError: + # Workaround when no fees are initialized yet. + maker_fee_rate = Decimal("0") + side: str = trade_msg["side"] + if maker_fee_rate > Decimal("0"): + if side == "Buy": + fee_asset = tracked_order.base_asset + else: + fee_asset = tracked_order.quote_asset + else: + if is_maker: + if side == "Buy": + fee_asset = tracked_order.quote_asset + else: + fee_asset = tracked_order.base_asset + else: + if side == "Buy": + fee_asset = tracked_order.base_asset + else: + fee_asset = tracked_order.quote_asset + fee_amount = Decimal(trade_msg["execFee"]) + ptoken = tracked_order.trading_pair.split("-")[1] + + flat_fees = [] if fee_amount == Decimal("0") else [TokenAmount(amount=fee_amount, token=fee_asset)] + + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=tracked_order.trade_type, + percent_token=ptoken, + flat_fees=flat_fees + ) + + exec_price = Decimal(trade_msg["execPrice"]) if "execPrice" in trade_msg else Decimal(trade_msg["price"]) + exec_time = ( + int(trade_msg["execTime"]) * 1e-3 if "execTime" in trade_msg else + pd.Timestamp(trade_msg["trade_time"]).timestamp() * 1e-3 + ) + + trade_update: TradeUpdate = TradeUpdate( + trade_id=trade_id, + client_order_id=str(tracked_order.client_order_id or trade_msg["orderLinkId"]), + exchange_order_id=str(tracked_order.exchange_order_id or trade_msg["orderId"]), + trading_pair=tracked_order.trading_pair, + fill_timestamp=exec_time, + fill_price=exec_price, + fill_base_amount=Decimal(trade_msg["execQty"]), + fill_quote_amount=exec_price * Decimal(trade_msg["execQty"]), + fee=fee, + ) + return trade_update + async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate: + exchange_order_id = tracked_order.exchange_order_id + client_order_id = tracked_order.client_order_id + trading_pair = tracked_order.trading_pair + api_params = { + "category": self._category, + "symbol": trading_pair + } + if exchange_order_id: + api_params["orderId"] = exchange_order_id + else: + api_params["orderLinkId"] = client_order_id updated_order_data = await self._api_get( - path_url=CONSTANTS.ORDER_PATH_URL, - params={ - "orderLinkId": tracked_order.client_order_id}, - is_auth_required=True) + path_url=CONSTANTS.GET_ORDERS_PATH_URL, + params=api_params, + is_auth_required=True, + limit_id=CONSTANTS.GET_ORDERS_PATH_URL + ) + if not len(updated_order_data["result"]["list"]): + raise ValueError(f"No order found for {client_order_id} or {exchange_order_id}") + order_data = updated_order_data["result"]["list"][0] + order_status = order_data["orderStatus"] - new_state = CONSTANTS.ORDER_STATE[updated_order_data["result"]["status"]] + new_state = CONSTANTS.ORDER_STATE[order_status] order_update = OrderUpdate( - client_order_id=tracked_order.client_order_id, - exchange_order_id=str(updated_order_data["result"]["orderId"]), - trading_pair=tracked_order.trading_pair, - update_timestamp=int(updated_order_data["result"]["updateTime"]) * 1e-3, + client_order_id=client_order_id, + exchange_order_id=exchange_order_id, + trading_pair=trading_pair, + 
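The fee-asset selection in `_parse_trade_update` above can be summarised as a small decision table: with a positive maker rate the fee is taken from the asset received, while with a maker rebate the maker is credited in the opposite leg. A compact restatement of that branching (a sketch, not part of the connector):

```python
def fee_asset(side: str, is_maker: bool, maker_fee_rate_positive: bool,
              base: str, quote: str) -> str:
    """Mirror of the branching in _parse_trade_update above (side is "Buy" or "Sell")."""
    if maker_fee_rate_positive:
        return base if side == "Buy" else quote   # fee charged on the asset received
    if is_maker:
        return quote if side == "Buy" else base   # rebate credited in the opposite leg
    return base if side == "Buy" else quote       # taker on a rebate schedule: asset received


print(fee_asset("Buy", is_maker=True, maker_fee_rate_positive=False, base="BTC", quote="USDT"))
# -> USDT
```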
update_timestamp=int(order_data["updatedTime"]) * 1e-3, new_state=new_state, ) - return order_update async def _update_balances(self): - local_asset_names = set(self._account_balances.keys()) - remote_asset_names = set() + # Update the first time it is called + if self._account_type is None: + await self._update_account_type() - account_info = await self._api_request( + balances = await self._api_request( method=RESTMethod.GET, - path_url=CONSTANTS.ACCOUNTS_PATH_URL, - is_auth_required=True) - balances = account_info["result"]["balances"] - for balance_entry in balances: - asset_name = balance_entry["coin"] - free_balance = Decimal(balance_entry["free"]) - total_balance = Decimal(balance_entry["total"]) - self._account_available_balances[asset_name] = free_balance - self._account_balances[asset_name] = total_balance - remote_asset_names.add(asset_name) - - asset_names_to_remove = local_asset_names.difference(remote_asset_names) - for asset_name in asset_names_to_remove: - del self._account_available_balances[asset_name] - del self._account_balances[asset_name] + path_url=CONSTANTS.BALANCE_PATH_URL, + params={ + 'accountType': self._account_type + }, + is_auth_required=True + ) + if balances["retCode"] != 0: + raise ValueError(f"{balances['retMsg']}") + self._account_available_balances.clear() + self._account_balances.clear() + for coin in balances["result"]["list"][0]["coin"]: + name = coin["coin"] + free_balance = Decimal(coin["free"]) if self._account_type == "SPOT" \ + else Decimal(coin["availableToWithdraw"]) + balance = Decimal(coin["walletBalance"]) + self._account_available_balances[name] = free_balance + self._account_balances[name] = Decimal(balance) def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]): mapping = bidict() - for symbol_data in filter(bybit_utils.is_exchange_information_valid, exchange_info["result"]): - mapping[symbol_data["name"]] = combine_to_hb_trading_pair(base=symbol_data["baseCurrency"], - quote=symbol_data["quoteCurrency"]) + for symbol_data in exchange_info["result"]['list']: + mapping[symbol_data["symbol"]] = combine_to_hb_trading_pair( + base=symbol_data["baseCoin"], + quote=symbol_data["quoteCoin"] + ) self._set_trading_pair_symbol_map(mapping) async def _get_last_traded_price(self, trading_pair: str) -> float: params = { + "category": self._category, "symbol": await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair), } resp_json = await self._api_request( @@ -451,7 +552,7 @@ async def _get_last_traded_price(self, trading_pair: str) -> float: params=params, ) - return float(resp_json["result"]["price"]) + return float(resp_json["result"]["list"][0]["lastPrice"]) async def _api_request(self, path_url, @@ -461,14 +562,15 @@ async def _api_request(self, is_auth_required: bool = False, return_err: bool = False, limit_id: Optional[str] = None, - trading_pair: Optional[str] = None, + headers: Optional[Dict[str, Any]] = None, **kwargs) -> Dict[str, Any]: last_exception = None rest_assistant = await self._web_assistants_factory.get_rest_assistant() url = web_utils.rest_url(path_url, domain=self.domain) - local_headers = { - "Content-Type": "application/x-www-form-urlencoded"} - for _ in range(2): + params = dict(sorted(params.items())) if isinstance(params, dict) else params + data = dict(sorted(data.items())) if isinstance(data, dict) else data + + for _ in range(CONSTANTS.API_REQUEST_RETRY): try: request_result = await rest_assistant.execute_request( url=url, @@ -477,7 +579,7 @@ async def 
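For context, `_update_balances` above consumes a V5 wallet-balance response shaped like the sketch below: `free` is read for `SPOT` accounts and `availableToWithdraw` for unified accounts, while `walletBalance` is the total. The numbers are made up.

```python
# Illustrative V5 wallet-balance response slice read by _update_balances above.
balances_response = {
    "retCode": 0,
    "result": {
        "list": [{
            "accountType": "SPOT",
            "coin": [
                {"coin": "USDT", "free": "1000", "walletBalance": "1250"},
                {"coin": "BTC", "free": "0.40", "walletBalance": "0.50"},
            ],
        }]
    },
}

totals = {c["coin"]: c["walletBalance"] for c in balances_response["result"]["list"][0]["coin"]}
print(totals)  # -> {'USDT': '1250', 'BTC': '0.50'}
```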
_api_request(self, method=method, is_auth_required=is_auth_required, return_err=return_err, - headers=local_headers, + headers=headers, throttler_limit_id=limit_id if limit_id else path_url, ) return request_result @@ -488,6 +590,48 @@ async def _api_request(self, await self._update_time_synchronizer() else: raise - # Failed even after the last retry raise last_exception + + async def _make_trading_rules_request(self) -> Any: + exchange_info = await self._api_get( + path_url=self.trading_rules_request_path, + params={ + 'category': self._category + } + ) + return exchange_info + + async def _make_trading_pairs_request(self) -> Any: + exchange_info = await self._api_get( + path_url=self.trading_pairs_request_path, + params={ + 'category': self._category + } + ) + return exchange_info + + async def _get_trading_pair_fee_rate(self, trading_pair: str) -> Any: + api_params = { + "category": self._category, + "symbol": await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + } + fee_rates = await self._api_get( + path_url=CONSTANTS.EXCHANGE_FEE_RATE_PATH_URL, + params=api_params, + is_auth_required=True, + limit_id=CONSTANTS.EXCHANGE_FEE_RATE_PATH_URL + ) + return fee_rates["result"]["list"][0] + + async def _get_exchange_fee_rates(self) -> Any: + api_params = { + "category": self._category + } + fee_rates = await self._api_get( + path_url=CONSTANTS.EXCHANGE_FEE_RATE_PATH_URL, + params=api_params, + is_auth_required=True, + limit_id=CONSTANTS.EXCHANGE_FEE_RATE_PATH_URL + ) + return fee_rates["result"]["list"] diff --git a/hummingbot/connector/exchange/bybit/bybit_order_book.py b/hummingbot/connector/exchange/bybit/bybit_order_book.py index 3a7bd442ca..41e05850b9 100644 --- a/hummingbot/connector/exchange/bybit/bybit_order_book.py +++ b/hummingbot/connector/exchange/bybit/bybit_order_book.py @@ -20,10 +20,9 @@ def snapshot_message_from_exchange_websocket(cls, """ if metadata: msg.update(metadata) - ts = msg["t"] return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { "trading_pair": msg["trading_pair"], - "update_id": ts, + "update_id": msg["u"], "bids": msg["b"], "asks": msg["a"] }, timestamp=timestamp) @@ -42,12 +41,11 @@ def snapshot_message_from_exchange_rest(cls, """ if metadata: msg.update(metadata) - ts = msg["time"] return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { "trading_pair": msg["trading_pair"], - "update_id": ts, - "bids": msg["bids"], - "asks": msg["asks"] + "update_id": msg["u"], + "bids": msg["b"], + "asks": msg["a"] }, timestamp=timestamp) @classmethod @@ -64,10 +62,9 @@ def diff_message_from_exchange(cls, """ if metadata: msg.update(metadata) - ts = msg["t"] return OrderBookMessage(OrderBookMessageType.DIFF, { "trading_pair": msg["trading_pair"], - "update_id": ts, + "update_id": msg["u"], "bids": msg["b"], "asks": msg["a"] }, timestamp=timestamp) @@ -82,12 +79,12 @@ def trade_message_from_exchange(cls, msg: Dict[str, any], metadata: Optional[Dic """ if metadata: msg.update(metadata) - ts = msg["t"] - return OrderBookMessage(OrderBookMessageType.TRADE, { + trade_msg = OrderBookMessage(OrderBookMessageType.TRADE, { "trading_pair": msg["trading_pair"], - "trade_type": float(TradeType.BUY.value) if msg["m"] else float(TradeType.SELL.value), - "trade_id": ts, - "update_id": ts, + "trade_type": float(TradeType.BUY.value) if msg["S"] == "BUY" else float(TradeType.SELL.value), + "trade_id": msg["i"], + "update_id": msg["T"], "price": msg["p"], - "amount": msg["q"] - }, timestamp=ts * 1e-3) + "amount": msg["v"] + }, timestamp=msg["T"]) + return trade_msg diff 
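The reworked `BybitOrderBook` factories above read the compact V5 field names. A sketch of the websocket payload shapes they expect, with illustrative values; the trade-side comparison follows the code above.

```python
# Illustrative V5 websocket payloads matching the field names used above.
orderbook_delta = {
    "s": "BTCUSDT",              # symbol (trading pair is attached as metadata upstream)
    "u": 177400507,              # update id
    "b": [["27000.1", "0.5"]],   # bids as [price, size]
    "a": [["27000.5", "0.3"]],   # asks as [price, size]
}

public_trade = {
    "S": "BUY",                    # side, compared against "BUY" in trade_message_from_exchange
    "i": "2290000000061666327",    # trade id
    "T": 1700000000123,            # trade time in ms, used as update_id and timestamp
    "p": "27000.3",                # price
    "v": "0.002",                  # amount
}
```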
--git a/hummingbot/connector/exchange/bybit/bybit_web_utils.py b/hummingbot/connector/exchange/bybit/bybit_web_utils.py index 4be08757e4..8e42750195 100644 --- a/hummingbot/connector/exchange/bybit/bybit_web_utils.py +++ b/hummingbot/connector/exchange/bybit/bybit_web_utils.py @@ -6,9 +6,18 @@ from hummingbot.core.api_throttler.async_throttler import AsyncThrottler from hummingbot.core.web_assistant.auth import AuthBase from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest +from hummingbot.core.web_assistant.rest_pre_processors import RESTPreProcessorBase from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +class HeadersContentRESTPreProcessor(RESTPreProcessorBase): + async def pre_process(self, request: RESTRequest) -> RESTRequest: + request.headers = request.headers or {} + if request.method == RESTMethod.POST: + request.headers["Content-Type"] = "application/json" + return request + + def rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: """ Creates a full URL for provided public REST endpoint @@ -74,9 +83,6 @@ async def api_request(path: str, ) rest_assistant = await api_factory.get_rest_assistant() - local_headers = { - "Content-Type": "application/x-www-form-urlencoded"} - local_headers.update(headers) url = rest_url(path, domain=domain) request = RESTRequest( @@ -84,7 +90,7 @@ async def api_request(path: str, url=url, params=params, data=data, - headers=local_headers, + headers=headers, is_auth_required=is_auth_required, throttler_limit_id=limit_id if limit_id else path ) @@ -119,6 +125,7 @@ async def get_current_server_time( throttler=throttler, domain=domain, method=RESTMethod.GET) - server_time = response["result"]["serverTime"] - + # response["result"] = {"timeSeconds": 0, "timeNano": 0} + # Better use nanoseconds and divide by 10^9 for higher resolution + server_time = float(response["result"]["timeNano"]) / 10**9 return server_time diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pxd b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pxd deleted file mode 100644 index 1b150b93cf..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pxd +++ /dev/null @@ -1,11 +0,0 @@ -# distutils: language=c++ -cimport numpy as np - - -cdef class CoinbaseProActiveOrderTracker: - cdef dict _active_bids - cdef dict _active_asks - - cdef tuple c_convert_diff_message_to_np_arrays(self, object message) - cdef tuple c_convert_snapshot_message_to_np_arrays(self, object message) - cdef np.ndarray[np.float64_t, ndim=1] c_convert_trade_message_to_np_array(self, object message) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pyx b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pyx deleted file mode 100644 index 5ec63af652..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_active_order_tracker.pyx +++ /dev/null @@ -1,299 +0,0 @@ -# distutils: language=c++ -# distutils: sources=hummingbot/core/cpp/OrderBookEntry.cpp - -import logging -from decimal import Decimal -from typing import Dict - -import numpy as np - -from hummingbot.core.data_type.order_book_row import OrderBookRow -from hummingbot.logger import HummingbotLogger - -_cbpaot_logger = None -s_empty_diff = np.ndarray(shape=(0, 4), dtype="float64") - -CoinbaseProOrderBookTrackingDictionary = Dict[Decimal, Dict[str, Dict[str, any]]] - -TYPE_OPEN = "open" 
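The bybit_web_utils.py hunk above drops the form-encoded default header in favor of a pre-processor that tags only POST requests as JSON, and it switches server-time parsing to the nanosecond field for higher resolution. The sketch below is a minimal standalone illustration of both behaviors; `FakeRESTRequest` and the sample payload fields are stand-ins for this note, not Hummingbot or Bybit definitions.

```python
from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class FakeRESTRequest:  # stand-in for Hummingbot's RESTRequest
    method: str  # "GET" or "POST"
    headers: Optional[Dict[str, str]] = None


def add_json_content_type(request: FakeRESTRequest) -> FakeRESTRequest:
    # Same idea as HeadersContentRESTPreProcessor.pre_process above: only POST
    # requests get a JSON Content-Type; GET requests keep their headers as-is.
    request.headers = request.headers or {}
    if request.method == "POST":
        request.headers["Content-Type"] = "application/json"
    return request


def server_time_from_result(result: Dict[str, str]) -> float:
    # The hunk's comment notes the result carries second- and nanosecond-precision
    # fields; dividing the nanosecond value by 1e9 keeps sub-second resolution.
    return float(result["timeNano"]) / 1e9


if __name__ == "__main__":
    req = add_json_content_type(FakeRESTRequest(method="POST"))
    print(req.headers)  # {'Content-Type': 'application/json'}
    print(server_time_from_result({"timeNano": "1700000000123456789"}))
```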
-TYPE_CHANGE = "change" -TYPE_MATCH = "match" -TYPE_DONE = "done" -SIDE_BUY = "buy" -SIDE_SELL = "sell" - -cdef class CoinbaseProActiveOrderTracker: - def __init__(self, - active_asks: CoinbaseProOrderBookTrackingDictionary = None, - active_bids: CoinbaseProOrderBookTrackingDictionary = None): - super().__init__() - self._active_asks = active_asks or {} - self._active_bids = active_bids or {} - - @classmethod - def logger(cls) -> HummingbotLogger: - global _cbpaot_logger - if _cbpaot_logger is None: - _cbpaot_logger = logging.getLogger(__name__) - return _cbpaot_logger - - @property - def active_asks(self) -> CoinbaseProOrderBookTrackingDictionary: - """ - Get all asks on the order book in dictionary format - :returns: Dict[price, Dict[order_id, order_book_message]] - """ - return self._active_asks - - @property - def active_bids(self) -> CoinbaseProOrderBookTrackingDictionary: - """ - Get all bids on the order book in dictionary format - :returns: Dict[price, Dict[order_id, order_book_message]] - """ - return self._active_bids - - def volume_for_ask_price(self, price) -> float: - """ - For a certain price, get the volume sum of all ask order book rows with that price - :returns: volume sum - """ - return sum([float(msg["remaining_size"]) for msg in self._active_asks[price].values()]) - - def volume_for_bid_price(self, price) -> float: - """ - For a certain price, get the volume sum of all bid order book rows with that price - :returns: volume sum - """ - return sum([float(msg["remaining_size"]) for msg in self._active_bids[price].values()]) - - cdef tuple c_convert_diff_message_to_np_arrays(self, object message): - """ - Interpret an incoming diff message and apply changes to the order book accordingly - :returns: new order book rows: Tuple(np.array (bids), np.array (asks)) - """ - - cdef: - dict content = message.content - str msg_type = content["type"] - str order_id - str order_side - str price_raw - object price - dict order_dict - str remaining_size - double timestamp = message.timestamp - double quantity = 0 - - order_id = content.get("order_id") or content.get("maker_order_id") - order_side = content.get("side") - price_raw = content.get("price") - if order_id is None: - raise ValueError(f"Unknown order id for message - '{message}'. Aborting.") - if order_side not in [SIDE_BUY, SIDE_SELL]: - raise ValueError(f"Unknown order side for message - '{message}'. Aborting.") - if price_raw is None: - raise ValueError(f"Unknown order price for message - '{message}'. 
Aborting.") - elif price_raw == "null": # 'change' messages have 'null' as price for market orders - return s_empty_diff, s_empty_diff - price = Decimal(price_raw) - - if msg_type == TYPE_OPEN: - order_dict = { - "order_id": order_id, - "remaining_size": content["remaining_size"] - } - if order_side == SIDE_BUY: - if price in self._active_bids: - self._active_bids[price][order_id] = order_dict - else: - self._active_bids[price] = {order_id: order_dict} - quantity = self.volume_for_bid_price(price) - return np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64"), s_empty_diff - else: - if price in self._active_asks: - self._active_asks[price][order_id] = order_dict - else: - self._active_asks[price] = {order_id: order_dict} - quantity = self.volume_for_ask_price(price) - return s_empty_diff, np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64") - - elif msg_type == TYPE_CHANGE: - if content.get("new_size") is not None: - remaining_size = content["new_size"] - elif content.get("new_funds") is not None: - remaining_size = str(Decimal(content["new_funds"]) / price) - else: - raise ValueError(f"Invalid change message - '{message}'. Aborting.") - if order_side == SIDE_BUY: - if price in self._active_bids and order_id in self._active_bids[price]: - self._active_bids[price][order_id]["remaining_size"] = remaining_size - quantity = self.volume_for_bid_price(price) - return ( - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64"), - s_empty_diff - ) - else: - return s_empty_diff, s_empty_diff - else: - if price in self._active_asks and order_id in self._active_asks[price]: - self._active_asks[price][order_id]["remaining_size"] = remaining_size - quantity = self.volume_for_ask_price(price) - return ( - s_empty_diff, - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64") - ) - else: - return s_empty_diff, s_empty_diff - - elif msg_type == TYPE_MATCH: - if order_side == SIDE_BUY: - if price in self._active_bids and order_id in self._active_bids[price]: - remaining_size = self._active_bids[price][order_id]["remaining_size"] - self._active_bids[price][order_id]["remaining_size"] = str(float(remaining_size) - float(content["size"])) - quantity = self.volume_for_bid_price(price) - return ( - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64"), - s_empty_diff - ) - else: - return s_empty_diff, s_empty_diff - else: - if price in self._active_asks and order_id in self._active_asks[price]: - remaining_size = self._active_asks[price][order_id]["remaining_size"] - self._active_asks[price][order_id]["remaining_size"] = str(float(remaining_size) - float(content["size"])) - quantity = self.volume_for_ask_price(price) - return ( - s_empty_diff, - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64") - ) - else: - return s_empty_diff, s_empty_diff - - elif msg_type == TYPE_DONE: - if order_side == SIDE_BUY: - if price in self._active_bids and order_id in self._active_bids[price]: - del self._active_bids[price][order_id] - if len(self._active_bids[price]) < 1: - del self._active_bids[price] - return ( - np.array([[timestamp, float(price), 0.0, message.update_id]], dtype="float64"), - s_empty_diff - ) - else: - quantity = self.volume_for_bid_price(price) - return ( - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64"), - s_empty_diff - ) - return s_empty_diff, s_empty_diff - else: - if price in 
self._active_asks and order_id in self._active_asks[price]: - del self._active_asks[price][order_id] - if len(self._active_asks[price]) < 1: - del self._active_asks[price] - return ( - s_empty_diff, - np.array([[timestamp, float(price), 0.0, message.update_id]], dtype="float64") - ) - else: - quantity = self.volume_for_ask_price(price) - return ( - s_empty_diff, - np.array([[timestamp, float(price), quantity, message.update_id]], dtype="float64") - ) - return s_empty_diff, s_empty_diff - - else: - raise ValueError(f"Unknown message type '{msg_type}' - {message}. Aborting.") - - cdef tuple c_convert_snapshot_message_to_np_arrays(self, object message): - """ - Interpret an incoming snapshot message and apply changes to the order book accordingly - :returns: new order book rows: Tuple(np.array (bids), np.array (asks)) - """ - cdef: - object price - str order_id - str amount - dict order_dict - - # Refresh all order tracking. - self._active_bids.clear() - self._active_asks.clear() - for snapshot_orders, active_orders in [(message.content["bids"], self._active_bids), - (message.content["asks"], self._active_asks)]: - for order in snapshot_orders: - price = Decimal(order[0]) - order_id = order[2] - amount = order[1] - order_dict = { - "order_id": order_id, - "remaining_size": amount - } - - if price in active_orders: - active_orders[price][order_id] = order_dict - else: - active_orders[price] = { - order_id: order_dict - } - - # Return the sorted snapshot tables. - cdef: - np.ndarray[np.float64_t, ndim=2] bids = np.array( - [[message.timestamp, - float(price), - sum([float(order_dict["remaining_size"]) - for order_dict in self._active_bids[price].values()]), - message.update_id] - for price in sorted(self._active_bids.keys(), reverse=True)], dtype="float64", ndmin=2) - np.ndarray[np.float64_t, ndim=2] asks = np.array( - [[message.timestamp, - float(price), - sum([float(order_dict["remaining_size"]) - for order_dict in self._active_asks[price].values()]), - message.update_id] - for price in sorted(self._active_asks.keys(), reverse=True)], dtype="float64", ndmin=2) - - # If there're no rows, the shape would become (1, 0) and not (0, 4). - # Reshape to fix that. 
- if bids.shape[1] != 4: - bids = bids.reshape((0, 4)) - if asks.shape[1] != 4: - asks = asks.reshape((0, 4)) - - return bids, asks - - cdef np.ndarray[np.float64_t, ndim=1] c_convert_trade_message_to_np_array(self, object message): - """ - Interpret an incoming trade message and apply changes to the order book accordingly - :returns: new order book rows: Tuple[np.array (bids), np.array (asks)] - """ - cdef: - double trade_type_value = 1.0 if message.content["side"] == SIDE_SELL else 2.0 - - return np.array( - [message.timestamp, trade_type_value, float(message.content["price"]), float(message.content["size"])], - dtype="float64" - ) - - def convert_diff_message_to_order_book_row(self, message): - """ - Convert an incoming diff message to Tuple of np.arrays, and then convert to OrderBookRow - :returns: Tuple(List[bids_row], List[asks_row]) - """ - np_bids, np_asks = self.c_convert_diff_message_to_np_arrays(message) - bids_row = [OrderBookRow(price, qty, update_id) for ts, price, qty, update_id in np_bids] - asks_row = [OrderBookRow(price, qty, update_id) for ts, price, qty, update_id in np_asks] - return bids_row, asks_row - - def convert_snapshot_message_to_order_book_row(self, message): - """ - Convert an incoming snapshot message to Tuple of np.arrays, and then convert to OrderBookRow - :returns: Tuple(List[bids_row], List[asks_row]) - """ - np_bids, np_asks = self.c_convert_snapshot_message_to_np_arrays(message) - bids_row = [OrderBookRow(price, qty, update_id) for ts, price, qty, update_id in np_bids] - asks_row = [OrderBookRow(price, qty, update_id) for ts, price, qty, update_id in np_asks] - return bids_row, asks_row diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_order_book_data_source.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_order_book_data_source.py deleted file mode 100755 index 47eed95cfe..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_order_book_data_source.py +++ /dev/null @@ -1,287 +0,0 @@ -import asyncio -import logging -import time -from decimal import Decimal -from typing import AsyncIterable, Dict, List, Optional - -import pandas as pd - -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_active_order_tracker import CoinbaseProActiveOrderTracker -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book import CoinbaseProOrderBook -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_tracker_entry import ( - CoinbaseProOrderBookTrackerEntry, -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_utils import ( - CoinbaseProRESTRequest, - build_coinbase_pro_web_assistant_factory, -) -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.data_type.order_book_message import OrderBookMessage -from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource -from hummingbot.core.data_type.order_book_tracker_entry import OrderBookTrackerEntry -from hummingbot.core.utils.async_utils import safe_gather -from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest -from hummingbot.core.web_assistant.rest_assistant import RESTAssistant -from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory -from hummingbot.core.web_assistant.ws_assistant import WSAssistant -from hummingbot.logger import HummingbotLogger - -MAX_RETRIES = 20 -NaN = float("nan") 
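The removed CoinbaseProActiveOrderTracker above maintains a level-3 book, so the size shown at a price level is the sum of `remaining_size` across every open order at that price. A minimal sketch of that aggregation, using plain dictionaries rather than the connector's own types:

```python
from decimal import Decimal
from typing import Dict

# price -> {order_id -> remaining_size}, the same shape idea as _active_bids/_active_asks
Book = Dict[Decimal, Dict[str, Decimal]]


def volume_for_price(book: Book, price: Decimal) -> Decimal:
    # Mirrors volume_for_bid_price / volume_for_ask_price: the visible quantity
    # at a level is the sum of the remaining sizes of all live orders there.
    return sum(book.get(price, {}).values(), Decimal("0"))


bids: Book = {Decimal("100.0"): {"order-a": Decimal("0.5"), "order-b": Decimal("1.5")}}
print(volume_for_price(bids, Decimal("100.0")))  # 2.0
```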
- - -class CoinbaseProAPIOrderBookDataSource(OrderBookTrackerDataSource): - - MESSAGE_TIMEOUT = 30.0 - PING_TIMEOUT = 10.0 - - _cbpaobds_logger: Optional[HummingbotLogger] = None - _shared_web_assistants_factory: Optional[WebAssistantsFactory] = None - - @classmethod - def logger(cls) -> HummingbotLogger: - if cls._cbpaobds_logger is None: - cls._cbpaobds_logger = logging.getLogger(__name__) - return cls._cbpaobds_logger - - def __init__( - self, - trading_pairs: Optional[List[str]] = None, - web_assistants_factory: Optional[WebAssistantsFactory] = None, - ): - super().__init__(trading_pairs) - self._web_assistants_factory = web_assistants_factory or build_coinbase_pro_web_assistant_factory() - self._rest_assistant = None - - @classmethod - async def get_last_traded_prices(cls, trading_pairs: List[str]) -> Dict[str, Decimal]: - tasks = [cls.get_last_traded_price(t_pair) for t_pair in trading_pairs] - results = await safe_gather(*tasks) - return {t_pair: result for t_pair, result in zip(trading_pairs, results)} - - @classmethod - async def get_last_traded_price(cls, trading_pair: str) -> Decimal: - factory = build_coinbase_pro_web_assistant_factory() - rest_assistant = await factory.get_rest_assistant() - endpoint = f"{CONSTANTS.PRODUCTS_PATH_URL}/{trading_pair}/ticker" - request = CoinbaseProRESTRequest(RESTMethod.GET, endpoint=endpoint) - response = await rest_assistant.call(request) - resp_json = await response.json() - return Decimal(resp_json["price"]) - - @staticmethod - async def fetch_trading_pairs() -> List[str]: - trading_pair_list = [] - try: - factory = build_coinbase_pro_web_assistant_factory() - rest_assistant = await factory.get_rest_assistant() - request = CoinbaseProRESTRequest(RESTMethod.GET, endpoint=CONSTANTS.PRODUCTS_PATH_URL) - response = await rest_assistant.call(request) - if response.status == 200: - markets = await response.json() - raw_trading_pairs: List[str] = list(map(lambda details: details.get('id'), markets)) - trading_pair_list: List[str] = [] - for raw_trading_pair in raw_trading_pairs: - trading_pair_list.append(raw_trading_pair) - except Exception: - # Do nothing if the request fails -- there will be no autocomplete for coinbase trading pairs - pass - return trading_pair_list - - @staticmethod - async def get_snapshot(rest_assistant: RESTAssistant, trading_pair: str) -> Dict[str, any]: - """ - Fetches order book snapshot for a particular trading pair from the rest API - :returns: Response from the rest API - """ - endpoint = f"{CONSTANTS.PRODUCTS_PATH_URL}/{trading_pair}/book?level=3" - request = CoinbaseProRESTRequest(RESTMethod.GET, endpoint=endpoint) - response = await rest_assistant.call(request) - if response.status != 200: - raise IOError(f"Error fetching Coinbase Pro market snapshot for {trading_pair}. 
" - f"HTTP status is {response.status}.") - response_data = await response.json() - return response_data - - async def get_new_order_book(self, trading_pair: str) -> OrderBook: - rest_assistant = await self._get_rest_assistant() - snapshot: Dict[str, any] = await self.get_snapshot(rest_assistant, trading_pair) - snapshot_timestamp: float = time.time() - snapshot_msg: OrderBookMessage = CoinbaseProOrderBook.snapshot_message_from_exchange( - snapshot, - snapshot_timestamp, - metadata={"trading_pair": trading_pair} - ) - active_order_tracker: CoinbaseProActiveOrderTracker = CoinbaseProActiveOrderTracker() - bids, asks = active_order_tracker.convert_snapshot_message_to_order_book_row(snapshot_msg) - order_book = self.order_book_create_function() - order_book.apply_snapshot(bids, asks, snapshot_msg.update_id) - return order_book - - async def get_tracking_pairs(self) -> Dict[str, OrderBookTrackerEntry]: - """ - *required - Initializes order books and order book trackers for the list of trading pairs - returned by `self.get_trading_pairs` - :returns: A dictionary of order book trackers for each trading pair - """ - # Get the currently active markets - trading_pairs: List[str] = self._trading_pairs - retval: Dict[str, OrderBookTrackerEntry] = {} - rest_assistant = await self._get_rest_assistant() - - number_of_pairs: int = len(trading_pairs) - for index, trading_pair in enumerate(trading_pairs): - try: - snapshot: Dict[str, any] = await self.get_snapshot(rest_assistant, trading_pair) - snapshot_timestamp: float = time.time() - snapshot_msg: OrderBookMessage = CoinbaseProOrderBook.snapshot_message_from_exchange( - snapshot, - snapshot_timestamp, - metadata={"trading_pair": trading_pair} - ) - order_book: OrderBook = self.order_book_create_function() - active_order_tracker: CoinbaseProActiveOrderTracker = CoinbaseProActiveOrderTracker() - bids, asks = active_order_tracker.convert_snapshot_message_to_order_book_row(snapshot_msg) - order_book.apply_snapshot(bids, asks, snapshot_msg.update_id) - - retval[trading_pair] = CoinbaseProOrderBookTrackerEntry( - trading_pair, - snapshot_timestamp, - order_book, - active_order_tracker - ) - self.logger().info(f"Initialized order book for {trading_pair}. " - f"{index+1}/{number_of_pairs} completed.") - await self._sleep(0.6) - except IOError: - self.logger().network( - f"Error getting snapshot for {trading_pair}.", - exc_info=True, - app_warning_msg=f"Error getting snapshot for {trading_pair}. Check network connection." - ) - except Exception: - self.logger().error(f"Error initializing order book for {trading_pair}. ", exc_info=True) - return retval - - async def _iter_messages(self, ws: WSAssistant) -> AsyncIterable[Dict]: - """ - Generator function that returns messages from the web socket stream - :param ws: current web socket connection - :returns: message in AsyncIterable format - """ - # Terminate the recv() loop as soon as the next message timed out, so the outer loop can reconnect. - try: - async for response in ws.iter_messages(): - msg = response.data - yield msg - except asyncio.TimeoutError: - self.logger().warning("WebSocket ping timed out. 
Going to reconnect...") - finally: - await ws.disconnect() - - async def listen_for_trades(self, ev_loop: asyncio.BaseEventLoop, output: asyncio.Queue): - # Trade messages are received from the order book web socket - pass - - async def listen_for_order_book_diffs(self, ev_loop: asyncio.AbstractEventLoop, output: asyncio.Queue): - """ - *required - Subscribe to diff channel via web socket, and keep the connection open for incoming messages - :param ev_loop: ev_loop to execute this function in - :param output: an async queue where the incoming messages are stored - """ - while True: - try: - trading_pairs: List[str] = self._trading_pairs - ws_assistant = await self._web_assistants_factory.get_ws_assistant() - await ws_assistant.connect(CONSTANTS.WS_URL, message_timeout=CONSTANTS.WS_MESSAGE_TIMEOUT) - subscribe_payload = { - "type": "subscribe", - "product_ids": trading_pairs, - "channels": [CONSTANTS.FULL_CHANNEL_NAME] - } - subscribe_request = WSJSONRequest(payload=subscribe_payload) - await ws_assistant.subscribe(subscribe_request) - async for msg in self._iter_messages(ws_assistant): - msg_type: str = msg.get("type", None) - if msg_type is None: - raise ValueError(f"Coinbase Pro Websocket message does not contain a type - {msg}") - elif msg_type == "error": - raise ValueError(f"Coinbase Pro Websocket received error message - {msg['message']}") - elif msg_type in ["open", "match", "change", "done"]: - if msg_type == "done" and "price" not in msg: - # done messages with no price are completed market orders which can be ignored - continue - order_book_message: OrderBookMessage = CoinbaseProOrderBook.diff_message_from_exchange(msg) - output.put_nowait(order_book_message) - elif msg_type in ["received", "activate", "subscriptions"]: - # these messages are not needed to track the order book - continue - else: - raise ValueError(f"Unrecognized Coinbase Pro Websocket message received - {msg}") - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - "Unexpected error with WebSocket connection.", - exc_info=True, - app_warning_msg=f"Unexpected error with WebSocket connection." - f" Retrying in {CONSTANTS.REST_API_LIMIT_COOLDOWN} seconds." - f" Check network connection." - ) - await self._sleep(CONSTANTS.WS_RECONNECT_COOLDOWN) - - async def listen_for_order_book_snapshots(self, ev_loop: asyncio.BaseEventLoop, output: asyncio.Queue): - """ - *required - Fetches order book snapshots for each trading pair, and use them to update the local order book - :param ev_loop: ev_loop to execute this function in - :param output: an async queue where the incoming messages are stored - """ - while True: - try: - trading_pairs: List[str] = self._trading_pairs - rest_assistant = await self._get_rest_assistant() - for trading_pair in trading_pairs: - try: - snapshot: Dict[str, any] = await self.get_snapshot(rest_assistant, trading_pair) - snapshot_timestamp: float = time.time() - snapshot_msg: OrderBookMessage = CoinbaseProOrderBook.snapshot_message_from_exchange( - snapshot, - snapshot_timestamp, - metadata={"product_id": trading_pair} - ) - output.put_nowait(snapshot_msg) - self.logger().debug(f"Saved order book snapshot for {trading_pair}") - # Be careful not to go above API rate limits. - await self._sleep(CONSTANTS.REST_API_LIMIT_COOLDOWN) - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - "Unexpected error with WebSocket connection.", - exc_info=True, - app_warning_msg=f"Unexpected error with WebSocket connection." 
- f" Retrying in {CONSTANTS.REST_API_LIMIT_COOLDOWN} seconds." - f" Check network connection." - ) - await self._sleep(CONSTANTS.REST_API_LIMIT_COOLDOWN) - this_hour: pd.Timestamp = pd.Timestamp.utcnow().replace(minute=0, second=0, microsecond=0) - next_hour: pd.Timestamp = this_hour + pd.Timedelta(hours=1) - delta: float = next_hour.timestamp() - time.time() - await self._sleep(delta) - except asyncio.CancelledError: - raise - except Exception: - self.logger().error("Unexpected error.", exc_info=True) - await self._sleep(CONSTANTS.REST_API_LIMIT_COOLDOWN) - - async def _sleep(self, delay: float): - await asyncio.sleep(delay) - - async def _get_rest_assistant(self) -> RESTAssistant: - if self._rest_assistant is None: - self._rest_assistant = await self._web_assistants_factory.get_rest_assistant() - return self._rest_assistant diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_user_stream_data_source.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_user_stream_data_source.py deleted file mode 100755 index d17798aef4..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_api_user_stream_data_source.py +++ /dev/null @@ -1,110 +0,0 @@ -import asyncio -import logging -from typing import AsyncIterable, Dict, List, Optional - -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book import CoinbaseProOrderBook -from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory -from hummingbot.core.web_assistant.ws_assistant import WSAssistant -from hummingbot.logger import HummingbotLogger - - -class CoinbaseProAPIUserStreamDataSource(UserStreamTrackerDataSource): - _cbpausds_logger: Optional[HummingbotLogger] = None - - @classmethod - def logger(cls) -> HummingbotLogger: - if cls._cbpausds_logger is None: - cls._cbpausds_logger = logging.getLogger(__name__) - return cls._cbpausds_logger - - def __init__( - self, - web_assistants_factory: WebAssistantsFactory, - trading_pairs: Optional[List[str]] = None, - ): - self._trading_pairs = trading_pairs - self._web_assistants_factory = web_assistants_factory - self._ws_assistant: Optional[WSAssistant] = None - self._current_listen_key = None - self._listen_for_user_stream_task = None - super().__init__() - - @property - def order_book_class(self): - """ - *required - Get relevant order book class to access class specific methods - :returns: OrderBook class - """ - return CoinbaseProOrderBook - - @property - def last_recv_time(self) -> float: - return self._ws_assistant.last_recv_time if self._ws_assistant is not None else 0 - - async def listen_for_user_stream(self, output: asyncio.Queue): - """ - *required - Subscribe to user stream via web socket, and keep the connection open for incoming messages - - :param output: an async queue where the incoming messages are stored - """ - while True: - try: - self._ws_assistant = await self._web_assistants_factory.get_ws_assistant() - await self._ws_assistant.connect(CONSTANTS.WS_URL, message_timeout=CONSTANTS.WS_MESSAGE_TIMEOUT) - subscribe_payload: Dict[str, any] = { - "type": "subscribe", - "product_ids": self._trading_pairs, - "channels": [CONSTANTS.USER_CHANNEL_NAME] - } - subscribe_request = WSJSONRequest(payload=subscribe_payload, 
is_auth_required=True) - await self._ws_assistant.subscribe(subscribe_request) - async for msg in self._iter_messages(self._ws_assistant): - msg_type: str = msg.get("type", None) - if msg_type is None: - raise ValueError(f"Coinbase Pro Websocket message does not contain a type - {msg}") - elif msg_type == "error": - raise ValueError(f"Coinbase Pro Websocket received error message - {msg['message']}") - elif msg_type in ["open", "match", "change", "done"]: - output.put_nowait(msg) - elif msg_type in ["received", "activate", "subscriptions"]: - # these messages are not needed to track the order book - pass - else: - raise ValueError(f"Unrecognized Coinbase Pro Websocket message received - {msg}") - except asyncio.CancelledError: - self._ws_assistant = None - raise - except Exception: - self._ws_assistant = None - self.logger().network( - "Unexpected error with WebSocket connection.", - exc_info=True, - app_warning_msg=f"Unexpected error with WebSocket connection." - f" Retrying in {CONSTANTS.REST_API_LIMIT_COOLDOWN} seconds." - f" Check network connection." - ) - await self._sleep(CONSTANTS.REST_API_LIMIT_COOLDOWN) - - async def _iter_messages(self, ws: WSAssistant) -> AsyncIterable[Dict]: - """ - Generator function that returns messages from the web socket stream - :param ws: current web socket connection - :returns: message in AsyncIterable format - """ - # Terminate the recv() loop as soon as the next message timed out, so the outer loop can reconnect. - try: - async for response in ws.iter_messages(): - msg = response.data - yield msg - except asyncio.TimeoutError: - self.logger().warning("WebSocket ping timed out. Going to reconnect...") - finally: - await ws.disconnect() - - async def _sleep(self, delay: float): - await asyncio.sleep(delay) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_auth.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_auth.py deleted file mode 100755 index 1bda65144d..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_auth.py +++ /dev/null @@ -1,67 +0,0 @@ -import base64 -import hashlib -import hmac -import time -from typing import Dict - -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_utils import CoinbaseProRESTRequest -from hummingbot.core.web_assistant.auth import AuthBase -from hummingbot.core.web_assistant.connections.data_types import RESTRequest, WSRequest - - -class CoinbaseProAuth(AuthBase): - """ - Auth class required by Coinbase Pro API - Learn more at https://docs.pro.coinbase.com/?python#signing-a-message - """ - def __init__(self, api_key: str, secret_key: str, passphrase: str): - self.api_key = api_key - self.secret_key = secret_key - self.passphrase = passphrase - - async def rest_authenticate(self, request: CoinbaseProRESTRequest) -> RESTRequest: - request.headers = self._get_headers( - method_str=request.method.value, path_url=request.endpoint, body=request.data - ) - return request - - async def ws_authenticate(self, request: WSRequest) -> WSRequest: - auth_dict = self._generate_auth_dict("GET", CONSTANTS.VERIFY_PATH_URL, "") - request.payload.update(auth_dict) - return request - - def _get_headers(self, method_str: str, path_url: str, body: str = "") -> Dict[str, any]: - """ - Generates authentication headers required by coinbase - :param method_str: GET / POST / etc. - :param path_url: e.g. 
"/accounts" - :param body: request payload - :return: a dictionary of auth headers - """ - header_dict = self._generate_auth_dict(method_str, path_url, body) - return { - "CB-ACCESS-SIGN": header_dict["signature"], - "CB-ACCESS-TIMESTAMP": header_dict["timestamp"], - "CB-ACCESS-KEY": header_dict["key"], - "CB-ACCESS-PASSPHRASE": header_dict["passphrase"], - "Content-Type": 'application/json', - } - - def _generate_auth_dict(self, method_str: str, path_url: str, body: str = "") -> Dict[str, any]: - """ - Generates authentication signature and return it in a dictionary along with other inputs - :return: a dictionary of request info including the request signature - """ - timestamp = str(time.time()) - message = timestamp + method_str + path_url + body - hmac_key = base64.b64decode(self.secret_key) - signature = hmac.new(hmac_key, message.encode('utf8'), hashlib.sha256) - signature_b64 = base64.b64encode(bytes(signature.digest())).decode('utf8') - - return { - "signature": signature_b64, - "timestamp": timestamp, - "key": self.api_key, - "passphrase": self.passphrase, - } diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_constants.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_constants.py deleted file mode 100644 index f4ff144937..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_constants.py +++ /dev/null @@ -1,18 +0,0 @@ -WS_MESSAGE_TIMEOUT = 30.0 # seconds -WS_RECONNECT_COOLDOWN = 30.0 # seconds -REST_API_LIMIT_COOLDOWN = 5.0 # seconds - -REST_URL = "https://api.pro.coinbase.com" -WS_URL = "wss://ws-feed.pro.coinbase.com" - -ACCOUNTS_PATH_URL = "/accounts" -FEES_PATH_URL = "/fees" -ORDERS_PATH_URL = "/orders" -PRODUCTS_PATH_URL = "/products" -TIME_PATH_URL = "/time" -TRANSFERS_PATH_URL = "/transfers" -VERIFY_PATH_URL = "/users/self/verify" - -# WebSocket Channels -FULL_CHANNEL_NAME = "full" -USER_CHANNEL_NAME = "user" diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pxd b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pxd deleted file mode 100755 index 4e0afb934d..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pxd +++ /dev/null @@ -1,35 +0,0 @@ -from hummingbot.connector.exchange_base cimport ExchangeBase -from hummingbot.core.data_type.transaction_tracker cimport TransactionTracker - - -cdef class CoinbaseProExchange(ExchangeBase): - cdef: - object _user_stream_tracker - object _ev_loop - object _poll_notifier - double _last_timestamp - double _last_order_update_timestamp - double _last_fee_percentage_update_timestamp - object _maker_fee_percentage - object _taker_fee_percentage - double _poll_interval - dict _in_flight_orders - TransactionTracker _tx_tracker - dict _trading_rules - object _coro_queue - object _status_polling_task - object _coro_scheduler_task - object _user_stream_tracker_task - object _user_stream_event_listener_task - object _trading_rules_polling_task - object _web_assistants_factory - object _rest_assistant - - cdef c_start_tracking_order(self, - str order_id, - str trading_pair, - object trade_type, - object order_type, - object price, - object amount) - cdef c_did_timeout_tx(self, str tracking_id) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pyx b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pyx deleted file mode 100755 index b0e1eab6f0..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_exchange.pyx +++ /dev/null @@ -1,1084 +0,0 @@ -import asyncio 
-import copy -import logging -from decimal import Decimal -from typing import Any, AsyncIterable, Dict, List, Optional, TYPE_CHECKING - -from async_timeout import timeout -from libc.stdint cimport int64_t - -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_order_book_data_source import \ - CoinbaseProAPIOrderBookDataSource -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_auth import CoinbaseProAuth -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_in_flight_order cimport CoinbaseProInFlightOrder -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_in_flight_order import CoinbaseProInFlightOrder -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_tracker import CoinbaseProOrderBookTracker -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_user_stream_tracker import CoinbaseProUserStreamTracker -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_utils import ( - build_coinbase_pro_web_assistant_factory, - CoinbaseProRESTRequest, -) -from hummingbot.connector.exchange_base import ExchangeBase -from hummingbot.connector.trading_rule cimport TradingRule -from hummingbot.core.clock cimport Clock -from hummingbot.core.data_type.cancellation_result import CancellationResult -from hummingbot.core.data_type.limit_order import LimitOrder -from hummingbot.core.data_type.order_book cimport OrderBook -from hummingbot.core.data_type.order_book_message import OrderBookMessage -from hummingbot.core.data_type.transaction_tracker import TransactionTracker -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - MarketTransactionFailureEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.network_iterator import NetworkStatus -from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather -from hummingbot.core.utils.estimate_fee import estimate_fee -from hummingbot.core.utils.tracking_nonce import get_tracking_nonce -from hummingbot.core.web_assistant.connections.data_types import RESTMethod -from hummingbot.core.web_assistant.rest_assistant import RESTAssistant -from hummingbot.logger import HummingbotLogger - -if TYPE_CHECKING: - from hummingbot.client.config.config_helpers import ClientConfigAdapter - -s_logger = None -s_decimal_0 = Decimal("0.0") -s_decimal_nan = Decimal("nan") - -cdef class CoinbaseProExchangeTransactionTracker(TransactionTracker): - cdef: - CoinbaseProExchange _owner - - def __init__(self, owner: CoinbaseProExchange): - super().__init__() - self._owner = owner - - cdef c_did_timeout_tx(self, str tx_id): - TransactionTracker.c_did_timeout_tx(self, tx_id) - self._owner.c_did_timeout_tx(tx_id) - - -cdef class CoinbaseProExchange(ExchangeBase): - MARKET_BUY_ORDER_COMPLETED_EVENT_TAG = MarketEvent.BuyOrderCompleted.value - MARKET_SELL_ORDER_COMPLETED_EVENT_TAG = MarketEvent.SellOrderCompleted.value - MARKET_ORDER_CANCELED_EVENT_TAG = MarketEvent.OrderCancelled.value - MARKET_TRANSACTION_FAILURE_EVENT_TAG = MarketEvent.TransactionFailure.value - MARKET_ORDER_FAILURE_EVENT_TAG = MarketEvent.OrderFailure.value - MARKET_ORDER_FILLED_EVENT_TAG = MarketEvent.OrderFilled.value - MARKET_BUY_ORDER_CREATED_EVENT_TAG = 
MarketEvent.BuyOrderCreated.value - MARKET_SELL_ORDER_CREATED_EVENT_TAG = MarketEvent.SellOrderCreated.value - - API_CALL_TIMEOUT = 10.0 - UPDATE_ORDERS_INTERVAL = 10.0 - UPDATE_FEE_PERCENTAGE_INTERVAL = 60.0 - MAKER_FEE_PERCENTAGE_DEFAULT = 0.005 - TAKER_FEE_PERCENTAGE_DEFAULT = 0.005 - - @classmethod - def logger(cls) -> HummingbotLogger: - global s_logger - if s_logger is None: - s_logger = logging.getLogger(__name__) - return s_logger - - def __init__(self, - client_config_map: "ClientConfigAdapter", - coinbase_pro_api_key: str, - coinbase_pro_secret_key: str, - coinbase_pro_passphrase: str, - poll_interval: float = 5.0, # interval which the class periodically pulls status from the rest API - trading_pairs: Optional[List[str]] = None, - trading_required: bool = True): - super().__init__(client_config_map) - self._trading_required = trading_required - auth = CoinbaseProAuth(coinbase_pro_api_key, coinbase_pro_secret_key, coinbase_pro_passphrase) - self._web_assistants_factory = build_coinbase_pro_web_assistant_factory(auth) - self._set_order_book_tracker(CoinbaseProOrderBookTracker(trading_pairs, self._web_assistants_factory)) - self._user_stream_tracker = CoinbaseProUserStreamTracker( - trading_pairs=trading_pairs, - web_assistants_factory=self._web_assistants_factory, - ) - self._ev_loop = asyncio.get_event_loop() - self._poll_notifier = asyncio.Event() - self._last_timestamp = 0 - self._last_order_update_timestamp = 0 - self._last_fee_percentage_update_timestamp = 0 - self._poll_interval = poll_interval - self._in_flight_orders = {} - self._tx_tracker = CoinbaseProExchangeTransactionTracker(self) - self._trading_rules = {} - self._status_polling_task = None - self._user_stream_tracker_task = None - self._user_stream_event_listener_task = None - self._trading_rules_polling_task = None - self._rest_assistant = None - self._maker_fee_percentage = Decimal(self.MAKER_FEE_PERCENTAGE_DEFAULT) - self._taker_fee_percentage = Decimal(self.TAKER_FEE_PERCENTAGE_DEFAULT) - self._real_time_balance_update = False - - @property - def name(self) -> str: - """ - *required - :return: A lowercase name / id for the market. Must stay consistent with market name in global settings. - """ - return "coinbase_pro" - - @property - def order_books(self) -> Dict[str, OrderBook]: - """ - *required - Get mapping of all the order books that are being tracked. - :return: Dict[trading_pair : OrderBook] - """ - return self.order_book_tracker.order_books - - @property - def status_dict(self) -> Dict[str, bool]: - """ - *required - :return: a dictionary of relevant status checks. - This is used by `ready` method below to determine if a market is ready for trading. 
- """ - return { - "order_books_initialized": self.order_book_tracker.ready, - "account_balance": len(self._account_balances) > 0 if self._trading_required else True, - "trading_rule_initialized": len(self._trading_rules) > 0 if self._trading_required else True - } - - @property - def ready(self) -> bool: - """ - *required - :return: a boolean value that indicates if the market is ready for trading - """ - return all(self.status_dict.values()) - - @property - def limit_orders(self) -> List[LimitOrder]: - """ - *required - :return: list of active limit orders - """ - return [ - in_flight_order.to_limit_order() - for in_flight_order in self._in_flight_orders.values() - ] - - @property - def tracking_states(self) -> Dict[str, any]: - """ - *required - :return: Dict[client_order_id: InFlightOrder] - This is used by the MarketsRecorder class to orchestrate market classes at a higher level. - """ - return { - key: value.to_json() - for key, value in self._in_flight_orders.items() - } - - @property - def in_flight_orders(self) -> Dict[str, CoinbaseProInFlightOrder]: - return self._in_flight_orders - - @property - def user_stream_tracker(self) -> CoinbaseProUserStreamTracker: - return self._user_stream_tracker - - @property - def maker_fee_percentage(self) -> Decimal: - return self._maker_fee_percentage - - @property - def taker_fee_percentage(self) -> Decimal: - return self._taker_fee_percentage - - @property - def trading_rules(self): - return self._trading_rules - - def restore_tracking_states(self, saved_states: Dict[str, any]): - """ - *required - Updates inflight order statuses from API results - This is used by the MarketsRecorder class to orchestrate market classes at a higher level. - """ - self._in_flight_orders.update({ - key: CoinbaseProInFlightOrder.from_json(value) - for key, value in saved_states.items() - }) - - cdef c_start(self, Clock clock, double timestamp): - """ - *required - c_start function used by top level Clock to orchestrate components of the bot - """ - self._tx_tracker.c_start(clock, timestamp) - ExchangeBase.c_start(self, clock, timestamp) - - async def start_network(self): - """ - *required - Async function used by NetworkBase class to handle when a single market goes online - """ - self._stop_network() - self.order_book_tracker.start() - if self._trading_required: - self._status_polling_task = safe_ensure_future(self._status_polling_loop()) - self._trading_rules_polling_task = safe_ensure_future(self._trading_rules_polling_loop()) - self._user_stream_tracker_task = safe_ensure_future(self._user_stream_tracker.start()) - self._user_stream_event_listener_task = safe_ensure_future(self._user_stream_event_listener()) - - def _stop_network(self): - """ - Synchronous function that handles when a single market goes offline - """ - self.order_book_tracker.stop() - if self._status_polling_task is not None: - self._status_polling_task.cancel() - if self._user_stream_tracker_task is not None: - self._user_stream_tracker_task.cancel() - if self._user_stream_event_listener_task is not None: - self._user_stream_event_listener_task.cancel() - self._status_polling_task = self._user_stream_tracker_task = \ - self._user_stream_event_listener_task = None - - async def stop_network(self): - """ - *required - Async wrapper for `self._stop_network`. Used by NetworkBase class to handle when a single market goes offline. 
- """ - self._stop_network() - - async def check_network(self) -> NetworkStatus: - """ - *required - Async function used by NetworkBase class to check if the market is online / offline. - """ - try: - await self._api_request(RESTMethod.GET, endpoint=CONSTANTS.TIME_PATH_URL) - except asyncio.CancelledError: - raise - except Exception: - return NetworkStatus.NOT_CONNECTED - return NetworkStatus.CONNECTED - - cdef c_tick(self, double timestamp): - """ - *required - Used by top level Clock to orchestrate components of the bot. - This function is called frequently with every clock tick - """ - cdef: - int64_t last_tick = (self._last_timestamp / self._poll_interval) - int64_t current_tick = (timestamp / self._poll_interval) - - ExchangeBase.c_tick(self, timestamp) - if current_tick > last_tick: - if not self._poll_notifier.is_set(): - self._poll_notifier.set() - self._last_timestamp = timestamp - - async def _get_rest_assistant(self) -> RESTAssistant: - if self._rest_assistant is None: - self._rest_assistant = await self._web_assistants_factory.get_rest_assistant() - return self._rest_assistant - - async def _api_request( - self, - method: RESTMethod, - url: Optional[str] = None, - endpoint: Optional[str] = None, - data: Any = None, - ) -> Dict[str, Any]: - """ - A wrapper for submitting API requests to Coinbase Pro - :returns: json data from the endpoints - """ - client = await self._get_rest_assistant() - request = CoinbaseProRESTRequest(method, url, data=data, endpoint=endpoint, is_auth_required=True) - request.data = "" if request.data is None else request.data - response = await client.call(request, timeout=self.API_CALL_TIMEOUT) - resp_data = await response.json() - if response.status != 200: - raise IOError(f"Error fetching data from {response.url}. HTTP status is {response.status}. 
{resp_data}") - response_data = await response.json() - return response_data - - cdef object c_get_fee(self, - str base_currency, - str quote_currency, - object order_type, - object order_side, - object amount, - object price, - object is_maker = None): - """ - *required - function to calculate fees for a particular order - :returns: TradeFee class that includes fee percentage and flat fees - """ - # There is no API for checking user's fee tier - # Fee info from https://pro.coinbase.com/fees - is_maker = order_type is OrderType.LIMIT_MAKER - return estimate_fee("coinbase_pro", is_maker) - - async def _update_fee_percentage(self): - """ - Pulls the API for updated balances - """ - cdef: - double current_timestamp = self._current_timestamp - - if current_timestamp - self._last_fee_percentage_update_timestamp <= self.UPDATE_FEE_PERCENTAGE_INTERVAL: - return - - fee_info = await self._api_request(RESTMethod.GET, endpoint=CONSTANTS.FEES_PATH_URL) - self._maker_fee_percentage = Decimal(fee_info["maker_fee_rate"]) - self._taker_fee_percentage = Decimal(fee_info["taker_fee_rate"]) - self._last_fee_percentage_update_timestamp = current_timestamp - - async def _update_balances(self): - """ - Pulls the API for updated balances - """ - cdef: - dict account_info - list balances - str asset_name - set local_asset_names = set(self._account_balances.keys()) - set remote_asset_names = set() - set asset_names_to_remove - - account_balances = await self._api_request(RESTMethod.GET, endpoint=CONSTANTS.ACCOUNTS_PATH_URL) - - for balance_entry in account_balances: - asset_name = balance_entry["currency"] - available_balance = Decimal(balance_entry["available"]) - total_balance = Decimal(balance_entry["balance"]) - self._account_available_balances[asset_name] = available_balance - self._account_balances[asset_name] = total_balance - remote_asset_names.add(asset_name) - - asset_names_to_remove = local_asset_names.difference(remote_asset_names) - for asset_name in asset_names_to_remove: - del self._account_available_balances[asset_name] - del self._account_balances[asset_name] - self._in_flight_orders_snapshot = {k: copy.copy(v) for k, v in self._in_flight_orders.items()} - self._in_flight_orders_snapshot_timestamp = self._current_timestamp - - async def _update_trading_rules(self): - """ - Pulls the API for trading rules (min / max order size, etc) - """ - cdef: - # The poll interval for withdraw rules is 60 seconds. 
- int64_t last_tick = (self._last_timestamp / 60.0) - int64_t current_tick = (self._current_timestamp / 60.0) - if current_tick > last_tick or len(self._trading_rules) == 0: - product_info = await self._api_request(RESTMethod.GET, endpoint=CONSTANTS.PRODUCTS_PATH_URL) - trading_rules_list = self._format_trading_rules(product_info) - self._trading_rules.clear() - for trading_rule in trading_rules_list: - self._trading_rules[trading_rule.trading_pair] = trading_rule - - def _format_trading_rules(self, raw_trading_rules: List[Any]) -> List[TradingRule]: - """ - Turns json data from API into TradingRule instances - :returns: List of TradingRule - """ - cdef: - list retval = [] - for rule in raw_trading_rules: - try: - trading_pair = rule.get("id") - retval.append(TradingRule(trading_pair, - min_price_increment=Decimal(str(rule.get("quote_increment"))), - min_base_amount_increment=Decimal(str(rule.get("base_increment"))), - min_notional_size=Decimal(str(rule.get("min_market_funds"))), - supports_market_orders=(not rule.get("limit_only")))) - except Exception: - self.logger().error(f"Error parsing the trading_pair rule {rule}. Skipping.", exc_info=True) - return retval - - async def _update_order_status(self): - """ - Pulls the rest API for for latest order statuses and update local order statuses. - """ - cdef: - double current_timestamp = self._current_timestamp - - if current_timestamp - self._last_order_update_timestamp <= self.UPDATE_ORDERS_INTERVAL: - return - - tracked_orders = list(self._in_flight_orders.values()) - results = await self.list_orders() - order_dict = dict((result["id"], result) for result in results) - - for tracked_order in tracked_orders: - exchange_order_id = await tracked_order.get_exchange_order_id() - order_update = order_dict.get(exchange_order_id) - client_order_id = tracked_order.client_order_id - if order_update is None: - try: - order = await self.get_order(client_order_id) - except IOError as e: - if "order not found" in str(e): - # The order does not exist. So we should not be tracking it. - self.logger().info( - f"The tracked order {client_order_id} does not exist on Coinbase Pro." - f"Order removed from tracking." - ) - self.c_stop_tracking_order(client_order_id) - self.c_trigger_event( - self.MARKET_ORDER_CANCELED_EVENT_TAG, - OrderCancelledEvent(self._current_timestamp, client_order_id) - ) - except asyncio.CancelledError: - raise - except Exception as e: - self.logger().network( - f"Error fetching status update for the order {client_order_id}: ", - exc_info=True, - app_warning_msg=f"Could not fetch updates for the order {client_order_id}. " - f"Check API key and network connection.{e}" - ) - continue - - done_reason = order_update.get("done_reason") - # Calculate the newly executed amount for this update. - new_confirmed_amount = Decimal(order_update["filled_size"]) - execute_amount_diff = new_confirmed_amount - tracked_order.executed_amount_base - execute_price = s_decimal_0 if new_confirmed_amount == s_decimal_0 \ - else Decimal(order_update["executed_value"]) / new_confirmed_amount - - order_type_description = tracked_order.order_type_description - order_type = tracked_order.order_type - # Emit event if executed amount is greater than 0. 
- if execute_amount_diff > s_decimal_0: - order_filled_event = OrderFilledEvent( - self._current_timestamp, - tracked_order.client_order_id, - tracked_order.trading_pair, - tracked_order.trade_type, - order_type, - execute_price, - execute_amount_diff, - self.c_get_fee( - tracked_order.base_asset, - tracked_order.quote_asset, - order_type, - tracked_order.trade_type, - execute_price, - execute_amount_diff, - ), - # Coinbase Pro's websocket stream tags events with order_id rather than trade_id - # Using order_id here for easier data validation - exchange_trade_id=str(int(self._time() * 1e6)), - ) - self.logger().info(f"Filled {execute_amount_diff} out of {tracked_order.amount} of the " - f"{order_type_description} order {client_order_id}.") - self.c_trigger_event(self.MARKET_ORDER_FILLED_EVENT_TAG, order_filled_event) - - # Update the tracked order - tracked_order.last_state = done_reason if done_reason in {"filled", "canceled"} else order_update["status"] - tracked_order.executed_amount_base = new_confirmed_amount - tracked_order.executed_amount_quote = Decimal(order_update["executed_value"]) - tracked_order.fee_paid = Decimal(order_update["fill_fees"]) - if tracked_order.is_done: - if not tracked_order.is_failure: - if tracked_order.trade_type == TradeType.BUY: - self.logger().info(f"The market buy order {tracked_order.client_order_id} has completed " - f"according to order status API.") - self.c_trigger_event(self.MARKET_BUY_ORDER_COMPLETED_EVENT_TAG, - BuyOrderCompletedEvent(self._current_timestamp, - tracked_order.client_order_id, - tracked_order.base_asset, - tracked_order.quote_asset, - tracked_order.executed_amount_base, - tracked_order.executed_amount_quote, - order_type)) - else: - self.logger().info(f"The market sell order {tracked_order.client_order_id} has completed " - f"according to order status API.") - self.c_trigger_event(self.MARKET_SELL_ORDER_COMPLETED_EVENT_TAG, - SellOrderCompletedEvent(self._current_timestamp, - tracked_order.client_order_id, - tracked_order.base_asset, - tracked_order.quote_asset, - tracked_order.executed_amount_base, - tracked_order.executed_amount_quote, - order_type)) - else: - self.logger().info(f"The market order {tracked_order.client_order_id} has failed/been canceled " - f"according to order status API.") - self.c_trigger_event(self.MARKET_ORDER_CANCELED_EVENT_TAG, - OrderCancelledEvent( - self._current_timestamp, - tracked_order.client_order_id - )) - self.c_stop_tracking_order(tracked_order.client_order_id) - self._last_order_update_timestamp = current_timestamp - - async def _iter_user_event_queue(self) -> AsyncIterable[OrderBookMessage]: - """ - Iterator for incoming messages from the user stream. - """ - while True: - try: - yield await self._user_stream_tracker.user_stream.get() - except asyncio.CancelledError: - raise - except Exception: - self.logger().error("Unknown error. 
Retrying after 1 seconds.", exc_info=True) - await asyncio.sleep(1.0) - - async def _user_stream_event_listener(self): - """ - Update order statuses from incoming messages from the user stream - """ - async for event_message in self._iter_user_event_queue(): - try: - content = event_message - event_type = content.get("type") - exchange_order_ids = [content.get("order_id"), - content.get("maker_order_id"), - content.get("taker_order_id")] - - tracked_order = None - for order in list(self._in_flight_orders.values()): - await order.get_exchange_order_id() - if order.exchange_order_id in exchange_order_ids: - tracked_order = order - break - - if tracked_order is None: - continue - - order_type_description = tracked_order.order_type_description - execute_price = Decimal(content.get("price", 0.0)) - execute_amount_diff = s_decimal_0 - - if event_type == "match": - updated = tracked_order.update_with_trade_update(content) - if updated: - execute_amount_diff = Decimal(content.get("size", 0.0)) - self.logger().info(f"Filled {execute_amount_diff} out of {tracked_order.amount} of the " - f"{order_type_description} order {tracked_order.client_order_id}") - exchange_order_id = tracked_order.exchange_order_id - - self.c_trigger_event(self.MARKET_ORDER_FILLED_EVENT_TAG, - OrderFilledEvent( - self._current_timestamp, - tracked_order.client_order_id, - tracked_order.trading_pair, - tracked_order.trade_type, - tracked_order.order_type, - execute_price, - execute_amount_diff, - AddedToCostTradeFee( - percent=tracked_order.fee_rate_from_trade_update(content) - ), - exchange_trade_id=content["trade_id"] - )) - - if event_type == "change": - if content.get("new_size") is not None: - tracked_order.amount = Decimal(content.get("new_size", 0.0)) - elif content.get("new_funds") is not None: - if tracked_order.price is not s_decimal_0: - tracked_order.amount = Decimal(content.get("new_funds")) / tracked_order.price - else: - self.logger().error(f"Invalid change message - '{content}'. 
Aborting.") - - if event_type in ["open", "done"]: - remaining_size = Decimal(content.get("remaining_size", tracked_order.amount)) - new_confirmed_amount = tracked_order.amount - remaining_size - execute_amount_diff = new_confirmed_amount - tracked_order.executed_amount_base - tracked_order.executed_amount_base = new_confirmed_amount - tracked_order.executed_amount_quote += execute_amount_diff * execute_price - - if content.get("reason") == "filled": # Only handles orders with "done" status - if tracked_order.trade_type == TradeType.BUY: - self.logger().info(f"The market buy order {tracked_order.client_order_id} has completed " - f"according to Coinbase Pro user stream.") - self.c_trigger_event(self.MARKET_BUY_ORDER_COMPLETED_EVENT_TAG, - BuyOrderCompletedEvent(self._current_timestamp, - tracked_order.client_order_id, - tracked_order.base_asset, - tracked_order.quote_asset, - tracked_order.executed_amount_base, - tracked_order.executed_amount_quote, - tracked_order.order_type)) - else: - self.logger().info(f"The market sell order {tracked_order.client_order_id} has completed " - f"according to Coinbase Pro user stream.") - self.c_trigger_event(self.MARKET_SELL_ORDER_COMPLETED_EVENT_TAG, - SellOrderCompletedEvent(self._current_timestamp, - tracked_order.client_order_id, - tracked_order.base_asset, - tracked_order.quote_asset, - tracked_order.executed_amount_base, - tracked_order.executed_amount_quote, - tracked_order.order_type)) - tracked_order.last_state = "filled" - self.c_stop_tracking_order(tracked_order.client_order_id) - - elif content.get("reason") == "canceled": # reason == "canceled": - execute_amount_diff = 0 - tracked_order.last_state = "canceled" - self.c_trigger_event(self.MARKET_ORDER_CANCELED_EVENT_TAG, - OrderCancelledEvent(self._current_timestamp, tracked_order.client_order_id)) - execute_amount_diff = 0 - self.c_stop_tracking_order(tracked_order.client_order_id) - - except asyncio.CancelledError: - raise - except Exception: - self.logger().error("Unexpected error in user stream listener loop.", exc_info=True) - await asyncio.sleep(5.0) - - def supported_order_types(self): - return [OrderType.LIMIT, OrderType.LIMIT_MAKER] - - async def place_order(self, order_id: str, trading_pair: str, amount: Decimal, is_buy: bool, order_type: OrderType, - price: Decimal): - """ - Async wrapper for placing orders through the rest API. 
- :returns: json response from the API - """ - data = { - "size": f"{amount:f}", - "product_id": trading_pair, - "side": "buy" if is_buy else "sell", - "type": "limit", - } - if order_type is OrderType.LIMIT: - data["price"] = f"{price:f}" - elif order_type is OrderType.LIMIT_MAKER: - data["price"] = f"{price:f}" - data["post_only"] = True - order_result = await self._api_request(RESTMethod.POST, endpoint=CONSTANTS.ORDERS_PATH_URL, data=data) - return order_result - - async def execute_buy(self, - order_id: str, - trading_pair: str, - amount: Decimal, - order_type: OrderType, - price: Optional[Decimal] = s_decimal_0): - """ - Function that takes strategy inputs, auto corrects itself with trading rule, - and submit an API request to place a buy order - """ - cdef: - TradingRule trading_rule = self._trading_rules[trading_pair] - - decimal_amount = self.quantize_order_amount(trading_pair, amount) - decimal_price = self.quantize_order_price(trading_pair, price) - if decimal_amount < trading_rule.min_order_size: - raise ValueError(f"Buy order amount {decimal_amount} is lower than the minimum order size " - f"{trading_rule.min_order_size}.") - - try: - self.c_start_tracking_order(order_id, trading_pair, order_type, TradeType.BUY, decimal_price, decimal_amount) - order_result = await self.place_order(order_id, trading_pair, decimal_amount, True, order_type, decimal_price) - - exchange_order_id = order_result["id"] - tracked_order = self._in_flight_orders.get(order_id) - if tracked_order is not None: - self.logger().info(f"Created {order_type} buy order {order_id} for {decimal_amount} {trading_pair}.") - tracked_order.update_exchange_order_id(exchange_order_id) - - self.c_trigger_event(self.MARKET_BUY_ORDER_CREATED_EVENT_TAG, - BuyOrderCreatedEvent(self._current_timestamp, - order_type, - trading_pair, - decimal_amount, - decimal_price, - order_id, - tracked_order.creation_timestamp)) - except asyncio.CancelledError: - raise - except Exception: - self.c_stop_tracking_order(order_id) - order_type_str = order_type.name.lower() - self.logger().network( - f"Error submitting buy {order_type_str} order to Coinbase Pro for " - f"{decimal_amount} {trading_pair} {price}.", - exc_info=True, - app_warning_msg="Failed to submit buy order to Coinbase Pro. " - "Check API key and network connection." - ) - self.c_trigger_event(self.MARKET_ORDER_FAILURE_EVENT_TAG, - MarketOrderFailureEvent(self._current_timestamp, order_id, order_type)) - - cdef str c_buy(self, str trading_pair, object amount, object order_type=OrderType.LIMIT, object price=s_decimal_0, - dict kwargs={}): - """ - *required - Synchronous wrapper that generates a client-side order ID and schedules the buy order. 
- """ - cdef: - int64_t tracking_nonce = get_tracking_nonce() - str order_id = str(f"buy-{trading_pair}-{tracking_nonce}") - - safe_ensure_future(self.execute_buy(order_id, trading_pair, amount, order_type, price)) - return order_id - - async def execute_sell(self, - order_id: str, - trading_pair: str, - amount: Decimal, - order_type: OrderType, - price: Optional[Decimal] = s_decimal_0): - """ - Function that takes strategy inputs, auto corrects itself with trading rule, - and submit an API request to place a sell order - """ - cdef: - TradingRule trading_rule = self._trading_rules[trading_pair] - - decimal_amount = self.quantize_order_amount(trading_pair, amount) - decimal_price = self.quantize_order_price(trading_pair, price) - if decimal_amount < trading_rule.min_order_size: - raise ValueError(f"Sell order amount {decimal_amount} is lower than the minimum order size " - f"{trading_rule.min_order_size}.") - - try: - self.c_start_tracking_order(order_id, trading_pair, order_type, TradeType.SELL, decimal_price, decimal_amount) - order_result = await self.place_order(order_id, trading_pair, decimal_amount, False, order_type, decimal_price) - - exchange_order_id = order_result["id"] - tracked_order = self._in_flight_orders.get(order_id) - if tracked_order is not None: - self.logger().info(f"Created {order_type} sell order {order_id} for {decimal_amount} {trading_pair}.") - tracked_order.update_exchange_order_id(exchange_order_id) - - self.c_trigger_event(self.MARKET_SELL_ORDER_CREATED_EVENT_TAG, - SellOrderCreatedEvent(self._current_timestamp, - order_type, - trading_pair, - decimal_amount, - decimal_price, - order_id, - tracked_order.creation_timestamp)) - except asyncio.CancelledError: - raise - except Exception: - self.c_stop_tracking_order(order_id) - order_type_str = order_type.name.lower() - self.logger().network( - f"Error submitting sell {order_type_str} order to Coinbase Pro for " - f"{decimal_amount} {trading_pair} {price}.", - exc_info=True, - app_warning_msg="Failed to submit sell order to Coinbase Pro. " - "Check API key and network connection." - ) - self.c_trigger_event(self.MARKET_ORDER_FAILURE_EVENT_TAG, - MarketOrderFailureEvent(self._current_timestamp, order_id, order_type)) - - cdef str c_sell(self, - str trading_pair, - object amount, - object order_type=OrderType.LIMIT, - object price=s_decimal_0, - dict kwargs={}): - """ - *required - Synchronous wrapper that generates a client-side order ID and schedules the sell order. - """ - cdef: - int64_t tracking_nonce = get_tracking_nonce() - str order_id = str(f"sell-{trading_pair}-{tracking_nonce}") - safe_ensure_future(self.execute_sell(order_id, trading_pair, amount, order_type, price)) - return order_id - - async def execute_cancel(self, trading_pair: str, order_id: str): - """ - Function that makes API request to cancel an active order - """ - try: - exchange_order_id = await self._in_flight_orders.get(order_id).get_exchange_order_id() - endpoint = f"{CONSTANTS.ORDERS_PATH_URL}/{exchange_order_id}" - cancelled_id = await self._api_request(RESTMethod.DELETE, endpoint=endpoint) - if cancelled_id == exchange_order_id: - self.logger().info(f"Successfully canceled order {order_id}.") - self.c_stop_tracking_order(order_id) - self.c_trigger_event(self.MARKET_ORDER_CANCELED_EVENT_TAG, - OrderCancelledEvent(self._current_timestamp, order_id)) - return order_id - except IOError as e: - if "order not found" in str(e): - # The order was never there to begin with. So cancelling it is a no-op but semantically successful. 
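-                # Treating an unknown order as successfully canceled keeps the in-flight tracking consistent:
-                # the order is untracked and an OrderCancelledEvent is emitted, just as in the success path above.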
- self.logger().info(f"The order {order_id} does not exist on Coinbase Pro. No cancelation needed.") - self.c_stop_tracking_order(order_id) - self.c_trigger_event(self.MARKET_ORDER_CANCELED_EVENT_TAG, - OrderCancelledEvent(self._current_timestamp, order_id)) - return order_id - except asyncio.CancelledError: - raise - except Exception as e: - self.logger().network( - f"Failed to cancel order {order_id}: ", - exc_info=True, - app_warning_msg=f"Failed to cancel the order {order_id} on Coinbase Pro. " - f"Check API key and network connection.{e}" - ) - return None - - cdef c_cancel(self, str trading_pair, str order_id): - """ - *required - Synchronous wrapper that schedules cancelling an order. - """ - safe_ensure_future(self.execute_cancel(trading_pair, order_id)) - return order_id - - async def cancel_all(self, timeout_seconds: float) -> List[CancellationResult]: - """ - *required - Async function that cancels all active orders. - Used by bot's top level stop and exit commands (cancelling outstanding orders on exit) - :returns: List of CancellationResult which indicates whether each order is successfully cancelled. - """ - incomplete_orders = [o for o in self._in_flight_orders.values() if not o.is_done] - tasks = [self.execute_cancel(o.trading_pair, o.client_order_id) for o in incomplete_orders] - order_id_set = set([o.client_order_id for o in incomplete_orders]) - successful_cancellations = [] - - try: - async with timeout(timeout_seconds): - results = await safe_gather(*tasks, return_exceptions=True) - for client_order_id in results: - if type(client_order_id) is str: - order_id_set.remove(client_order_id) - successful_cancellations.append(CancellationResult(client_order_id, True)) - else: - self.logger().warning( - f"failed to cancel order with error: " - f"{repr(client_order_id)}" - ) - except Exception as e: - self.logger().network( - f"Unexpected error canceling orders.", - exc_info=True, - app_warning_msg="Failed to cancel order on Coinbase Pro. Check API key and network connection." - ) - - failed_cancellations = [CancellationResult(oid, False) for oid in order_id_set] - return successful_cancellations + failed_cancellations - - async def _status_polling_loop(self): - """ - Background process that periodically pulls for changes from the rest API - """ - while True: - try: - self._poll_notifier = asyncio.Event() - await self._poll_notifier.wait() - - await safe_gather( - self._update_balances(), - self._update_order_status(), - self._update_fee_percentage(), - ) - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - "Unexpected error while fetching account updates.", - exc_info=True, - app_warning_msg=f"Could not fetch account updates on Coinbase Pro. " - f"Check API key and network connection." - ) - - async def _trading_rules_polling_loop(self): - """ - Separate background process that periodically pulls for trading rule changes - (Since trading rules don't get updated often, it is pulled less often.) - """ - while True: - try: - await safe_gather(self._update_trading_rules()) - await asyncio.sleep(60) - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - "Unexpected error while fetching trading rules.", - exc_info=True, - app_warning_msg=f"Could not fetch trading rule updates on Coinbase Pro. " - f"Check network connection." 
- ) - await asyncio.sleep(0.5) - - async def get_order(self, client_order_id: str) -> Dict[str, Any]: - """ - Gets status update for a particular order via rest API - :returns: json response - """ - order = self._in_flight_orders.get(client_order_id) - if order is None: - return None - exchange_order_id = await order.get_exchange_order_id() - endpoint = f"{CONSTANTS.ORDERS_PATH_URL}/{exchange_order_id}" - result = await self._api_request(RESTMethod.GET, endpoint=endpoint) - return result - - async def list_orders(self) -> List[Any]: - """ - Gets a list of the user's active orders via rest API - :returns: json response - """ - endpoint = f"{CONSTANTS.ORDERS_PATH_URL}?status=all" - result = await self._api_request(RESTMethod.GET, endpoint=endpoint) - return result - - cdef OrderBook c_get_order_book(self, str trading_pair): - """ - :returns: OrderBook for a specific trading pair - """ - cdef: - dict order_books = self._order_book_tracker.order_books - - if trading_pair not in order_books: - raise ValueError(f"No order book exists for '{trading_pair}'.") - return order_books[trading_pair] - - def start_tracking_order(self, - order_id: str, - trading_pair: str, - order_type: OrderType, - trade_type: TradeType, - price: Decimal, - amount: Decimal): - self.c_start_tracking_order( - order_id, - trading_pair, - order_type, - trade_type, - price, - amount) - - cdef c_start_tracking_order(self, - str client_order_id, - str trading_pair, - object order_type, - object trade_type, - object price, - object amount): - """ - Add new order to self._in_flight_orders mapping - """ - self._in_flight_orders[client_order_id] = CoinbaseProInFlightOrder( - client_order_id, - None, - trading_pair, - order_type, - trade_type, - price, - amount, - creation_timestamp=self.current_timestamp - ) - - cdef c_stop_tracking_order(self, str order_id): - """ - Delete an order from self._in_flight_orders mapping - """ - if order_id in self._in_flight_orders: - del self._in_flight_orders[order_id] - - cdef c_did_timeout_tx(self, str tracking_id): - """ - Triggers MarketEvent.TransactionFailure when an Ethereum transaction has timed out - """ - self.c_trigger_event(self.MARKET_TRANSACTION_FAILURE_EVENT_TAG, - MarketTransactionFailureEvent(self._current_timestamp, tracking_id)) - - cdef object c_get_order_price_quantum(self, str trading_pair, object price): - """ - *required - Get the minimum increment interval for price - :return: Min order price increment in Decimal format - """ - cdef: - TradingRule trading_rule = self._trading_rules[trading_pair] - return trading_rule.min_price_increment - - cdef object c_get_order_size_quantum(self, str trading_pair, object order_size): - """ - *required - Get the minimum increment interval for order size (e.g. 0.01 USD) - :return: Min order size increment in Decimal format - """ - cdef: - TradingRule trading_rule = self._trading_rules[trading_pair] - return trading_rule.min_base_amount_increment - - cdef object c_quantize_order_amount(self, str trading_pair, object amount, object price=s_decimal_0): - """ - *required - Check current order amount against trading rule, and correct any rule violations - :return: Valid order amount in Decimal format - """ - cdef: - TradingRule trading_rule = self._trading_rules[trading_pair] - - global s_decimal_0 - quantized_amount = ExchangeBase.c_quantize_order_amount(self, trading_pair, amount) - - # Check against min_order_size. If not passing either check, return 0. 
- if quantized_amount < trading_rule.min_order_size: - return s_decimal_0 - - # Check against max_order_size. If not passing either check, return 0. - if quantized_amount > trading_rule.max_order_size: - return s_decimal_0 - - return quantized_amount - - def get_price(self, trading_pair: str, is_buy: bool) -> Decimal: - return self.c_get_price(trading_pair, is_buy) - - def buy(self, trading_pair: str, amount: Decimal, order_type=OrderType.MARKET, - price: Decimal = s_decimal_nan, **kwargs) -> str: - return self.c_buy(trading_pair, amount, order_type, price, kwargs) - - def sell(self, trading_pair: str, amount: Decimal, order_type=OrderType.MARKET, - price: Decimal = s_decimal_nan, **kwargs) -> str: - return self.c_sell(trading_pair, amount, order_type, price, kwargs) - - def cancel(self, trading_pair: str, client_order_id: str): - return self.c_cancel(trading_pair, client_order_id) - - def get_fee(self, - base_currency: str, - quote_currency: str, - order_type: OrderType, - order_side: TradeType, - amount: Decimal, - price: Decimal = s_decimal_nan, - is_maker: Optional[bool] = None) -> AddedToCostTradeFee: - return self.c_get_fee(base_currency, quote_currency, order_type, order_side, amount, price, is_maker) - - def get_order_book(self, trading_pair: str) -> OrderBook: - return self.c_get_order_book(trading_pair) - - async def all_trading_pairs(self) -> List[str]: - # This method should be removed and instead we should implement _initialize_trading_pair_symbol_map - return await CoinbaseProAPIOrderBookDataSource.fetch_trading_pairs() - - async def get_last_traded_prices(self, trading_pairs: List[str]) -> Dict[str, float]: - # This method should be removed and instead we should implement _get_last_traded_price - return await CoinbaseProAPIOrderBookDataSource.get_last_traded_prices(trading_pairs=trading_pairs) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pxd b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pxd deleted file mode 100644 index fef8bd2733..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pxd +++ /dev/null @@ -1,6 +0,0 @@ -from hummingbot.connector.in_flight_order_base cimport InFlightOrderBase - - -cdef class CoinbaseProInFlightOrder(InFlightOrderBase): - cdef: - object trade_id_set diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pyx b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pyx deleted file mode 100644 index 85bf8b67bb..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_in_flight_order.pyx +++ /dev/null @@ -1,81 +0,0 @@ -from decimal import Decimal -from typing import Any, Dict, Optional - -from hummingbot.connector.in_flight_order_base import InFlightOrderBase -from hummingbot.core.data_type.common import OrderType, TradeType - - -cdef class CoinbaseProInFlightOrder(InFlightOrderBase): - def __init__(self, - client_order_id: str, - exchange_order_id: Optional[str], - trading_pair: str, - order_type: OrderType, - trade_type: TradeType, - price: Decimal, - amount: Decimal, - creation_timestamp: float, - initial_state: str = "open"): - super().__init__( - client_order_id, - exchange_order_id, - trading_pair, - order_type, - trade_type, - price, - amount, - creation_timestamp, - initial_state, - ) - - self.trade_id_set = set() - self.fee_asset = self.quote_asset - - @property - def is_done(self) -> bool: - return self.last_state in {"filled", "canceled", "done"} - - @property - def 
is_failure(self) -> bool: - # This is the only known canceled state - return self.last_state == "canceled" - - @property - def is_cancelled(self) -> bool: - return self.last_state == "canceled" - - @property - def order_type_description(self) -> str: - """ - :return: Order description string . One of ["limit buy" / "limit sell" / "market buy" / "market sell"] - """ - order_type = "limit_maker" if self.order_type is OrderType.LIMIT_MAKER else "limit" - side = "buy" if self.trade_type == TradeType.BUY else "sell" - return f"{order_type} {side}" - - def fee_rate_from_trade_update(self, trade_update: Dict[str, Any]) -> Decimal: - maker_fee_rate = Decimal(str(trade_update.get("maker_fee_rate", "0"))) - taker_fee_rate = Decimal(str(trade_update.get("taker_fee_rate", "0"))) - fee_rate = max(maker_fee_rate, taker_fee_rate) - return fee_rate - - def update_with_trade_update(self, trade_update: Dict[str, Any]) -> bool: - """ - Updates the in flight order with trade update (from GET /trade_history end point) - return: True if the order gets updated otherwise False - """ - trade_id = trade_update["trade_id"] - if (self.exchange_order_id not in [trade_update["maker_order_id"], trade_update["taker_order_id"]] - or trade_id in self.trade_id_set): - return False - self.trade_id_set.add(trade_id) - trade_amount = Decimal(str(trade_update["size"])) - trade_price = Decimal(str(trade_update["price"])) - quote_amount = trade_amount * trade_price - - self.executed_amount_base += trade_amount - self.executed_amount_quote += quote_amount - fee_rate = self.fee_rate_from_trade_update(trade_update) - self.fee_paid += quote_amount * fee_rate - - return True diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pxd b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pxd deleted file mode 100644 index a4d1ee15ac..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pxd +++ /dev/null @@ -1,5 +0,0 @@ -from hummingbot.core.data_type.order_book cimport OrderBook - - -cdef class CoinbaseProOrderBook(OrderBook): - pass diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pyx b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pyx deleted file mode 100644 index 6837e0f0a3..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book.pyx +++ /dev/null @@ -1,69 +0,0 @@ -import logging -from typing import Dict, List, Optional - -import pandas as pd - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_message import CoinbaseProOrderBookMessage -from hummingbot.core.data_type.order_book cimport OrderBook -from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType -from hummingbot.logger import HummingbotLogger - -_cbpob_logger = None - - -cdef class CoinbaseProOrderBook(OrderBook): - @classmethod - def logger(cls) -> HummingbotLogger: - global _cbpob_logger - if _cbpob_logger is None: - _cbpob_logger = logging.getLogger(__name__) - return _cbpob_logger - - @classmethod - def snapshot_message_from_exchange(cls, - msg: Dict[str, any], - timestamp: float, - metadata: Optional[Dict] = None) -> OrderBookMessage: - """ - *required - Convert json snapshot data into standard OrderBookMessage format - :param msg: json snapshot data from live web socket stream - :param timestamp: timestamp attached to incoming data - :return: CoinbaseProOrderBookMessage - """ - if metadata: - msg.update(metadata) - return CoinbaseProOrderBookMessage( - 
message_type=OrderBookMessageType.SNAPSHOT, - content=msg, - timestamp=timestamp - ) - - @classmethod - def diff_message_from_exchange(cls, - msg: Dict[str, any], - timestamp: Optional[float] = None, - metadata: Optional[Dict] = None) -> OrderBookMessage: - """ - *required - Convert json diff data into standard OrderBookMessage format - :param msg: json diff data from live web socket stream - :param timestamp: timestamp attached to incoming data - :return: CoinbaseProOrderBookMessage - """ - if metadata: - msg.update(metadata) - if "time" in msg: - msg_time = pd.Timestamp(msg["time"]).timestamp() - return CoinbaseProOrderBookMessage( - message_type=OrderBookMessageType.DIFF, - content=msg, - timestamp=timestamp or msg_time) - - @classmethod - def from_snapshot(cls, snapshot: OrderBookMessage): - raise NotImplementedError("Coinbase Pro order book needs to retain individual order data.") - - @classmethod - def restore_from_snapshot_and_diffs(self, snapshot: OrderBookMessage, diffs: List[OrderBookMessage]): - raise NotImplementedError("Coinbase Pro order book needs to retain individual order data.") diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_message.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_message.py deleted file mode 100644 index 3050d5bedb..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_message.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python - -from typing import Dict, List, Optional - -import pandas as pd - -from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType -from hummingbot.core.data_type.order_book_row import OrderBookRow - - -class CoinbaseProOrderBookMessage(OrderBookMessage): - def __new__( - cls, - message_type: OrderBookMessageType, - content: Dict[str, any], - timestamp: Optional[float] = None, - *args, - **kwargs, - ): - if timestamp is None: - if message_type is OrderBookMessageType.SNAPSHOT: - raise ValueError("timestamp must not be None when initializing snapshot messages.") - timestamp = pd.Timestamp(content["time"], tz="UTC").timestamp() - return super(CoinbaseProOrderBookMessage, cls).__new__( - cls, message_type, content, timestamp=timestamp, *args, **kwargs - ) - - @property - def update_id(self) -> int: - if self.type in [OrderBookMessageType.DIFF, OrderBookMessageType.SNAPSHOT]: - return int(self.content["sequence"]) - else: - return -1 - - @property - def trade_id(self) -> int: - if self.type is OrderBookMessageType.TRADE: - return int(self.content["sequence"]) - return -1 - - @property - def trading_pair(self) -> str: - if "product_id" in self.content: - return self.content["product_id"] - elif "symbol" in self.content: - return self.content["symbol"] - - @property - def asks(self) -> List[OrderBookRow]: - raise NotImplementedError("Coinbase Pro order book messages have different semantics.") - - @property - def bids(self) -> List[OrderBookRow]: - raise NotImplementedError("Coinbase Pro order book messages have different semantics.") diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker.py deleted file mode 100644 index 81b7fcfecb..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python - -import asyncio -import bisect -import logging -import time -from collections import defaultdict, deque -from typing 
import Deque, Dict, List, Optional - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_active_order_tracker import CoinbaseProActiveOrderTracker -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_order_book_data_source import ( - CoinbaseProAPIOrderBookDataSource -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book import CoinbaseProOrderBook -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_message import CoinbaseProOrderBookMessage -from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType -from hummingbot.core.data_type.order_book_tracker import OrderBookTracker -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory -from hummingbot.logger import HummingbotLogger - - -class CoinbaseProOrderBookTracker(OrderBookTracker): - _cbpobt_logger: Optional[HummingbotLogger] = None - - @classmethod - def logger(cls) -> HummingbotLogger: - if cls._cbpobt_logger is None: - cls._cbpobt_logger = logging.getLogger(__name__) - return cls._cbpobt_logger - - def __init__( - self, - trading_pairs: Optional[List[str]] = None, - web_assistants_factory: Optional[WebAssistantsFactory] = None, - ): - super().__init__( - data_source=CoinbaseProAPIOrderBookDataSource(trading_pairs, web_assistants_factory), - trading_pairs=trading_pairs, - ) - self._ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() - self._order_book_snapshot_stream: asyncio.Queue = asyncio.Queue() - self._order_book_diff_stream: asyncio.Queue = asyncio.Queue() - self._process_msg_deque_task: Optional[asyncio.Task] = None - self._past_diffs_windows: Dict[str, Deque] = {} - self._order_books: Dict[str, CoinbaseProOrderBook] = {} - self._saved_message_queues: Dict[str, Deque[CoinbaseProOrderBookMessage]] = defaultdict(lambda: deque(maxlen=1000)) - self._active_order_trackers: Dict[str, CoinbaseProActiveOrderTracker] = defaultdict(CoinbaseProActiveOrderTracker) - - @property - def exchange_name(self) -> str: - """ - *required - Name of the current exchange - """ - return "coinbase_pro" - - async def _order_book_diff_router(self): - """ - Route the real-time order book diff messages to the correct order book. - """ - last_message_timestamp: float = time.time() - messages_queued: int = 0 - messages_accepted: int = 0 - messages_rejected: int = 0 - while True: - try: - ob_message: CoinbaseProOrderBookMessage = await self._order_book_diff_stream.get() - trading_pair: str = ob_message.trading_pair - if trading_pair not in self._tracking_message_queues: - messages_queued += 1 - # Save diff messages received before snapshots are ready - self._saved_message_queues[trading_pair].append(ob_message) - continue - message_queue: asyncio.Queue = self._tracking_message_queues[trading_pair] - # Check the order book's initial update ID. If it's larger, don't bother. 
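-                # (A diff whose update_id is older than the snapshot that seeded this book has already been
-                # superseded, so it is counted as rejected and skipped.)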
- order_book: CoinbaseProOrderBook = self._order_books[trading_pair] - - if order_book.snapshot_uid > ob_message.update_id: - messages_rejected += 1 - continue - await message_queue.put(ob_message) - messages_accepted += 1 - if ob_message.content["type"] == "match": # put match messages to trade queue - trade_type = float(TradeType.SELL.value) if ob_message.content["side"].upper() == "SELL" \ - else float(TradeType.BUY.value) - self._order_book_trade_stream.put_nowait(OrderBookMessage(OrderBookMessageType.TRADE, { - "trading_pair": ob_message.trading_pair, - "trade_type": trade_type, - "trade_id": ob_message.update_id, - "update_id": ob_message.timestamp, - "price": ob_message.content["price"], - "amount": ob_message.content["size"] - }, timestamp=ob_message.timestamp)) - - # Log some statistics. - now: float = time.time() - if int(now / 60.0) > int(last_message_timestamp / 60.0): - self.logger().debug(f"Diff messages processed: {messages_accepted}, " - f"rejected: {messages_rejected}, queued: {messages_queued}") - messages_accepted = 0 - messages_rejected = 0 - messages_queued = 0 - - last_message_timestamp = now - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - f'{"Unexpected error routing order book messages."}', - exc_info=True, - app_warning_msg=f'{"Unexpected error routing order book messages. Retrying after 5 seconds."}' - ) - await asyncio.sleep(5.0) - - async def _track_single_book(self, trading_pair: str): - """ - Update an order book with changes from the latest batch of received messages - """ - past_diffs_window: Deque[CoinbaseProOrderBookMessage] = deque() - self._past_diffs_windows[trading_pair] = past_diffs_window - - message_queue: asyncio.Queue = self._tracking_message_queues[trading_pair] - order_book: CoinbaseProOrderBook = self._order_books[trading_pair] - active_order_tracker: CoinbaseProActiveOrderTracker = self._active_order_trackers[trading_pair] - - last_message_timestamp: float = time.time() - diff_messages_accepted: int = 0 - - while True: - try: - message: CoinbaseProOrderBookMessage = None - saved_messages: Deque[CoinbaseProOrderBookMessage] = self._saved_message_queues[trading_pair] - # Process saved messages first if there are any - if len(saved_messages) > 0: - message = saved_messages.popleft() - else: - message = await message_queue.get() - - if message.type is OrderBookMessageType.DIFF: - bids, asks = active_order_tracker.convert_diff_message_to_order_book_row(message) - order_book.apply_diffs(bids, asks, message.update_id) - past_diffs_window.append(message) - while len(past_diffs_window) > self.PAST_DIFF_WINDOW_SIZE: - past_diffs_window.popleft() - diff_messages_accepted += 1 - - # Output some statistics periodically. 
- now: float = time.time() - if int(now / 60.0) > int(last_message_timestamp / 60.0): - self.logger().debug(f"Processed {diff_messages_accepted} order book diffs for {trading_pair}.") - diff_messages_accepted = 0 - last_message_timestamp = now - elif message.type is OrderBookMessageType.SNAPSHOT: - past_diffs: List[CoinbaseProOrderBookMessage] = list(past_diffs_window) - # only replay diffs later than snapshot, first update active order with snapshot then replay diffs - replay_position = bisect.bisect_right(past_diffs, message) - replay_diffs = past_diffs[replay_position:] - s_bids, s_asks = active_order_tracker.convert_snapshot_message_to_order_book_row(message) - order_book.apply_snapshot(s_bids, s_asks, message.update_id) - for diff_message in replay_diffs: - d_bids, d_asks = active_order_tracker.convert_diff_message_to_order_book_row(diff_message) - order_book.apply_diffs(d_bids, d_asks, diff_message.update_id) - - self.logger().debug(f"Processed order book snapshot for {trading_pair}.") - except asyncio.CancelledError: - raise - except Exception: - self.logger().network( - f"Unexpected error processing order book messages for {trading_pair}.", - exc_info=True, - app_warning_msg=f'{"Unexpected error processing order book messages. Retrying after 5 seconds."}' - ) - await asyncio.sleep(5.0) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker_entry.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker_entry.py deleted file mode 100644 index 3a4baf3d58..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_order_book_tracker_entry.py +++ /dev/null @@ -1,23 +0,0 @@ -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_active_order_tracker import CoinbaseProActiveOrderTracker -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.data_type.order_book_tracker_entry import OrderBookTrackerEntry - - -class CoinbaseProOrderBookTrackerEntry(OrderBookTrackerEntry): - def __init__(self, - trading_pair: str, - timestamp: float, - order_book: OrderBook, - active_order_tracker: CoinbaseProActiveOrderTracker): - self._active_order_tracker = active_order_tracker - super(CoinbaseProOrderBookTrackerEntry, self).__init__(trading_pair, timestamp, order_book) - - def __repr__(self) -> str: - return ( - f"CoinbaseProOrderBookTrackerEntry(trading_pair='{self._trading_pair}', timestamp='{self._timestamp}', " - f"order_book='{self._order_book}')" - ) - - @property - def active_order_tracker(self) -> CoinbaseProActiveOrderTracker: - return self._active_order_tracker diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_user_stream_tracker.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_user_stream_tracker.py deleted file mode 100644 index f2d0d91d7d..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_user_stream_tracker.py +++ /dev/null @@ -1,65 +0,0 @@ -import logging -from typing import List, Optional - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_user_stream_data_source import ( - CoinbaseProAPIUserStreamDataSource -) -from hummingbot.core.data_type.user_stream_tracker import UserStreamTracker -from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource -from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather -from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory -from hummingbot.logger import HummingbotLogger - - -class 
CoinbaseProUserStreamTracker(UserStreamTracker): - _cbpust_logger: Optional[HummingbotLogger] = None - - @classmethod - def logger(cls) -> HummingbotLogger: - if cls._bust_logger is None: - cls._bust_logger = logging.getLogger(__name__) - return cls._bust_logger - - def __init__( - self, - trading_pairs: Optional[List[str]] = None, - web_assistants_factory: Optional[WebAssistantsFactory] = None, - ): - self._trading_pairs: List[str] = trading_pairs or [] - self._web_assistants_factory = web_assistants_factory - super().__init__(data_source=CoinbaseProAPIUserStreamDataSource( - trading_pairs=self._trading_pairs, - web_assistants_factory=self._web_assistants_factory, - )) - - @property - def data_source(self) -> UserStreamTrackerDataSource: - """ - *required - Initializes a user stream data source (user specific order diffs from live socket stream) - :return: OrderBookTrackerDataSource - """ - if not self._data_source: - self._data_source = CoinbaseProAPIUserStreamDataSource( - trading_pairs=self._trading_pairs, - web_assistants_factory=self._web_assistants_factory, - ) - return self._data_source - - @property - def exchange_name(self) -> str: - """ - *required - Name of the current exchange - """ - return "coinbase_pro" - - async def start(self): - """ - *required - Start all listeners and tasks - """ - self._user_stream_tracking_task = safe_ensure_future( - self.data_source.listen_for_user_stream(self._user_stream) - ) - await safe_gather(self._user_stream_tracking_task) diff --git a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_utils.py b/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_utils.py deleted file mode 100644 index deccceedbf..0000000000 --- a/hummingbot/connector/exchange/coinbase_pro/coinbase_pro_utils.py +++ /dev/null @@ -1,79 +0,0 @@ -import typing -from dataclasses import dataclass -from typing import Optional - -from pydantic import Field, SecretStr - -from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.core.api_throttler.async_throttler import AsyncThrottler -from hummingbot.core.web_assistant.connections.data_types import EndpointRESTRequest -from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory - -if typing.TYPE_CHECKING: - from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_auth import CoinbaseProAuth - -CENTRALIZED = True - -EXAMPLE_PAIR = "ETH-USDC" - -DEFAULT_FEES = [0.5, 0.5] - - -class CoinbaseProConfigMap(BaseConnectorConfigMap): - connector: str = Field(default="coinbase_pro", client_data=None) - coinbase_pro_api_key: SecretStr = Field( - default=..., - client_data=ClientFieldData( - prompt=lambda cm: "Enter your Coinbase API key", - is_secure=True, - is_connect_key=True, - prompt_on_new=True, - ) - ) - coinbase_pro_secret_key: SecretStr = Field( - default=..., - client_data=ClientFieldData( - prompt=lambda cm: "Enter your Coinbase secret key", - is_secure=True, - is_connect_key=True, - prompt_on_new=True, - ) - ) - coinbase_pro_passphrase: SecretStr = Field( - default=..., - client_data=ClientFieldData( - prompt=lambda cm: "Enter your Coinbase passphrase", - is_secure=True, - is_connect_key=True, - prompt_on_new=True, - ) - ) - - class Config: - title = "coinbase_pro" - - -KEYS = CoinbaseProConfigMap.construct() - - -@dataclass -class CoinbaseProRESTRequest(EndpointRESTRequest): - def __post_init__(self): - super().__post_init__() - 
self._ensure_endpoint_for_auth() - - @property - def base_url(self) -> str: - return CONSTANTS.REST_URL - - def _ensure_endpoint_for_auth(self): - if self.is_auth_required and self.endpoint is None: - raise ValueError("The endpoint must be specified if authentication is required.") - - -def build_coinbase_pro_web_assistant_factory(auth: Optional['CoinbaseProAuth'] = None) -> WebAssistantsFactory: - """The web-assistant's composition root.""" - throttler = AsyncThrottler(rate_limits=[]) - api_factory = WebAssistantsFactory(throttler=throttler, auth=auth) - return api_factory diff --git a/test/connector/exchange/ascend_ex/__init__.py b/hummingbot/connector/exchange/hashkey/__init__.py similarity index 100% rename from test/connector/exchange/ascend_ex/__init__.py rename to hummingbot/connector/exchange/hashkey/__init__.py diff --git a/hummingbot/connector/exchange/hashkey/hashkey_api_order_book_data_source.py b/hummingbot/connector/exchange/hashkey/hashkey_api_order_book_data_source.py new file mode 100644 index 0000000000..c5efc4c5fd --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_api_order_book_data_source.py @@ -0,0 +1,236 @@ +import asyncio +import time +from collections import defaultdict +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional + +import hummingbot.connector.exchange.hashkey.hashkey_constants as CONSTANTS +from hummingbot.connector.exchange.hashkey import hashkey_web_utils as web_utils +from hummingbot.connector.exchange.hashkey.hashkey_order_book import HashkeyOrderBook +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.data_type.order_book_message import OrderBookMessage +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.exchange.hashkey.hashkey_exchange import HashkeyExchange + + +class HashkeyAPIOrderBookDataSource(OrderBookTrackerDataSource): + HEARTBEAT_TIME_INTERVAL = 30.0 + ONE_HOUR = 60 * 60 + + _logger: Optional[HummingbotLogger] = None + _trading_pair_symbol_map: Dict[str, Mapping[str, str]] = {} + _mapping_initialization_lock = asyncio.Lock() + + def __init__(self, + trading_pairs: List[str], + connector: 'HashkeyExchange', + api_factory: Optional[WebAssistantsFactory] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None): + super().__init__(trading_pairs) + self._connector = connector + self._domain = domain + self._snapshot_messages_queue_key = CONSTANTS.SNAPSHOT_EVENT_TYPE + self._trade_messages_queue_key = CONSTANTS.TRADE_EVENT_TYPE + self._time_synchronizer = time_synchronizer + self._throttler = throttler + self._api_factory = api_factory or web_utils.build_api_factory( + throttler=self._throttler, + time_synchronizer=self._time_synchronizer, + domain=self._domain, + ) + self._message_queue: Dict[str, asyncio.Queue] = defaultdict(asyncio.Queue) + self._last_ws_message_sent_timestamp = 0 + + async def get_last_traded_prices(self, + trading_pairs: List[str], + domain: Optional[str] = None) -> Dict[str, float]: + return await 
self._connector.get_last_traded_prices(trading_pairs=trading_pairs)
+
+    async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any]:
+        """
+        Retrieves a copy of the full order book from the exchange, for a particular trading pair.
+
+        :param trading_pair: the trading pair for which the order book will be retrieved
+
+        :return: the response from the exchange (JSON dictionary)
+        """
+        params = {
+            "symbol": await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair),
+            "limit": "1000"
+        }
+        data = await self._connector._api_request(path_url=CONSTANTS.SNAPSHOT_PATH_URL,
+                                                  method=RESTMethod.GET,
+                                                  params=params)
+        return data
+
+    async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage:
+        snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair)
+        snapshot_timestamp: float = float(snapshot["t"]) * 1e-3
+        snapshot_msg: OrderBookMessage = HashkeyOrderBook.snapshot_message_from_exchange_rest(
+            snapshot,
+            snapshot_timestamp,
+            metadata={"trading_pair": trading_pair}
+        )
+        return snapshot_msg
+
+    async def _parse_trade_message(self, raw_message: Dict[str, Any], message_queue: asyncio.Queue):
+        trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol(symbol=raw_message["symbol"])
+        for trades in raw_message["data"]:
+            trade_message: OrderBookMessage = HashkeyOrderBook.trade_message_from_exchange(
+                trades, {"trading_pair": trading_pair})
+            message_queue.put_nowait(trade_message)
+
+    async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLoop, output: asyncio.Queue):
+        """
+        This method runs continuously and requests the full order book content from the exchange every hour.
+        It uses the exchange's REST API because the exchange does not provide a websocket endpoint for the full
+        order book. With that information it creates snapshot messages that are added to the output queue.
+        :param ev_loop: the event loop the method will run in
+        :param output: a queue to add the created snapshot messages
+        """
+        while True:
+            try:
+                await asyncio.wait_for(self._process_ob_snapshot(snapshot_queue=output), timeout=self.ONE_HOUR)
+            except asyncio.TimeoutError:
+                await self._take_full_order_book_snapshot(trading_pairs=self._trading_pairs, snapshot_queue=output)
+            except asyncio.CancelledError:
+                raise
+            except Exception:
+                self.logger().error("Unexpected error.", exc_info=True)
+                await self._take_full_order_book_snapshot(trading_pairs=self._trading_pairs, snapshot_queue=output)
+                await self._sleep(5.0)
+
+    async def listen_for_subscriptions(self):
+        """
+        Connects to the trade events and order diffs websocket endpoints and listens to the messages sent by the
+        exchange. Each message is stored in its own queue.
+ """ + ws = None + while True: + try: + ws: WSAssistant = await self._api_factory.get_ws_assistant() + await ws.connect(ws_url=CONSTANTS.WSS_PUBLIC_URL[self._domain]) + await self._subscribe_channels(ws) + self._last_ws_message_sent_timestamp = self._time() + + while True: + try: + seconds_until_next_ping = (CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL - ( + self._time() - self._last_ws_message_sent_timestamp)) + await asyncio.wait_for(self._process_ws_messages(ws=ws), timeout=seconds_until_next_ping) + except asyncio.TimeoutError: + ping_time = self._time() + payload = { + "ping": int(ping_time * 1e3) + } + ping_request = WSJSONRequest(payload=payload) + await ws.send(request=ping_request) + self._last_ws_message_sent_timestamp = ping_time + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred when listening to order book streams. Retrying in 5 seconds...", + exc_info=True, + ) + await self._sleep(5.0) + finally: + ws and await ws.disconnect() + + async def _subscribe_channels(self, ws: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. + :param ws: the websocket assistant used to connect to the exchange + """ + try: + for trading_pair in self._trading_pairs: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + trade_payload = { + "topic": "trade", + "event": "sub", + "symbol": symbol, + "params": { + "binary": False + } + } + subscribe_trade_request: WSJSONRequest = WSJSONRequest(payload=trade_payload) + + depth_payload = { + "topic": "depth", + "event": "sub", + "symbol": symbol, + "params": { + "binary": False + } + } + subscribe_orderbook_request: WSJSONRequest = WSJSONRequest(payload=depth_payload) + + await ws.send(subscribe_trade_request) + await ws.send(subscribe_orderbook_request) + + self.logger().info(f"Subscribed to public order book and trade channels of {trading_pair}...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to order book trading and delta streams...", + exc_info=True + ) + raise + + async def _process_ws_messages(self, ws: WSAssistant): + async for ws_response in ws.iter_messages(): + data = ws_response.data + if data.get("msg") == "Success": + continue + event_type = data.get("topic") + if event_type == CONSTANTS.SNAPSHOT_EVENT_TYPE: + self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE].put_nowait(data) + elif event_type == CONSTANTS.TRADE_EVENT_TYPE: + self._message_queue[CONSTANTS.TRADE_EVENT_TYPE].put_nowait(data) + + async def _process_ob_snapshot(self, snapshot_queue: asyncio.Queue): + message_queue = self._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] + while True: + try: + json_msg = await message_queue.get() + trading_pair = await self._connector.trading_pair_associated_to_exchange_symbol( + symbol=json_msg["symbol"]) + order_book_message: OrderBookMessage = HashkeyOrderBook.snapshot_message_from_exchange_websocket( + json_msg["data"][0], json_msg["data"][0], {"trading_pair": trading_pair}) + snapshot_queue.put_nowait(order_book_message) + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error when processing public order book updates from exchange") + raise + + async def _take_full_order_book_snapshot(self, trading_pairs: List[str], snapshot_queue: asyncio.Queue): + for trading_pair in trading_pairs: + try: + snapshot: Dict[str, Any] = await 
self._request_order_book_snapshot(trading_pair=trading_pair) + snapshot_timestamp: float = float(snapshot["t"]) * 1e-3 + snapshot_msg: OrderBookMessage = HashkeyOrderBook.snapshot_message_from_exchange_rest( + snapshot, + snapshot_timestamp, + metadata={"trading_pair": trading_pair} + ) + snapshot_queue.put_nowait(snapshot_msg) + self.logger().debug(f"Saved order book snapshot for {trading_pair}") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error(f"Unexpected error fetching order book snapshot for {trading_pair}.", + exc_info=True) + await self._sleep(5.0) + + def _time(self): + return time.time() diff --git a/hummingbot/connector/exchange/hashkey/hashkey_api_user_stream_data_source.py b/hummingbot/connector/exchange/hashkey/hashkey_api_user_stream_data_source.py new file mode 100644 index 0000000000..9107cad8d3 --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_api_user_stream_data_source.py @@ -0,0 +1,142 @@ +import asyncio +import time +from typing import TYPE_CHECKING, Any, List, Optional + +from hummingbot.connector.exchange.hashkey import hashkey_constants as CONSTANTS +from hummingbot.connector.exchange.hashkey.hashkey_auth import HashkeyAuth +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.logger import HummingbotLogger + +if TYPE_CHECKING: + from hummingbot.connector.exchange.hashkey.hashkey_exchange import HashkeyExchange + + +class HashkeyAPIUserStreamDataSource(UserStreamTrackerDataSource): + + LISTEN_KEY_KEEP_ALIVE_INTERVAL = 1800 # Recommended to Ping/Update listen key to keep connection alive + HEARTBEAT_TIME_INTERVAL = 30.0 + + _logger: Optional[HummingbotLogger] = None + + def __init__(self, + auth: HashkeyAuth, + trading_pairs: List[str], + connector: "HashkeyExchange", + api_factory: WebAssistantsFactory, + domain: str = CONSTANTS.DEFAULT_DOMAIN): + super().__init__() + self._auth: HashkeyAuth = auth + self._current_listen_key = None + self._domain = domain + self._api_factory = api_factory + self._connector = connector + + self._listen_key_initialized_event: asyncio.Event = asyncio.Event() + self._last_listen_key_ping_ts = 0 + + async def _connected_websocket_assistant(self) -> WSAssistant: + """ + Creates an instance of WSAssistant connected to the exchange + """ + self._manage_listen_key_task = safe_ensure_future(self._manage_listen_key_task_loop()) + await self._listen_key_initialized_event.wait() + + ws: WSAssistant = await self._get_ws_assistant() + url = CONSTANTS.WSS_PRIVATE_URL[self._domain].format(listenKey=self._current_listen_key) + await ws.connect(ws_url=url, ping_timeout=CONSTANTS.WS_HEARTBEAT_TIME_INTERVAL) + return ws + + async def _subscribe_channels(self, websocket_assistant: WSAssistant): + """ + Subscribes to the trade events and diff orders events through the provided websocket connection. + + Hashkey does not require any channel subscription. 
+ + :param websocket_assistant: the websocket assistant used to connect to the exchange + """ + pass + + async def _get_listen_key(self): + try: + data = await self._connector._api_request( + method=RESTMethod.POST, + path_url=CONSTANTS.USER_STREAM_PATH_URL, + is_auth_required=True, + ) + except asyncio.CancelledError: + raise + except Exception as exception: + raise IOError(f"Error fetching user stream listen key. Error: {exception}") + + return data["listenKey"] + + async def _ping_listen_key(self) -> bool: + try: + data = await self._connector._api_request( + method=RESTMethod.PUT, + path_url=CONSTANTS.USER_STREAM_PATH_URL, + params={"listenKey": self._current_listen_key}, + return_err=True, + ) + if "code" in data: + self.logger().warning(f"Failed to refresh the listen key {self._current_listen_key}: {data}") + return False + + except asyncio.CancelledError: + raise + except Exception as exception: + self.logger().warning(f"Failed to refresh the listen key {self._current_listen_key}: {exception}") + return False + + return True + + async def _manage_listen_key_task_loop(self): + try: + while True: + now = int(time.time()) + if self._current_listen_key is None: + self._current_listen_key = await self._get_listen_key() + self.logger().info(f"Successfully obtained listen key {self._current_listen_key}") + self._listen_key_initialized_event.set() + self._last_listen_key_ping_ts = int(time.time()) + + if now - self._last_listen_key_ping_ts >= self.LISTEN_KEY_KEEP_ALIVE_INTERVAL: + success: bool = await self._ping_listen_key() + if not success: + self.logger().error("Error occurred renewing listen key ...") + break + else: + self.logger().info(f"Refreshed listen key {self._current_listen_key}.") + self._last_listen_key_ping_ts = int(time.time()) + else: + await self._sleep(self.LISTEN_KEY_KEEP_ALIVE_INTERVAL) + finally: + self._current_listen_key = None + self._listen_key_initialized_event.clear() + + async def _process_event_message(self, event_message: Any, queue: asyncio.Queue): + if event_message == "ping" and self._pong_response_event: + websocket_assistant = await self._get_ws_assistant() + pong_request = WSJSONRequest(payload={"pong": event_message["ping"]}) + await websocket_assistant.send(request=pong_request) + else: + await super()._process_event_message(event_message=event_message, queue=queue) + + async def _get_ws_assistant(self) -> WSAssistant: + if self._ws_assistant is None: + self._ws_assistant = await self._api_factory.get_ws_assistant() + return self._ws_assistant + + async def _on_user_stream_interruption(self, websocket_assistant: Optional[WSAssistant]): + await super()._on_user_stream_interruption(websocket_assistant=websocket_assistant) + self._manage_listen_key_task and self._manage_listen_key_task.cancel() + self._current_listen_key = None + self._listen_key_initialized_event.clear() + await self._sleep(5) + + def _time(self): + return time.time() diff --git a/hummingbot/connector/exchange/hashkey/hashkey_auth.py b/hummingbot/connector/exchange/hashkey/hashkey_auth.py new file mode 100644 index 0000000000..5fb90171cb --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_auth.py @@ -0,0 +1,79 @@ +import hashlib +import hmac +import time +from collections import OrderedDict +from typing import Any, Dict, Optional +from urllib.parse import urlencode + +import hummingbot.connector.exchange.hashkey.hashkey_constants as CONSTANTS +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.web_assistant.auth import AuthBase +from 
hummingbot.core.web_assistant.connections.data_types import RESTRequest, WSRequest + + +class HashkeyAuth(AuthBase): + + def __init__(self, api_key: str, secret_key: str, time_provider: TimeSynchronizer): + self.api_key = api_key + self.secret_key = secret_key + self.time_provider = time_provider + + @staticmethod + def keysort(dictionary: Dict[str, str]) -> Dict[str, str]: + return OrderedDict(sorted(dictionary.items(), key=lambda t: t[0])) + + async def rest_authenticate(self, request: RESTRequest) -> RESTRequest: + """ + Adds the server time and the signature to the request, required for authenticated interactions. It also adds + the required parameter in the request header. + :param request: the request to be configured for authenticated interaction + """ + request.params = self.add_auth_to_params(params=request.params) + headers = { + "X-HK-APIKEY": self.api_key, + "INPUT-SOURCE": CONSTANTS.HBOT_BROKER_ID, + } + if request.headers is not None: + headers.update(request.headers) + request.headers = headers + return request + + async def ws_authenticate(self, request: WSRequest) -> WSRequest: + """ + This method is intended to configure a websocket request to be authenticated. Hashkey does not use this + functionality + """ + return request # pass-through + + def add_auth_to_params(self, + params: Optional[Dict[str, Any]]): + timestamp = int(self.time_provider.time() * 1e3) + request_params = params or {} + request_params["timestamp"] = timestamp + request_params = self.keysort(request_params) + signature = self._generate_signature(params=request_params) + request_params["signature"] = signature + return request_params + + def _generate_signature(self, params: Dict[str, Any]) -> str: + encoded_params_str = urlencode(params) + digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() + return digest + + def generate_ws_authentication_message(self): + """ + Generates the authentication message to start receiving messages from + the 3 private ws channels + """ + expires = int((self.time_provider.time() + 10) * 1e3) + _val = f'GET/realtime{expires}' + signature = hmac.new(self.secret_key.encode("utf8"), + _val.encode("utf8"), hashlib.sha256).hexdigest() + auth_message = { + "op": "auth", + "args": [self.api_key, expires, signature] + } + return auth_message + + def _time(self): + return time.time() diff --git a/hummingbot/connector/exchange/hashkey/hashkey_constants.py b/hummingbot/connector/exchange/hashkey/hashkey_constants.py new file mode 100644 index 0000000000..64d1f375a3 --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_constants.py @@ -0,0 +1,123 @@ +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit +from hummingbot.core.data_type.in_flight_order import OrderState + +DEFAULT_DOMAIN = "hashkey_global" + +HBOT_ORDER_ID_PREFIX = "HASHKEY-" +MAX_ORDER_ID_LEN = 32 +HBOT_BROKER_ID = "10000800001" + +SIDE_BUY = "BUY" +SIDE_SELL = "SELL" + +TIME_IN_FORCE_GTC = "GTC" +# Base URL +REST_URLS = {"hashkey_global": "https://api-glb.hashkey.com", + "hashkey_global_testnet": "https://api.sim.bmuxdc.com"} + +WSS_PUBLIC_URL = {"hashkey_global": "wss://stream-glb.hashkey.com/quote/ws/v1", + "hashkey_global_testnet": "wss://stream.sim.bmuxdc.com/quote/ws/v1"} + +WSS_PRIVATE_URL = {"hashkey_global": "wss://stream-glb.hashkey.com/api/v1/ws/{listenKey}", + "hashkey_global_testnet": "wss://stream.sim.bmuxdc.com/api/v1/ws/{listenKey}"} + +# Websocket event types +TRADE_EVENT_TYPE = "trade" 
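For reference, here is a minimal, standalone sketch of the request-signing flow implemented by the `HashkeyAuth` class above (`keysort`, `_generate_signature` and `add_auth_to_params`); the secret key and parameter values are illustrative placeholders, not values used by this connector:

```python
import hashlib
import hmac
from collections import OrderedDict
from urllib.parse import urlencode

# Placeholder credentials and request parameters (illustrative only).
secret_key = "my-secret-key"
params = {"symbol": "ETHUSD", "limit": "500", "timestamp": 1700000000000}

# Sort the parameters by key, urlencode them, and sign with HMAC-SHA256,
# mirroring HashkeyAuth.keysort and HashkeyAuth._generate_signature.
sorted_params = OrderedDict(sorted(params.items(), key=lambda t: t[0]))
signature = hmac.new(
    secret_key.encode("utf8"),
    urlencode(sorted_params).encode("utf8"),
    hashlib.sha256,
).hexdigest()
sorted_params["signature"] = signature

# sorted_params is what rest_authenticate attaches as request.params;
# the API key itself travels in the "X-HK-APIKEY" request header.
print(sorted_params)
```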
+SNAPSHOT_EVENT_TYPE = "depth" + +# Public API endpoints +LAST_TRADED_PRICE_PATH = "/quote/v1/ticker/price" +EXCHANGE_INFO_PATH_URL = "/api/v1/exchangeInfo" +SNAPSHOT_PATH_URL = "/quote/v1/depth" +SERVER_TIME_PATH_URL = "/api/v1/time" + +# Private API endpoints +ACCOUNTS_PATH_URL = "/api/v1/account" +MY_TRADES_PATH_URL = "/api/v1/account/trades" +ORDER_PATH_URL = "/api/v1/spot/order" +MARKET_ORDER_PATH_URL = "/api/v1.1/spot/order" +USER_STREAM_PATH_URL = "/api/v1/userDataStream" + +# Order States +ORDER_STATE = { + "PENDING": OrderState.PENDING_CREATE, + "NEW": OrderState.OPEN, + "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, + "FILLED": OrderState.FILLED, + "PENDING_CANCEL": OrderState.PENDING_CANCEL, + "CANCELED": OrderState.CANCELED, + "REJECTED": OrderState.FAILED, + "PARTIALLY_CANCELED": OrderState.CANCELED, +} + +WS_HEARTBEAT_TIME_INTERVAL = 30 + +# Rate Limit Type +REQUEST_GET = "GET" +REQUEST_GET_BURST = "GET_BURST" +REQUEST_GET_MIXED = "GET_MIXED" +REQUEST_POST = "POST" +REQUEST_POST_BURST = "POST_BURST" +REQUEST_POST_MIXED = "POST_MIXED" +REQUEST_PUT = "PUT" +REQUEST_PUT_BURST = "PUT_BURST" +REQUEST_PUT_MIXED = "PUT_MIXED" + +# Rate Limit Max request + +MAX_REQUEST_GET = 6000 +MAX_REQUEST_GET_BURST = 70 +MAX_REQUEST_GET_MIXED = 400 +MAX_REQUEST_POST = 2400 +MAX_REQUEST_POST_BURST = 50 +MAX_REQUEST_POST_MIXED = 270 +MAX_REQUEST_PUT = 2400 +MAX_REQUEST_PUT_BURST = 50 +MAX_REQUEST_PUT_MIXED = 270 + +# Rate Limit time intervals +TWO_MINUTES = 120 +ONE_SECOND = 1 +SIX_SECONDS = 6 +ONE_DAY = 86400 + +RATE_LIMITS = { + # General + RateLimit(limit_id=REQUEST_GET, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES), + RateLimit(limit_id=REQUEST_GET_BURST, limit=MAX_REQUEST_GET_BURST, time_interval=ONE_SECOND), + RateLimit(limit_id=REQUEST_GET_MIXED, limit=MAX_REQUEST_GET_MIXED, time_interval=SIX_SECONDS), + RateLimit(limit_id=REQUEST_POST, limit=MAX_REQUEST_POST, time_interval=TWO_MINUTES), + RateLimit(limit_id=REQUEST_POST_BURST, limit=MAX_REQUEST_POST_BURST, time_interval=ONE_SECOND), + RateLimit(limit_id=REQUEST_POST_MIXED, limit=MAX_REQUEST_POST_MIXED, time_interval=SIX_SECONDS), + # Linked limits + RateLimit(limit_id=LAST_TRADED_PRICE_PATH, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), + LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), + RateLimit(limit_id=EXCHANGE_INFO_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), + LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), + RateLimit(limit_id=SNAPSHOT_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), + LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), + RateLimit(limit_id=SERVER_TIME_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_GET, 1), LinkedLimitWeightPair(REQUEST_GET_BURST, 1), + LinkedLimitWeightPair(REQUEST_GET_MIXED, 1)]), + RateLimit(limit_id=ORDER_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), + LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), + RateLimit(limit_id=MARKET_ORDER_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), + 
LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), + RateLimit(limit_id=ACCOUNTS_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), + LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), + RateLimit(limit_id=MY_TRADES_PATH_URL, limit=MAX_REQUEST_GET, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), + LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), + RateLimit(limit_id=USER_STREAM_PATH_URL, limit=MAX_REQUEST_POST, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_POST, 1), LinkedLimitWeightPair(REQUEST_POST_BURST, 1), + LinkedLimitWeightPair(REQUEST_POST_MIXED, 1)]), + RateLimit(limit_id=USER_STREAM_PATH_URL, limit=MAX_REQUEST_PUT, time_interval=TWO_MINUTES, + linked_limits=[LinkedLimitWeightPair(REQUEST_PUT, 1), LinkedLimitWeightPair(REQUEST_PUT_BURST, 1), + LinkedLimitWeightPair(REQUEST_PUT_MIXED, 1)]), +} diff --git a/hummingbot/connector/exchange/hashkey/hashkey_exchange.py b/hummingbot/connector/exchange/hashkey/hashkey_exchange.py new file mode 100644 index 0000000000..459a8c8679 --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_exchange.py @@ -0,0 +1,589 @@ +import asyncio +from decimal import Decimal +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +from bidict import bidict + +import hummingbot.connector.exchange.hashkey.hashkey_constants as CONSTANTS +import hummingbot.connector.exchange.hashkey.hashkey_utils as hashkey_utils +import hummingbot.connector.exchange.hashkey.hashkey_web_utils as web_utils +from hummingbot.connector.exchange.hashkey.hashkey_api_order_book_data_source import HashkeyAPIOrderBookDataSource +from hummingbot.connector.exchange.hashkey.hashkey_api_user_stream_data_source import HashkeyAPIUserStreamDataSource +from hummingbot.connector.exchange.hashkey.hashkey_auth import HashkeyAuth +from hummingbot.connector.exchange_py_base import ExchangePyBase +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.utils.estimate_fee import build_trade_fee +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + +s_logger = None +s_decimal_NaN = Decimal("nan") + + +class HashkeyExchange(ExchangePyBase): + web_utils = web_utils + + def __init__(self, + client_config_map: "ClientConfigAdapter", + hashkey_api_key: str, + hashkey_api_secret: str, + trading_pairs: Optional[List[str]] = None, + trading_required: bool = True, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + self.api_key = hashkey_api_key + self.secret_key = hashkey_api_secret + self._domain = domain + self._trading_required = trading_required + self._trading_pairs = trading_pairs + self._last_trades_poll_hashkey_timestamp = 1.0 + 
super().__init__(client_config_map) + + @staticmethod + def hashkey_order_type(order_type: OrderType) -> str: + return order_type.name.upper() + + @staticmethod + def to_hb_order_type(hashkey_type: str) -> OrderType: + return OrderType[hashkey_type] + + @property + def authenticator(self): + return HashkeyAuth( + api_key=self.api_key, + secret_key=self.secret_key, + time_provider=self._time_synchronizer) + + @property + def name(self) -> str: + if self._domain == "hashkey_global": + return "hashkey" + else: + return self._domain + + @property + def rate_limits_rules(self): + return CONSTANTS.RATE_LIMITS + + @property + def domain(self): + return self._domain + + @property + def client_order_id_max_length(self): + return CONSTANTS.MAX_ORDER_ID_LEN + + @property + def client_order_id_prefix(self): + return CONSTANTS.HBOT_ORDER_ID_PREFIX + + @property + def trading_rules_request_path(self): + return CONSTANTS.EXCHANGE_INFO_PATH_URL + + @property + def trading_pairs_request_path(self): + return CONSTANTS.EXCHANGE_INFO_PATH_URL + + @property + def check_network_request_path(self): + return CONSTANTS.SERVER_TIME_PATH_URL + + @property + def trading_pairs(self): + return self._trading_pairs + + @property + def is_cancel_request_in_exchange_synchronous(self) -> bool: + return True + + @property + def is_trading_required(self) -> bool: + return self._trading_required + + def supported_order_types(self): + return [OrderType.MARKET, OrderType.LIMIT, OrderType.LIMIT_MAKER] + + def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): + error_description = str(request_exception) + is_time_synchronizer_related = ("-1021" in error_description + and "Timestamp for the request" in error_description) + return is_time_synchronizer_related + + def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: + # TODO: implement this method correctly for the connector + # The default implementation was added when the functionality to detect not found orders was introduced in the + # ExchangePyBase class. Also fix the unit test test_lost_order_removed_if_not_found_during_order_status_update + # when replacing the dummy implementation + return False + + def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool: + # TODO: implement this method correctly for the connector + # The default implementation was added when the functionality to detect not found orders was introduced in the + # ExchangePyBase class. 
Also fix the unit test test_cancel_order_not_found_in_the_exchange when replacing the + # dummy implementation + return False + + def _create_web_assistants_factory(self) -> WebAssistantsFactory: + return web_utils.build_api_factory( + throttler=self._throttler, + time_synchronizer=self._time_synchronizer, + domain=self._domain, + auth=self._auth) + + def _create_order_book_data_source(self) -> OrderBookTrackerDataSource: + return HashkeyAPIOrderBookDataSource( + trading_pairs=self._trading_pairs, + connector=self, + domain=self.domain, + api_factory=self._web_assistants_factory, + throttler=self._throttler, + time_synchronizer=self._time_synchronizer) + + def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource: + return HashkeyAPIUserStreamDataSource( + auth=self._auth, + trading_pairs=self._trading_pairs, + connector=self, + api_factory=self._web_assistants_factory, + domain=self.domain, + ) + + def _get_fee(self, + base_currency: str, + quote_currency: str, + order_type: OrderType, + order_side: TradeType, + amount: Decimal, + price: Decimal = s_decimal_NaN, + is_maker: Optional[bool] = None) -> TradeFeeBase: + is_maker = order_type is OrderType.LIMIT_MAKER + trade_base_fee = build_trade_fee( + exchange=self.name, + is_maker=is_maker, + order_side=order_side, + order_type=order_type, + amount=amount, + price=price, + base_currency=base_currency, + quote_currency=quote_currency + ) + return trade_base_fee + + async def _place_order(self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Decimal, + **kwargs) -> Tuple[str, float]: + amount_str = f"{amount:f}" + type_str = self.hashkey_order_type(order_type) + + side_str = CONSTANTS.SIDE_BUY if trade_type is TradeType.BUY else CONSTANTS.SIDE_SELL + symbol = await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + api_params = {"symbol": symbol, + "side": side_str, + "quantity": amount_str, + "type": type_str, + "recvWindow": 10000, + "newClientOrderId": order_id} + path_url = CONSTANTS.ORDER_PATH_URL + + if order_type != OrderType.MARKET: + api_params["price"] = f"{price:f}" + else: + path_url = CONSTANTS.MARKET_ORDER_PATH_URL + + if order_type == OrderType.LIMIT: + api_params["timeInForce"] = CONSTANTS.TIME_IN_FORCE_GTC + + order_result = await self._api_post( + path_url=path_url, + params=api_params, + is_auth_required=True, + trading_pair=trading_pair, + headers={"INPUT-SOURCE": CONSTANTS.HBOT_BROKER_ID}, + ) + + o_id = str(order_result["orderId"]) + transact_time = int(order_result["transactTime"]) * 1e-3 + return (o_id, transact_time) + + async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): + api_params = {} + if tracked_order.exchange_order_id: + api_params["orderId"] = tracked_order.exchange_order_id + else: + api_params["clientOrderId"] = tracked_order.client_order_id + cancel_result = await self._api_delete( + path_url=CONSTANTS.ORDER_PATH_URL, + params=api_params, + is_auth_required=True) + + if isinstance(cancel_result, dict) and "clientOrderId" in cancel_result: + return True + return False + + async def _format_trading_rules(self, exchange_info_dict: Dict[str, Any]) -> List[TradingRule]: + """ + Example: + { + "timezone": "UTC", + "serverTime": "1703696385826", + "brokerFilters": [], + "symbols": [ + { + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "status": "TRADING", + "baseAsset": "ETH", + "baseAssetName": "ETH", + "baseAssetPrecision": "0.0001", + "quoteAsset": "USD", + "quoteAssetName": "USD", + 
"quotePrecision": "0.0000001", + "retailAllowed": true, + "piAllowed": true, + "corporateAllowed": true, + "omnibusAllowed": true, + "icebergAllowed": false, + "isAggregate": false, + "allowMargin": false, + "filters": [ + { + "minPrice": "0.01", + "maxPrice": "100000.00000000", + "tickSize": "0.01", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.005", + "maxQty": "53", + "stepSize": "0.0001", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "10", + "filterType": "MIN_NOTIONAL" + }, + { + "minAmount": "10", + "maxAmount": "10000000", + "minBuyPrice": "0", + "filterType": "TRADE_AMOUNT" + }, + { + "maxSellPrice": "0", + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ] + } + ], + "options": [], + "contracts": [], + "coins": [ + { + "orgId": "9001", + "coinId": "BTC", + "coinName": "BTC", + "coinFullName": "Bitcoin", + "allowWithdraw": true, + "allowDeposit": true, + "chainTypes": [ + { + "chainType": "Bitcoin", + "withdrawFee": "0", + "minWithdrawQuantity": "0.0005", + "maxWithdrawQuantity": "0", + "minDepositQuantity": "0.0001", + "allowDeposit": true, + "allowWithdraw": true + } + ] + }, + { + "orgId": "9001", + "coinId": "ETH", + "coinName": "ETH", + "coinFullName": "Ethereum", + "allowWithdraw": true, + "allowDeposit": true, + "chainTypes": [ + { + "chainType": "ERC20", + "withdrawFee": "0", + "minWithdrawQuantity": "0", + "maxWithdrawQuantity": "0", + "minDepositQuantity": "0.0075", + "allowDeposit": true, + "allowWithdraw": true + } + ] + }, + { + "orgId": "9001", + "coinId": "USD", + "coinName": "USD", + "coinFullName": "USD", + "allowWithdraw": true, + "allowDeposit": true, + "chainTypes": [] + } + ] + } + """ + trading_pair_rules = exchange_info_dict.get("symbols", []) + retval = [] + for rule in trading_pair_rules: + try: + trading_pair = await self.trading_pair_associated_to_exchange_symbol(symbol=rule.get("symbol")) + + trading_filter_info = {item["filterType"]: item for item in rule.get("filters", [])} + + min_order_size = trading_filter_info.get("LOT_SIZE", {}).get("minQty") + min_price_increment = trading_filter_info.get("PRICE_FILTER", {}).get("minPrice") + min_base_amount_increment = rule.get("baseAssetPrecision") + min_notional_size = trading_filter_info.get("TRADE_AMOUNT", {}).get("minAmount") + + retval.append( + TradingRule(trading_pair, + min_order_size=Decimal(min_order_size), + min_price_increment=Decimal(min_price_increment), + min_base_amount_increment=Decimal(min_base_amount_increment), + min_notional_size=Decimal(min_notional_size))) + + except Exception: + self.logger().exception(f"Error parsing the trading pair rule {rule.get('symbol')}. Skipping.") + return retval + + async def _update_trading_fees(self): + """ + Update fees information from the exchange + """ + pass + + async def _user_stream_event_listener(self): + """ + This functions runs in background continuously processing the events received from the exchange by the user + stream data source. It keeps reading events from the queue until the task is interrupted. + The events received are balance updates, order updates and trade events. 
+ """ + async for event_messages in self._iter_user_event_queue(): + if isinstance(event_messages, dict) and "ping" in event_messages: + continue + + for event_message in event_messages: + try: + event_type = event_message.get("e") + if event_type == "executionReport": + execution_type = event_message.get("X") + client_order_id = event_message.get("c") + tracked_order = self._order_tracker.fetch_order(client_order_id=client_order_id) + if tracked_order is not None: + if execution_type in ["PARTIALLY_FILLED", "FILLED"]: + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=tracked_order.trade_type, + flat_fees=[TokenAmount(amount=Decimal(event_message["n"]), token=event_message["N"])] + ) + trade_update = TradeUpdate( + trade_id=str(event_message["d"]), + client_order_id=client_order_id, + exchange_order_id=str(event_message["i"]), + trading_pair=tracked_order.trading_pair, + fee=fee, + fill_base_amount=Decimal(event_message["l"]), + fill_quote_amount=Decimal(event_message["l"]) * Decimal(event_message["L"]), + fill_price=Decimal(event_message["L"]), + fill_timestamp=int(event_message["E"]) * 1e-3, + ) + self._order_tracker.process_trade_update(trade_update) + + order_update = OrderUpdate( + trading_pair=tracked_order.trading_pair, + update_timestamp=int(event_message["E"]) * 1e-3, + new_state=CONSTANTS.ORDER_STATE[event_message["X"]], + client_order_id=client_order_id, + exchange_order_id=str(event_message["i"]), + ) + self._order_tracker.process_order_update(order_update=order_update) + + elif event_type == "outboundAccountInfo": + balances = event_message["B"] + for balance_entry in balances: + asset_name = balance_entry["a"] + free_balance = Decimal(balance_entry["f"]) + total_balance = Decimal(balance_entry["f"]) + Decimal(balance_entry["l"]) + self._account_available_balances[asset_name] = free_balance + self._account_balances[asset_name] = total_balance + + except asyncio.CancelledError: + raise + except Exception: + self.logger().error("Unexpected error in user stream listener loop.", exc_info=True) + await self._sleep(5.0) + + async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: + trade_updates = [] + + if order.exchange_order_id is not None: + exchange_order_id = int(order.exchange_order_id) + trading_pair = await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair) + fills_data = await self._api_get( + path_url=CONSTANTS.MY_TRADES_PATH_URL, + params={ + "clientOrderId": order.client_order_id, + }, + is_auth_required=True, + limit_id=CONSTANTS.MY_TRADES_PATH_URL) + if fills_data is not None: + for trade in fills_data: + exchange_order_id = str(trade["orderId"]) + if exchange_order_id != str(order.exchange_order_id): + continue + fee = TradeFeeBase.new_spot_fee( + fee_schema=self.trade_fee_schema(), + trade_type=order.trade_type, + percent_token=trade["commissionAsset"], + flat_fees=[TokenAmount(amount=Decimal(trade["commission"]), token=trade["commissionAsset"])] + ) + trade_update = TradeUpdate( + trade_id=str(trade["ticketId"]), + client_order_id=order.client_order_id, + exchange_order_id=exchange_order_id, + trading_pair=trading_pair, + fee=fee, + fill_base_amount=Decimal(trade["qty"]), + fill_quote_amount=Decimal(trade["price"]) * Decimal(trade["qty"]), + fill_price=Decimal(trade["price"]), + fill_timestamp=int(trade["time"]) * 1e-3, + ) + trade_updates.append(trade_update) + + return trade_updates + + async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate: + 
updated_order_data = await self._api_get( + path_url=CONSTANTS.ORDER_PATH_URL, + params={ + "origClientOrderId": tracked_order.client_order_id}, + is_auth_required=True) + + new_state = CONSTANTS.ORDER_STATE[updated_order_data["status"]] + + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=str(updated_order_data["orderId"]), + trading_pair=tracked_order.trading_pair, + update_timestamp=int(updated_order_data["updateTime"]) * 1e-3, + new_state=new_state, + ) + + return order_update + + async def _update_balances(self): + local_asset_names = set(self._account_balances.keys()) + remote_asset_names = set() + + account_info = await self._api_request( + method=RESTMethod.GET, + path_url=CONSTANTS.ACCOUNTS_PATH_URL, + is_auth_required=True) + balances = account_info["balances"] + for balance_entry in balances: + asset_name = balance_entry["asset"] + free_balance = Decimal(balance_entry["free"]) + total_balance = Decimal(balance_entry["total"]) + self._account_available_balances[asset_name] = free_balance + self._account_balances[asset_name] = total_balance + remote_asset_names.add(asset_name) + + asset_names_to_remove = local_asset_names.difference(remote_asset_names) + for asset_name in asset_names_to_remove: + del self._account_available_balances[asset_name] + del self._account_balances[asset_name] + + def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]): + mapping = bidict() + for symbol_data in filter(hashkey_utils.is_exchange_information_valid, exchange_info["symbols"]): + mapping[symbol_data["symbol"]] = combine_to_hb_trading_pair(base=symbol_data["baseAsset"], + quote=symbol_data["quoteAsset"]) + self._set_trading_pair_symbol_map(mapping) + + async def _get_last_traded_price(self, trading_pair: str) -> float: + params = { + "symbol": await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair), + } + resp_json = await self._api_request( + method=RESTMethod.GET, + path_url=CONSTANTS.LAST_TRADED_PRICE_PATH, + params=params, + ) + + return float(resp_json["price"]) + + async def _api_request(self, + path_url, + method: RESTMethod = RESTMethod.GET, + params: Optional[Dict[str, Any]] = None, + data: Optional[Dict[str, Any]] = None, + is_auth_required: bool = False, + return_err: bool = False, + limit_id: Optional[str] = None, + trading_pair: Optional[str] = None, + **kwargs) -> Dict[str, Any]: + last_exception = None + rest_assistant = await self._web_assistants_factory.get_rest_assistant() + url = web_utils.rest_url(path_url, domain=self.domain) + local_headers = { + "Content-Type": "application/x-www-form-urlencoded"} + for _ in range(2): + try: + request_result = await rest_assistant.execute_request( + url=url, + params=params, + data=data, + method=method, + is_auth_required=is_auth_required, + return_err=return_err, + headers=local_headers, + throttler_limit_id=limit_id if limit_id else path_url, + ) + return request_result + except IOError as request_exception: + last_exception = request_exception + if self._is_request_exception_related_to_time_synchronizer(request_exception=request_exception): + self._time_synchronizer.clear_time_offset_ms_samples() + await self._update_time_synchronizer() + else: + raise + + # Failed even after the last retry + raise last_exception diff --git a/hummingbot/connector/exchange/hashkey/hashkey_order_book.py b/hummingbot/connector/exchange/hashkey/hashkey_order_book.py new file mode 100644 index 0000000000..5b0b8486b1 --- /dev/null +++ 
b/hummingbot/connector/exchange/hashkey/hashkey_order_book.py @@ -0,0 +1,71 @@ +from typing import Dict, Optional + +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType + + +class HashkeyOrderBook(OrderBook): + @classmethod + def snapshot_message_from_exchange_websocket(cls, + msg: Dict[str, any], + timestamp: float, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a snapshot message with the order book snapshot message + :param msg: the response from the exchange when requesting the order book snapshot + :param timestamp: the snapshot timestamp + :param metadata: a dictionary with extra information to add to the snapshot data + :return: a snapshot message with the snapshot information received from the exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { + "trading_pair": msg["trading_pair"], + "update_id": ts, + "bids": msg["b"], + "asks": msg["a"] + }, timestamp=timestamp) + + @classmethod + def snapshot_message_from_exchange_rest(cls, + msg: Dict[str, any], + timestamp: float, + metadata: Optional[Dict] = None) -> OrderBookMessage: + """ + Creates a snapshot message with the order book snapshot message + :param msg: the response from the exchange when requesting the order book snapshot + :param timestamp: the snapshot timestamp + :param metadata: a dictionary with extra information to add to the snapshot data + :return: a snapshot message with the snapshot information received from the exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.SNAPSHOT, { + "trading_pair": msg["trading_pair"], + "update_id": ts, + "bids": msg["b"], + "asks": msg["a"] + }, timestamp=timestamp) + + @classmethod + def trade_message_from_exchange(cls, msg: Dict[str, any], metadata: Optional[Dict] = None): + """ + Creates a trade message with the information from the trade event sent by the exchange + :param msg: the trade event details sent by the exchange + :param metadata: a dictionary with extra information to add to trade message + :return: a trade message with the details of the trade as provided by the exchange + """ + if metadata: + msg.update(metadata) + ts = msg["t"] + return OrderBookMessage(OrderBookMessageType.TRADE, { + "trading_pair": msg["trading_pair"], + "trade_type": float(TradeType.BUY.value) if msg["m"] else float(TradeType.SELL.value), + "trade_id": ts, + "update_id": ts, + "price": msg["p"], + "amount": msg["q"] + }, timestamp=ts * 1e-3) diff --git a/hummingbot/connector/exchange/hashkey/hashkey_utils.py b/hummingbot/connector/exchange/hashkey/hashkey_utils.py new file mode 100644 index 0000000000..a3ca7f4097 --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_utils.py @@ -0,0 +1,92 @@ +from decimal import Decimal +from typing import Any, Dict + +from pydantic import Field, SecretStr + +from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData +from hummingbot.core.data_type.trade_fee import TradeFeeSchema + +CENTRALIZED = True +EXAMPLE_PAIR = "BTC-USDT" +DEFAULT_FEES = TradeFeeSchema( + maker_percent_fee_decimal=Decimal("0.000"), + taker_percent_fee_decimal=Decimal("0.000"), +) + + +def is_exchange_information_valid(exchange_info: Dict[str, Any]) -> bool: + """ + Verifies if a trading pair is enabled to operate with based on 
its exchange information + :param exchange_info: the exchange information for a trading pair + :return: True if the trading pair is enabled, False otherwise + """ + return exchange_info.get("status") == "TRADING" + + +class HashkeyGlobalConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="hashkey", const=True, client_data=None) + hashkey_api_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Global API key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + hashkey_api_secret: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Global API secret", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + + class Config: + title = "hashkey" + + +KEYS = HashkeyGlobalConfigMap.construct() + +OTHER_DOMAINS = ["hashkey_global_testnet"] +OTHER_DOMAINS_PARAMETER = { + "hashkey_global_testnet": "hashkey_global_testnet", +} +OTHER_DOMAINS_EXAMPLE_PAIR = { + "hashkey_global_testnet": "BTC-USDT", +} +OTHER_DOMAINS_DEFAULT_FEES = { + "hashkey_global_testnet": DEFAULT_FEES, +} + + +class HashkeyGlobalTestnetConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="hashkey_global_testnet", const=True, client_data=None) + hashkey_api_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Global API key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + hashkey_api_secret: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Hashkey Global API secret", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + + class Config: + title = "hashkey_global_testnet" + + +OTHER_DOMAINS_KEYS = { + "hashkey_global_testnet": HashkeyGlobalTestnetConfigMap.construct(), +} diff --git a/hummingbot/connector/exchange/hashkey/hashkey_web_utils.py b/hummingbot/connector/exchange/hashkey/hashkey_web_utils.py new file mode 100644 index 0000000000..f4d8bdcc9d --- /dev/null +++ b/hummingbot/connector/exchange/hashkey/hashkey_web_utils.py @@ -0,0 +1,124 @@ +from typing import Any, Callable, Dict, Optional + +import hummingbot.connector.exchange.hashkey.hashkey_constants as CONSTANTS +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.connector.utils import TimeSynchronizerRESTPreProcessor +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + + +def rest_url(path_url: str, domain: str = CONSTANTS.DEFAULT_DOMAIN) -> str: + """ + Creates a full URL for provided public REST endpoint + :param path_url: a public REST endpoint + :param domain: the Hashkey domain to connect to ("mainnet" or "testnet"). 
The default value is "mainnet" + :return: the full URL to the endpoint + """ + return CONSTANTS.REST_URLS[domain] + path_url + + +def build_api_factory( + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + time_provider: Optional[Callable] = None, + auth: Optional[AuthBase] = None, ) -> WebAssistantsFactory: + time_synchronizer = time_synchronizer or TimeSynchronizer() + time_provider = time_provider or (lambda: get_current_server_time( + throttler=throttler, + domain=domain, + )) + throttler = throttler or create_throttler() + api_factory = WebAssistantsFactory( + throttler=throttler, + auth=auth, + rest_pre_processors=[ + TimeSynchronizerRESTPreProcessor(synchronizer=time_synchronizer, time_provider=time_provider), + ]) + return api_factory + + +def build_api_factory_without_time_synchronizer_pre_processor(throttler: AsyncThrottler) -> WebAssistantsFactory: + api_factory = WebAssistantsFactory(throttler=throttler) + return api_factory + + +def create_throttler() -> AsyncThrottler: + return AsyncThrottler(CONSTANTS.RATE_LIMITS) + + +async def api_request(path: str, + api_factory: Optional[WebAssistantsFactory] = None, + throttler: Optional[AsyncThrottler] = None, + time_synchronizer: Optional[TimeSynchronizer] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + params: Optional[Dict[str, Any]] = None, + data: Optional[Dict[str, Any]] = None, + method: RESTMethod = RESTMethod.GET, + is_auth_required: bool = False, + return_err: bool = False, + limit_id: Optional[str] = None, + timeout: Optional[float] = None, + headers: Dict[str, Any] = {}): + throttler = throttler or create_throttler() + time_synchronizer = time_synchronizer or TimeSynchronizer() + + # If api_factory is not provided a default one is created + # The default instance has no authentication capabilities and all authenticated requests will fail + api_factory = api_factory or build_api_factory( + throttler=throttler, + time_synchronizer=time_synchronizer, + domain=domain, + ) + rest_assistant = await api_factory.get_rest_assistant() + + local_headers = { + "Content-Type": "application/x-www-form-urlencoded"} + local_headers.update(headers) + url = rest_url(path, domain=domain) + + request = RESTRequest( + method=method, + url=url, + params=params, + data=data, + headers=local_headers, + is_auth_required=is_auth_required, + throttler_limit_id=limit_id if limit_id else path + ) + + async with throttler.execute_task(limit_id=limit_id if limit_id else path): + response = await rest_assistant.call(request=request, timeout=timeout) + if response.status != 200: + if return_err: + error_response = await response.json() + return error_response + else: + error_response = await response.text() + if error_response is not None and "ret_code" in error_response and "ret_msg" in error_response: + raise IOError(f"The request to Hashkey failed. Error: {error_response}. Request: {request}") + else: + raise IOError(f"Error executing request {method.name} {path}. " + f"HTTP status is {response.status}. 
" + f"Error: {error_response}") + + return await response.json() + + +async def get_current_server_time( + throttler: Optional[AsyncThrottler] = None, + domain: str = CONSTANTS.DEFAULT_DOMAIN, +) -> float: + throttler = throttler or create_throttler() + api_factory = build_api_factory_without_time_synchronizer_pre_processor(throttler=throttler) + response = await api_request( + path=CONSTANTS.SERVER_TIME_PATH_URL, + api_factory=api_factory, + throttler=throttler, + domain=domain, + method=RESTMethod.GET) + server_time = response["serverTime"] + + return server_time diff --git a/hummingbot/connector/exchange/injective_v2/data_sources/injective_data_source.py b/hummingbot/connector/exchange/injective_v2/data_sources/injective_data_source.py index a077a67d8c..bd9ca49831 100644 --- a/hummingbot/connector/exchange/injective_v2/data_sources/injective_data_source.py +++ b/hummingbot/connector/exchange/injective_v2/data_sources/injective_data_source.py @@ -415,7 +415,7 @@ async def create_orders( try: result = await self._send_in_transaction(messages=order_creation_messages) - if result["rawLog"] != "[]" or result["txhash"] in [None, ""]: + if result["code"] != 0 or result["txhash"] in [None, ""]: raise ValueError(f"Error sending the order creation transaction ({result['rawLog']})") else: transaction_hash = result["txhash"] @@ -472,7 +472,7 @@ async def cancel_orders( try: result = await self._send_in_transaction(messages=[delegated_message]) - if result["rawLog"] != "[]": + if result["code"] != 0: raise ValueError(f"Error sending the order cancel transaction ({result['rawLog']})") else: cancel_transaction_hash = result.get("txhash", "") @@ -511,7 +511,7 @@ async def cancel_all_subaccount_orders( ) result = await self._send_in_transaction(messages=[delegated_message]) - if result["rawLog"] != "[]": + if result["code"] != 0: raise ValueError(f"Error sending the order cancel transaction ({result['rawLog']})") async def spot_trade_updates(self, market_ids: List[str], start_time: float) -> List[TradeUpdate]: @@ -1544,12 +1544,14 @@ def _create_trading_rules( try: min_price_tick_size = market.min_price_tick_size() min_quantity_tick_size = market.min_quantity_tick_size() + min_notional = market.min_notional() trading_rule = TradingRule( trading_pair=market.trading_pair(), min_order_size=min_quantity_tick_size, min_price_increment=min_price_tick_size, min_base_amount_increment=min_quantity_tick_size, min_quote_amount_increment=min_price_tick_size, + min_notional_size=min_notional ) trading_rules.append(trading_rule) except asyncio.CancelledError: diff --git a/hummingbot/connector/exchange/injective_v2/injective_market.py b/hummingbot/connector/exchange/injective_v2/injective_market.py index 2cb74b3b9c..1cec1d6c8b 100644 --- a/hummingbot/connector/exchange/injective_v2/injective_market.py +++ b/hummingbot/connector/exchange/injective_v2/injective_market.py @@ -74,6 +74,9 @@ def maker_fee_rate(self) -> Decimal: def taker_fee_rate(self) -> Decimal: return self.native_market.taker_fee_rate + def min_notional(self) -> Decimal: + return self.quote_token.value_from_chain_format(chain_value=self.native_market.min_notional) + @dataclass(frozen=True) class InjectiveDerivativeMarket: @@ -124,3 +127,6 @@ def oracle_quote(self) -> str: def oracle_type(self) -> str: return self.native_market.oracle_type + + def min_notional(self) -> Decimal: + return self.quote_token.value_from_chain_format(chain_value=self.native_market.min_notional) diff --git a/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py 
b/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py index a882d3f87f..73438a53d3 100644 --- a/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py +++ b/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py @@ -1,3 +1,4 @@ +import re from abc import ABC, abstractmethod from decimal import Decimal from typing import TYPE_CHECKING, Dict, List, Optional, Union @@ -12,6 +13,7 @@ TransactionFeeCalculator, ) from pyinjective.core.network import Network +from pyinjective.wallet import PrivateKey from hummingbot.client.config.config_data_types import BaseClientModel, BaseConnectorConfigMap, ClientFieldData from hummingbot.connector.exchange.injective_v2 import injective_constants as CONSTANTS @@ -239,6 +241,7 @@ def network(self) -> Network: chain_stream_endpoint=self.chain_stream_endpoint, chain_id=self.chain_id, env=self.env, + official_tokens_list_url=Network.mainnet().official_tokens_list_url, ) def use_secure_connection(self) -> bool: @@ -254,6 +257,9 @@ def rate_limits(self) -> List[RateLimit]: InjectiveCustomNetworkMode.Config.title: InjectiveCustomNetworkMode, } +# Captures a 12 or 24-word BIP39 seed phrase +RE_SEED_PHRASE = re.compile(r"^(?:[a-z]+(?: [a-z]+){11}|[a-z]+(?: [a-z]+){23})$") + class InjectiveAccountMode(BaseClientModel, ABC): @@ -272,7 +278,7 @@ class InjectiveDelegatedAccountMode(InjectiveAccountMode): private_key: SecretStr = Field( default=..., client_data=ClientFieldData( - prompt=lambda cm: "Enter your Injective trading account private key", + prompt=lambda cm: "Enter your Injective trading account private key or seed phrase", is_secure=True, is_connect_key=True, prompt_on_new=True, @@ -300,6 +306,16 @@ class InjectiveDelegatedAccountMode(InjectiveAccountMode): ), ) + @validator("private_key", pre=True) + def validate_network(cls, v: str): + # Both seed phrase and hex private keys supported + if isinstance(v, str): + v = v.strip() + if RE_SEED_PHRASE.match(v): + private_key = PrivateKey.from_mnemonic(v) + return private_key.to_hex() + return v + class Config: title = "delegate_account" diff --git a/hummingbot/connector/exchange/kucoin/kucoin_constants.py b/hummingbot/connector/exchange/kucoin/kucoin_constants.py index 942fed3a09..a72366e810 100644 --- a/hummingbot/connector/exchange/kucoin/kucoin_constants.py +++ b/hummingbot/connector/exchange/kucoin/kucoin_constants.py @@ -74,3 +74,5 @@ RET_MSG_ORDER_NOT_EXIST_OR_NOT_ALLOW_TO_CANCEL = "order_not_exist_or_not_allow_to_cancel" RET_CODE_RESOURCE_NOT_FOUND = 404 RET_MSG_RESOURCE_NOT_FOUND = "Not Found" +RET_CODE_AUTH_TIMESTAMP_ERROR = "400002" +RET_MSG_AUTH_TIMESTAMP_ERROR = "KC-API-TIMESTAMP" diff --git a/hummingbot/connector/exchange/kucoin/kucoin_exchange.py b/hummingbot/connector/exchange/kucoin/kucoin_exchange.py index 7286fcbc6f..8d3dae12f6 100644 --- a/hummingbot/connector/exchange/kucoin/kucoin_exchange.py +++ b/hummingbot/connector/exchange/kucoin/kucoin_exchange.py @@ -117,8 +117,8 @@ async def get_all_pairs_prices(self) -> List[Dict[str, str]]: return pairs_prices def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): - # API documentation does not clarify the error message for timestamp related problems - return False + error_description = str(request_exception) + return CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR in error_description and CONSTANTS.RET_MSG_AUTH_TIMESTAMP_ERROR in error_description def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: return 
(str(CONSTANTS.RET_CODE_RESOURCE_NOT_FOUND) in str(status_update_exception) and diff --git a/hummingbot/connector/exchange/mexc/mexc_constants.py b/hummingbot/connector/exchange/mexc/mexc_constants.py index 6cbadc9260..5a50d5dc7c 100644 --- a/hummingbot/connector/exchange/mexc/mexc_constants.py +++ b/hummingbot/connector/exchange/mexc/mexc_constants.py @@ -112,3 +112,5 @@ ORDER_NOT_EXIST_MESSAGE = "Order does not exist" UNKNOWN_ORDER_ERROR_CODE = -2011 UNKNOWN_ORDER_MESSAGE = "Unknown order sent" +TIMESTAMP_RELATED_ERROR_CODE = 700003 +TIMESTAMP_RELATED_ERROR_MESSAGE = "Timestamp for this request is outside of the recvWindow" diff --git a/hummingbot/connector/exchange/mexc/mexc_exchange.py b/hummingbot/connector/exchange/mexc/mexc_exchange.py index f5360a0c19..15d0c4ec3d 100755 --- a/hummingbot/connector/exchange/mexc/mexc_exchange.py +++ b/hummingbot/connector/exchange/mexc/mexc_exchange.py @@ -117,10 +117,9 @@ async def get_all_pairs_prices(self) -> List[Dict[str, str]]: return pairs_prices def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception): - error_description = str(request_exception) - is_time_synchronizer_related = ("-1021" in error_description - and "Timestamp for this request" in error_description) - return is_time_synchronizer_related + return str(CONSTANTS.TIMESTAMP_RELATED_ERROR_CODE) in str( + request_exception + ) and CONSTANTS.TIMESTAMP_RELATED_ERROR_MESSAGE in str(request_exception) def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: return str(CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE) in str( @@ -189,13 +188,13 @@ async def _place_order(self, price_str = f"{price:f}" api_params["price"] = price_str else: - if trade_type.name.lower() == 'buy': + if trade_type == TradeType.BUY: if price.is_nan(): price = self.get_price_for_volume( trading_pair, True, amount - ) + ).result_price del api_params['quantity'] api_params.update({ "quoteOrderQty": f"{price * amount:f}", diff --git a/hummingbot/connector/exchange/mexc/mexc_utils.py b/hummingbot/connector/exchange/mexc/mexc_utils.py index 0acf5520f6..5852362054 100644 --- a/hummingbot/connector/exchange/mexc/mexc_utils.py +++ b/hummingbot/connector/exchange/mexc/mexc_utils.py @@ -22,7 +22,7 @@ def is_exchange_information_valid(exchange_info: Dict[str, Any]) -> bool: :param exchange_info: the exchange information for a trading pair :return: True if the trading pair is enabled, False otherwise """ - return exchange_info.get("status", None) == "ENABLED" and "SPOT" in exchange_info.get("permissions", list()) \ + return exchange_info.get("status", None) == "1" and "SPOT" in exchange_info.get("permissions", list()) \ and exchange_info.get("isSpotTradingAllowed", True) is True diff --git a/hummingbot/connector/exchange/ndax/ndax_exchange.py b/hummingbot/connector/exchange/ndax/ndax_exchange.py index 8444081f3a..d58ac812dd 100644 --- a/hummingbot/connector/exchange/ndax/ndax_exchange.py +++ b/hummingbot/connector/exchange/ndax/ndax_exchange.py @@ -615,7 +615,7 @@ async def cancel_all(self, timeout_sec: float) -> List[CancellationResult]: :returns List of CancellationResult which indicates whether each order is successfully cancelled. """ - # Note: NDAX's CancelOrder endpoint simply indicates if the cancel requests has been succesfully received. + # Note: NDAX's CancelOrder endpoint simply indicates if the cancel requests has been successfully received. 
cancellation_results = [] tracked_orders = self.in_flight_orders try: diff --git a/hummingbot/connector/exchange/okx/okx_constants.py b/hummingbot/connector/exchange/okx/okx_constants.py index e56ddd27e8..e5ad36adc2 100644 --- a/hummingbot/connector/exchange/okx/okx_constants.py +++ b/hummingbot/connector/exchange/okx/okx_constants.py @@ -64,7 +64,7 @@ NO_LIMIT = sys.maxsize RATE_LIMITS = [ - RateLimit(WS_CONNECTION_LIMIT_ID, limit=1, time_interval=1), + RateLimit(WS_CONNECTION_LIMIT_ID, limit=3, time_interval=1), RateLimit(WS_REQUEST_LIMIT_ID, limit=100, time_interval=10), RateLimit(WS_SUBSCRIPTION_LIMIT_ID, limit=240, time_interval=60 * 60), RateLimit(WS_LOGIN_LIMIT_ID, limit=1, time_interval=15), @@ -72,9 +72,9 @@ RateLimit(limit_id=OKX_INSTRUMENTS_PATH, limit=20, time_interval=2), RateLimit(limit_id=OKX_TICKER_PATH, limit=20, time_interval=2), RateLimit(limit_id=OKX_ORDER_BOOK_PATH, limit=20, time_interval=2), - RateLimit(limit_id=OKX_PLACE_ORDER_PATH, limit=60, time_interval=2), - RateLimit(limit_id=OKX_ORDER_DETAILS_PATH, limit=60, time_interval=2), - RateLimit(limit_id=OKX_ORDER_CANCEL_PATH, limit=60, time_interval=2), + RateLimit(limit_id=OKX_PLACE_ORDER_PATH, limit=20, time_interval=2), + RateLimit(limit_id=OKX_ORDER_DETAILS_PATH, limit=20, time_interval=2), + RateLimit(limit_id=OKX_ORDER_CANCEL_PATH, limit=20, time_interval=2), RateLimit(limit_id=OKX_BATCH_ORDER_CANCEL_PATH, limit=300, time_interval=2), RateLimit(limit_id=OKX_BALANCE_PATH, limit=10, time_interval=2), RateLimit(limit_id=OKX_TRADE_FILLS_PATH, limit=60, time_interval=2), diff --git a/hummingbot/connector/exchange/okx/okx_exchange.py b/hummingbot/connector/exchange/okx/okx_exchange.py index effccf6eb4..9a81b5eedd 100644 --- a/hummingbot/connector/exchange/okx/okx_exchange.py +++ b/hummingbot/connector/exchange/okx/okx_exchange.py @@ -194,7 +194,10 @@ async def _place_order(self, "sz": str(amount), } if order_type.is_limit_type(): - data["px"] = str(price) + data["px"] = f"{price:f}" + else: + # Specify that the the order quantity for market orders is denominated in base currency + data["tgtCcy"] = "base_ccy" exchange_order_id = await self._api_request( path_url=CONSTANTS.OKX_PLACE_ORDER_PATH, @@ -276,6 +279,7 @@ def _update_balance_from_details(self, balance_details: Dict[str, Any]): async def _update_trading_rules(self): # This has to be reimplemented because the request requires an extra parameter + # TODO: Normalize the rest requests so they can be used standalone exchange_info = await self._api_get( path_url=self.trading_rules_request_path, params={"instType": "SPOT"}, @@ -383,11 +387,13 @@ async def _user_stream_event_listener(self): for data in stream_message.get("data", []): order_status = CONSTANTS.ORDER_STATE[data["state"]] client_order_id = data["clOrdId"] + trade_id = data["tradeId"] fillable_order = self._order_tracker.all_fillable_orders.get(client_order_id) updatable_order = self._order_tracker.all_updatable_orders.get(client_order_id) if (fillable_order is not None - and order_status in [OrderState.PARTIALLY_FILLED, OrderState.FILLED]): + and order_status in [OrderState.PARTIALLY_FILLED, OrderState.FILLED] + and trade_id): fee = TradeFeeBase.new_spot_fee( fee_schema=self.trade_fee_schema(), trade_type=fillable_order.trade_type, @@ -395,7 +401,7 @@ async def _user_stream_event_listener(self): flat_fees=[TokenAmount(amount=Decimal(data["fillFee"]), token=data["fillFeeCcy"])] ) trade_update = TradeUpdate( - trade_id=str(data["tradeId"]), + trade_id=str(trade_id), 
client_order_id=fillable_order.client_order_id, exchange_order_id=str(data["ordId"]), trading_pair=fillable_order.trading_pair, diff --git a/hummingbot/connector/exchange/xrpl/xrpl_api_order_book_data_source.py b/hummingbot/connector/exchange/xrpl/xrpl_api_order_book_data_source.py index 024c6d6077..3448f258e8 100644 --- a/hummingbot/connector/exchange/xrpl/xrpl_api_order_book_data_source.py +++ b/hummingbot/connector/exchange/xrpl/xrpl_api_order_book_data_source.py @@ -31,7 +31,7 @@ def __init__(self, trading_pairs: List[str], connector: "XrplExchange", api_fact self._trade_messages_queue_key = CONSTANTS.TRADE_EVENT_TYPE self._diff_messages_queue_key = CONSTANTS.DIFF_EVENT_TYPE self._snapshot_messages_queue_key = CONSTANTS.SNAPSHOT_EVENT_TYPE - self._xrpl_client = AsyncWebsocketClient(self._connector.node_url) + self._xrpl_client = self._connector.order_book_data_client self._open_client_lock = asyncio.Lock() async def get_last_traded_prices(self, trading_pairs: List[str], domain: Optional[str] = None) -> Dict[str, float]: @@ -78,22 +78,37 @@ async def _request_order_book_snapshot(self, trading_pair: str) -> Dict[str, Any return order_book - async def fetch_order_book_side(self, client: AsyncWebsocketClient, ledger_index, taker_gets, taker_pays, limit): - response = await client.request( - BookOffers( - ledger_index=ledger_index, - taker_gets=taker_gets, - taker_pays=taker_pays, - limit=limit, + async def fetch_order_book_side( + self, client: AsyncWebsocketClient, ledger_index, taker_gets, taker_pays, limit, try_count: int = 0 + ): + try: + response = await client.request( + BookOffers( + ledger_index=ledger_index, + taker_gets=taker_gets, + taker_pays=taker_pays, + limit=limit, + ) ) - ) - if response.status != "success": - error = response.to_dict().get("error", "") - error_message = response.to_dict().get("error_message", "") - exception_msg = f"Error fetching order book snapshot: {error} - {error_message}" - self.logger().error(exception_msg) - raise ValueError(exception_msg) - return response + if response.status != "success": + error = response.to_dict().get("error", "") + error_message = response.to_dict().get("error_message", "") + exception_msg = f"Error fetching order book snapshot: {error} - {error_message}" + self.logger().error(exception_msg) + raise ValueError(exception_msg) + return response + except (TimeoutError, asyncio.exceptions.TimeoutError) as e: + self.logger().debug( + f"Verify transaction timeout error, Attempt {try_count + 1}/{CONSTANTS.FETCH_ORDER_BOOK_MAX_RETRY}" + ) + if try_count < CONSTANTS.FETCH_ORDER_BOOK_MAX_RETRY: + await self._sleep(CONSTANTS.FETCH_ORDER_BOOK_RETRY_INTERVAL) + return await self.fetch_order_book_side( + client, ledger_index, taker_gets, taker_pays, limit, try_count + 1 + ) + else: + self.logger().error("Max retries reached. 
Fetching order book failed due to timeout.") + raise e async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLoop, output: asyncio.Queue): """ @@ -108,12 +123,12 @@ async def listen_for_order_book_snapshots(self, ev_loop: asyncio.AbstractEventLo while True: try: await self._request_order_book_snapshots(output=output) - await self._sleep(2.0) + await self._sleep(CONSTANTS.REQUEST_ORDERBOOK_INTERVAL) except asyncio.CancelledError: raise except Exception: self.logger().exception("Unexpected error when processing public order book snapshots from exchange") - await self._sleep(2.0) + await self._sleep(CONSTANTS.REQUEST_ORDERBOOK_INTERVAL) async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: snapshot: Dict[str, Any] = await self._request_order_book_snapshot(trading_pair) diff --git a/hummingbot/connector/exchange/xrpl/xrpl_api_user_stream_data_source.py b/hummingbot/connector/exchange/xrpl/xrpl_api_user_stream_data_source.py index 0d41f76144..42fea73c20 100644 --- a/hummingbot/connector/exchange/xrpl/xrpl_api_user_stream_data_source.py +++ b/hummingbot/connector/exchange/xrpl/xrpl_api_user_stream_data_source.py @@ -22,7 +22,7 @@ def __init__(self, super().__init__() self._connector = connector self._auth = auth - self._xrpl_client = AsyncWebsocketClient(self._connector.node_url) + self._xrpl_client = self._connector.user_stream_client self._last_recv_time: float = 0 @property @@ -43,15 +43,23 @@ async def listen_for_user_stream(self, output: asyncio.Queue): :param output: the queue to use to store the received messages """ while True: + listener = None try: subscribe = Subscribe(accounts=[self._auth.get_account()]) async with self._xrpl_client as client: + # set up a listener task + listener = asyncio.create_task(self.on_message(client, output_queue=output)) + + # subscribe to the ledger await client.send(subscribe) - async for message in client: - self._last_recv_time = time.time() - await self._process_event_message(event_message=message, queue=output) + # sleep infinitely until the connection closes on us + while client.is_open(): + await asyncio.sleep(0) + + listener.cancel() + await listener except asyncio.CancelledError: self.logger().info("User stream listener task has been cancelled. Exiting...") raise @@ -59,11 +67,22 @@ async def listen_for_user_stream(self, output: asyncio.Queue): self.logger().warning(f"The websocket connection was closed ({connection_exception})") except TimeoutError: self.logger().warning( - "Timeout error occurred while listening to user stream. Retrying after 5 seconds...") + "Timeout error occurred while listening to user stream. Retrying...") except Exception: - self.logger().exception("Unexpected error while listening to user stream. Retrying after 5 seconds...") + self.logger().exception("Unexpected error while listening to user stream. 
Retrying...") finally: - await self._sleep(5.0) + if listener is not None: + listener.cancel() + try: + await listener + except asyncio.CancelledError: + pass # Swallow the cancellation error if it happens + await self._xrpl_client.close() + + async def on_message(self, client: AsyncWebsocketClient, output_queue: asyncio.Queue): + async for message in client: + self._last_recv_time = time.time() + await self._process_event_message(event_message=message, queue=output_queue) async def _process_event_message(self, event_message: Dict[str, Any], queue: asyncio.Queue): queue.put_nowait(event_message) diff --git a/hummingbot/connector/exchange/xrpl/xrpl_constants.py b/hummingbot/connector/exchange/xrpl/xrpl_constants.py index 1fbdce60f1..3d5f791203 100644 --- a/hummingbot/connector/exchange/xrpl/xrpl_constants.py +++ b/hummingbot/connector/exchange/xrpl/xrpl_constants.py @@ -13,8 +13,8 @@ MAX_ORDER_ID_LEN = 64 # Base URL -DEFAULT_JSON_RPC_URL = "https://s1.ripple.com:51234/" -DEFAULT_WSS_URL = "wss://s1.ripple.com/" +DEFAULT_JSON_RPC_URL = "https://xrplcluster.com/" +DEFAULT_WSS_URL = "wss://xrplcluster.com/" # Websocket channels TRADE_EVENT_TYPE = "trades" @@ -35,24 +35,34 @@ # Order Types XRPL_ORDER_TYPE = { - OrderType.LIMIT: 524288, - OrderType.LIMIT_MAKER: 589824, - OrderType.MARKET: 786432, + OrderType.LIMIT: 65536, + OrderType.LIMIT_MAKER: 65536, + OrderType.MARKET: 262144, } +XRPL_SELL_FLAG = 524288 + # Market Order Max Slippage -MARKET_ORDER_MAX_SLIPPAGE = Decimal("0.005") +MARKET_ORDER_MAX_SLIPPAGE = Decimal("0.02") # Order Side SIDE_BUY = 0 SIDE_SELL = 1 # Orderbook settings -ORDER_BOOK_DEPTH = 500 +ORDER_BOOK_DEPTH = 150 +FETCH_ORDER_BOOK_MAX_RETRY = 3 +FETCH_ORDER_BOOK_RETRY_INTERVAL = 1 # Ledger offset for getting order status: LEDGER_OFFSET = _LEDGER_OFFSET * 2 +# Timeout for pending order status check +PENDING_ORDER_STATUS_CHECK_TIMEOUT = 120 + +# Request Timeout +REQUEST_TIMEOUT = 30 + # Rate Limits # NOTE: We don't have rate limits for xrpl at the moment RAW_REQUESTS = "RAW_REQUESTS" @@ -65,6 +75,9 @@ PLACE_ORDER_MAX_RETRY = 3 PLACE_ORDER_RETRY_INTERVAL = 3 +# Transaction fee multiplier +FEE_MULTIPLIER = 2 + # Cancel All Timeout CANCEL_ALL_TIMEOUT = 60.0 @@ -72,6 +85,22 @@ CANCEL_MAX_RETRY = 3 CANCEL_RETRY_INTERVAL = 3 +# Verify transaction retry parameters +VERIFY_TRANSACTION_MAX_RETRY = 3 +VERIFY_TRANSACTION_RETRY_INTERVAL = 2 + +# Autofill transaction retry parameters +AUTOFILL_TRANSACTION_MAX_RETRY = 5 + +# Request retry interval +REQUEST_RETRY_INTERVAL = 2 + +# Request Orderbook Interval +REQUEST_ORDERBOOK_INTERVAL = 3 + +# Client refresh interval +CLIENT_REFRESH_INTERVAL = 60 + # Markets list MARKETS = { "XRP-USD": { diff --git a/hummingbot/connector/exchange/xrpl/xrpl_exchange.py b/hummingbot/connector/exchange/xrpl/xrpl_exchange.py index 43aa606c84..3e9bd0747b 100644 --- a/hummingbot/connector/exchange/xrpl/xrpl_exchange.py +++ b/hummingbot/connector/exchange/xrpl/xrpl_exchange.py @@ -1,24 +1,28 @@ import asyncio import math import time +from asyncio import Lock from decimal import ROUND_DOWN, Decimal from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union from bidict import bidict # XRPL Imports -from xrpl.asyncio.clients import AsyncWebsocketClient, Client -from xrpl.asyncio.transaction import autofill, sign, submit -from xrpl.asyncio.transaction.reliable_submission import _wait_for_final_transaction_outcome +from xrpl.asyncio.clients import AsyncWebsocketClient, Client, XRPLRequestFailureException +from xrpl.asyncio.transaction import sign 
+from xrpl.core.binarycodec import encode from xrpl.models import ( XRP, AccountInfo, + AccountLines, AccountObjects, AccountTx, IssuedCurrency, Memo, OfferCancel, OfferCreate, + Request, + SubmitOnly, Transaction, ) from xrpl.models.amounts import IssuedCurrencyAmount @@ -38,7 +42,13 @@ from hummingbot.connector.exchange.xrpl.xrpl_api_order_book_data_source import XRPLAPIOrderBookDataSource from hummingbot.connector.exchange.xrpl.xrpl_api_user_stream_data_source import XRPLAPIUserStreamDataSource from hummingbot.connector.exchange.xrpl.xrpl_auth import XRPLAuth -from hummingbot.connector.exchange.xrpl.xrpl_utils import XRPLMarket, convert_string_to_hex, get_token_from_changes +from hummingbot.connector.exchange.xrpl.xrpl_utils import ( + XRPLMarket, + _wait_for_final_transaction_outcome, + autofill, + convert_string_to_hex, + get_token_from_changes, +) from hummingbot.connector.exchange_py_base import ExchangePyBase from hummingbot.connector.trading_rule import TradingRule from hummingbot.connector.utils import get_new_client_order_id @@ -48,7 +58,7 @@ from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource from hummingbot.core.data_type.trade_fee import DeductedFromReturnsTradeFee, TradeFeeBase from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource -from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather from hummingbot.core.utils.tracking_nonce import NonceCreator from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory @@ -57,7 +67,7 @@ class XrplExchange(ExchangePyBase): - UPDATE_ORDER_STATUS_MIN_INTERVAL = 10.0 + LONG_POLL_INTERVAL = 60.0 web_utils = xrpl_web_utils @@ -67,6 +77,7 @@ def __init__( xrpl_secret_key: str, wss_node_url: str, wss_second_node_url: str, + wss_third_node_url: str, trading_pairs: Optional[List[str]] = None, trading_required: bool = True, custom_markets: Dict[str, XRPLMarket] = None, @@ -74,17 +85,22 @@ def __init__( self._xrpl_secret_key = xrpl_secret_key self._wss_node_url = wss_node_url self._wss_second_node_url = wss_second_node_url - self._xrpl_client = AsyncWebsocketClient(self._wss_node_url) - self._xrpl_place_order_client = AsyncWebsocketClient(self._wss_second_node_url) + self._wss_third_node_url = wss_third_node_url + # self._xrpl_place_order_client = AsyncWebsocketClient(self._wss_node_url) + self._xrpl_query_client = AsyncWebsocketClient(self._wss_second_node_url) + self._xrpl_order_book_data_client = AsyncWebsocketClient(self._wss_second_node_url) + self._xrpl_user_stream_client = AsyncWebsocketClient(self._wss_third_node_url) self._trading_required = trading_required self._trading_pairs = trading_pairs self._auth: XRPLAuth = self.authenticator self._trading_pair_symbol_map: Optional[Mapping[str, str]] = None self._trading_pair_fee_rules: Dict[str, Dict[str, Any]] = {} - self._xrpl_client_lock = asyncio.Lock() + self._xrpl_query_client_lock = asyncio.Lock() self._xrpl_place_order_client_lock = asyncio.Lock() + self._xrpl_fetch_trades_client_lock = asyncio.Lock() self._nonce_creator = NonceCreator.for_microseconds() self._custom_markets = custom_markets or {} + self._last_clients_refresh_time = 0 super().__init__(client_config_map) @@ -152,6 +168,18 @@ def node_url(self) -> str: def second_node_url(self) -> str: return self._wss_second_node_url + @property + def third_node_url(self) -> str: + return self._wss_third_node_url + + @property + def 
user_stream_client(self) -> AsyncWebsocketClient: + return self._xrpl_user_stream_client + + @property + def order_book_data_client(self) -> AsyncWebsocketClient: + return self._xrpl_order_book_data_client + @property def auth(self) -> XRPLAuth: return self._auth @@ -226,6 +254,25 @@ async def _place_order( await self._get_best_price(trading_pair, is_buy=True if trade_type is TradeType.BUY else False) ) + if order_type is OrderType.MARKET: + market = self.order_books.get(trading_pair) + + if market is None: + raise ValueError(f"Market {trading_pair} not found in markets list") + + get_price_with_enough_liquidity = market.get_price_for_volume( + is_buy=True if trade_type is TradeType.BUY else False, + volume=float(amount), # Make sure we have enough liquidity + ) + + price = Decimal(get_price_with_enough_liquidity.result_price) + + # Adding slippage to make sure we get the order filled and not cross our own offers + if trade_type is TradeType.SELL: + price *= Decimal("1") - CONSTANTS.MARKET_ORDER_MAX_SLIPPAGE + else: + price *= Decimal("1") + CONSTANTS.MARKET_ORDER_MAX_SLIPPAGE + base_currency, quote_currency = self.get_currencies_from_trading_pair(trading_pair) account = self._auth.get_account() trading_rule = self._trading_rules[trading_pair] @@ -236,14 +283,6 @@ async def _place_order( amount_in_base = Decimal(amount.quantize(amount_in_base_quantum, rounding=ROUND_DOWN)) amount_in_quote = Decimal((amount * price).quantize(amount_in_quote_quantum, rounding=ROUND_DOWN)) - if order_type is OrderType.MARKET: - # Increase price by MARKET_ORDER_MAX_SLIPPAGE if it is buy order - # Decrease price by MARKET_ORDER_MAX_SLIPPAGE if it is sell order - if trade_type is TradeType.SELL: - amount_in_quote *= Decimal("1") - CONSTANTS.MARKET_ORDER_MAX_SLIPPAGE - else: - amount_in_quote *= Decimal("1") + CONSTANTS.MARKET_ORDER_MAX_SLIPPAGE - # Count the digit in the base and quote amount # If the digit is more than 16, we need to round it to 16 # This is to prevent the error of "Decimal precision out of range for issued currency value." 
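
For readers following the market-order changes above: after this diff the price is taken from the order book at the required volume (`get_price_for_volume`) and then padded by `MARKET_ORDER_MAX_SLIPPAGE` (now 2%) so the taker order crosses the book instead of resting on it. Below is a minimal, self-contained sketch of that adjustment; the helper name and sample numbers are illustrative only and are not part of the connector API.

```python
# Illustrative sketch of the slippage padding this hunk moves into _place_order.
# `adjust_market_price` is a hypothetical helper, not a connector method.
from decimal import Decimal

MARKET_ORDER_MAX_SLIPPAGE = Decimal("0.02")  # matches the constant updated above


def adjust_market_price(book_price: Decimal, is_sell: bool) -> Decimal:
    """Pad the crossing price so a market order fills rather than resting on the book."""
    if is_sell:
        return book_price * (Decimal("1") - MARKET_ORDER_MAX_SLIPPAGE)
    return book_price * (Decimal("1") + MARKET_ORDER_MAX_SLIPPAGE)


# e.g. a market sell quoted at 0.5 is submitted at 0.49, a market buy at 0.51
assert adjust_market_price(Decimal("0.5"), is_sell=True) == Decimal("0.49")
assert adjust_market_price(Decimal("0.5"), is_sell=False) == Decimal("0.51")
```

The sell-side market order additionally gets the dedicated sell flag (the hunk that follows adds `XRPL_SELL_FLAG` on top of the market-order flag defined in the constants above).
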
@@ -296,6 +335,10 @@ async def _place_order( ) flags = CONSTANTS.XRPL_ORDER_TYPE[order_type] + + if trade_type is TradeType.SELL and order_type is OrderType.MARKET: + flags += CONSTANTS.XRPL_SELL_FLAG + memo = Memo( memo_data=convert_string_to_hex(order_id, padding=False), ) @@ -309,18 +352,18 @@ async def _place_order( o_id = None while retry < CONSTANTS.PLACE_ORDER_MAX_RETRY: - await self._make_network_check_request() async with self._xrpl_place_order_client_lock: - filled_tx = await self.tx_autofill(request, self._xrpl_place_order_client) - signed_tx = self.tx_sign(filled_tx, self._auth.get_wallet()) - o_id = f"{signed_tx.sequence}-{signed_tx.last_ledger_sequence}" - submit_response = await self.tx_submit(signed_tx, self._xrpl_place_order_client) - transact_time = time.time() - prelim_result = submit_response.result["engine_result"] + async with AsyncWebsocketClient(self._wss_node_url) as client: + filled_tx = await self.tx_autofill(request, client) + signed_tx = self.tx_sign(filled_tx, self._auth.get_wallet()) + o_id = f"{signed_tx.sequence}-{signed_tx.last_ledger_sequence}" + submit_response = await self.tx_submit(signed_tx, client) + transact_time = time.time() + prelim_result = submit_response.result["engine_result"] - submit_data = {"transaction": signed_tx, "prelim_result": prelim_result} + submit_data = {"transaction": signed_tx, "prelim_result": prelim_result} - if prelim_result[0:3] != "tes": + if prelim_result[0:3] != "tes" and prelim_result != "terQUEUED": error_message = submit_response.result["engine_result_message"] self.logger().error(f"{prelim_result}: {error_message}, data: {submit_response}") raise Exception(f"Failed to place order {order_id} ({o_id})") @@ -381,10 +424,12 @@ async def _place_order_and_process_update(self, order: InFlightOrder, **kwargs) **kwargs, ) - order_update = await self._request_order_status(order) + order_update = await self._request_order_status( + order, creation_tx_resp=order_creation_resp.to_dict().get("result") + ) if order_update.new_state in [OrderState.FILLED, OrderState.PARTIALLY_FILLED]: - trade_update = self.process_trade_fills(order_creation_resp.to_dict(), order) + trade_update = await self.process_trade_fills(order_creation_resp.to_dict(), order) if trade_update is not None: self._order_tracker.process_trade_update(trade_update) else: @@ -396,7 +441,9 @@ async def _place_order_and_process_update(self, order: InFlightOrder, **kwargs) return exchange_order_id - async def _verify_transaction_result(self, submit_data: dict[str, Any]) -> tuple[bool, Optional[Response]]: + async def _verify_transaction_result( + self, submit_data: dict[str, Any], try_count: int = 0 + ) -> tuple[bool, Optional[Response]]: transaction: Transaction = submit_data.get("transaction") prelim_result = submit_data.get("prelim_result") @@ -409,48 +456,89 @@ async def _verify_transaction_result(self, submit_data: dict[str, Any]) -> tuple return False, None try: - await self._make_network_check_request() + # await self._make_network_check_request() resp = await self.wait_for_final_transaction_outcome(transaction, prelim_result) return True, resp + except (TimeoutError, asyncio.exceptions.TimeoutError): + self.logger().debug( + f"Verify transaction timeout error, Attempt {try_count + 1}/{CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY}" + ) + if try_count < CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY: + await self._sleep(CONSTANTS.VERIFY_TRANSACTION_RETRY_INTERVAL) + return await self._verify_transaction_result(submit_data, try_count + 1) + else: + self.logger().error("Max retries 
reached. Verify transaction failed due to timeout.") + return False, None + except Exception as e: + # If there is code 429, retry the request + if "429" in str(e): + self.logger().debug( + f"Verify transaction failed with code 429, Attempt {try_count + 1}/{CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY}" + ) + if try_count < CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY: + await self._sleep(CONSTANTS.VERIFY_TRANSACTION_RETRY_INTERVAL) + return await self._verify_transaction_result(submit_data, try_count + 1) + else: + self.logger().error("Max retries reached. Verify transaction failed with code 429.") + return False, None + self.logger().error(f"Submitted transaction failed: {e}") + return False, None async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder): exchange_order_id = tracked_order.exchange_order_id + cancel_result = False + cancel_data = {} + submit_response = None if exchange_order_id is None: self.logger().error(f"Unable to cancel order {order_id}, it does not yet have exchange order id") return False, {} try: + # await self._client_health_check() async with self._xrpl_place_order_client_lock: - await self._make_network_check_request() + async with AsyncWebsocketClient(self._wss_node_url) as client: + sequence, _ = exchange_order_id.split("-") + memo = Memo( + memo_data=convert_string_to_hex(order_id, padding=False), + ) + request = OfferCancel(account=self._auth.get_account(), offer_sequence=int(sequence), memos=[memo]) - sequence, _ = exchange_order_id.split("-") - request = OfferCancel( - account=self._auth.get_account(), - offer_sequence=int(sequence), - ) + filled_tx = await self.tx_autofill(request, client) + signed_tx = self.tx_sign(filled_tx, self._auth.get_wallet()) - filled_tx = await self.tx_autofill(request, self._xrpl_place_order_client) - signed_tx = self.tx_sign(filled_tx, self._auth.get_wallet()) + submit_response = await self.tx_submit(signed_tx, client) + prelim_result = submit_response.result["engine_result"] + + if prelim_result is None: + raise Exception( + f"prelim_result is None for {order_id} ({exchange_order_id}), data: {submit_response}" + ) - submit_response = await self.tx_submit(signed_tx, self._xrpl_place_order_client) - prelim_result = submit_response.result["engine_result"] if prelim_result[0:3] != "tes": error_message = submit_response.result["engine_result_message"] - self.logger().error(f"{prelim_result}: {error_message}, data: {submit_response}") - return False, {} + raise Exception(f"{prelim_result}: {error_message}, data: {submit_response}") + + cancel_result = True + cancel_data = {"transaction": signed_tx, "prelim_result": prelim_result} await self._sleep(0.3) except Exception as e: - self.logger().error(f"Order cancellation failed: {e}, order_id: {exchange_order_id}") - return False, {} + self.logger().error( + f"Order cancellation failed: {e}, order_id: {exchange_order_id}, submit_response: {submit_response}" + ) + cancel_result = False + cancel_data = {} - return True, {"transaction": signed_tx, "prelim_result": prelim_result} + return cancel_result, cancel_data async def _execute_order_cancel_and_process_update(self, order: InFlightOrder) -> bool: + if not self.ready: + await self._sleep(3) + retry = 0 submitted = False verified = False @@ -460,6 +548,7 @@ async def _execute_order_cancel_and_process_update(self, order: InFlightOrder) - update_timestamp = self.current_timestamp if update_timestamp is None or math.isnan(update_timestamp): update_timestamp = self._time() + order_update: OrderUpdate = OrderUpdate( 
client_order_id=order.client_order_id, trading_pair=order.trading_pair, @@ -476,7 +565,7 @@ async def _execute_order_cancel_and_process_update(self, order: InFlightOrder) - retry = CONSTANTS.CANCEL_MAX_RETRY else: retry += 1 - self.logger().info( + self.logger().debug( f"Order cancellation failed. Retrying in {CONSTANTS.CANCEL_RETRY_INTERVAL} seconds..." ) await self._sleep(CONSTANTS.CANCEL_RETRY_INTERVAL) @@ -594,7 +683,14 @@ async def _user_stream_event_listener(self): """ async for event_message in self._iter_user_event_queue(): try: - transaction = event_message.get("transaction") + transaction = event_message.get("transaction", None) + + if transaction is None: + transaction = event_message.get("tx", None) + + if transaction is None: + transaction = event_message.get("tx_json", None) + meta = event_message.get("meta") if transaction is None or meta is None: @@ -621,7 +717,7 @@ async def _user_stream_event_listener(self): new_order_state = OrderState.FAILED else: new_order_state = OrderState.FILLED - trade_update = self.process_trade_fills(event_message, tracked_order) + trade_update = await self.process_trade_fills(event_message, tracked_order) if trade_update is not None: self._order_tracker.process_trade_update(trade_update) else: @@ -683,7 +779,7 @@ async def _user_stream_event_listener(self): new_order_state = OrderState.OPEN if new_order_state == OrderState.FILLED or new_order_state == OrderState.PARTIALLY_FILLED: - trade_update = self.process_trade_fills(event_message, tracked_order) + trade_update = await self.process_trade_fills(event_message, tracked_order) if trade_update is not None: self._order_tracker.process_trade_update(trade_update) else: @@ -717,48 +813,70 @@ async def _user_stream_event_listener(self): await self._sleep(5.0) async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]: - async with self._xrpl_client_lock: - await self._make_network_check_request() + if order.exchange_order_id is None: + return [] - if order.exchange_order_id is None: - return [] + _, ledger_index = order.exchange_order_id.split("-") - _, ledger_index = order.exchange_order_id.split("-") + transactions = await self._fetch_account_transactions(ledger_index, is_forward=True) - transactions = await self._fetch_account_transactions(ledger_index, is_forward=True) + trade_fills = [] - trade_fills = [] + for transaction in transactions: + tx = transaction.get("tx", None) - for transaction in transactions: - tx = transaction.get("tx", None) - tx_type = tx.get("TransactionType", None) + if tx is None: + tx = transaction.get("transaction", None) - if tx_type is None or tx_type not in ["OfferCreate", "Payment"]: - continue + if tx is None: + tx = transaction.get("tx_json", None) - trade_update = self.process_trade_fills(transaction, order) - if trade_update is not None: - trade_fills.append(trade_update) + tx_type = tx.get("TransactionType", None) + + if tx_type is None or tx_type not in ["OfferCreate", "Payment"]: + continue + + trade_update = await self.process_trade_fills(transaction, order) + if trade_update is not None: + trade_fills.append(trade_update) - return trade_fills + return trade_fills - def process_trade_fills(self, data: Dict[str, Any], order: InFlightOrder) -> Optional[TradeUpdate]: + async def process_trade_fills(self, data: Dict[str, Any], order: InFlightOrder) -> Optional[TradeUpdate]: base_currency, quote_currency = self.get_currencies_from_trading_pair(order.trading_pair) sequence, ledger_index = order.exchange_order_id.split("-") fee_rules = 
self._trading_pair_fee_rules.get(order.trading_pair) + if fee_rules is None: + await self._update_trading_rules() + fee_rules = self._trading_pair_fee_rules.get(order.trading_pair) + if "result" in data: data_result = data.get("result", {}) meta = data_result.get("meta", {}) - tx = data_result + + if "tx_json" in data_result: + tx = data_result.get("tx_json") + tx["hash"] = data_result.get("hash") + elif "transaction" in data_result: + tx = data_result.get("transaction") + tx["hash"] = data_result.get("hash") + else: + tx = data_result else: meta = data.get("meta", {}) + tx = {} # check if transaction has key "tx" or "transaction"? if "tx" in data: tx = data.get("tx", None) - else: + elif "transaction" in data: tx = data.get("transaction", None) + elif "tx_json" in data: + tx = data.get("tx_json", None) + + if "hash" in data: + tx["hash"] = data.get("hash") if not isinstance(tx, dict): self.logger().error( @@ -769,6 +887,11 @@ def process_trade_fills(self, data: Dict[str, Any], order: InFlightOrder) -> Opt if tx.get("TransactionType") not in ["OfferCreate", "Payment"]: return None + if tx["hash"] is None: + self.logger().error("Hash is None") + self.logger().error(f"Data: {data}") + self.logger().error(f"Tx: {tx}") + offer_changes = get_order_book_changes(meta) balance_changes = get_balance_changes(meta) @@ -950,130 +1073,145 @@ def process_trade_fills(self, data: Dict[str, Any], order: InFlightOrder) -> Opt return None - async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate: - async with self._xrpl_client_lock: - await self._make_network_check_request() - new_order_state = tracked_order.current_state - latest_status = "UNKNOWN" - - if tracked_order.exchange_order_id is None: - order_update = OrderUpdate( - client_order_id=tracked_order.client_order_id, - exchange_order_id=tracked_order.exchange_order_id, - trading_pair=tracked_order.trading_pair, - update_timestamp=time.time(), - new_state=new_order_state, - ) + async def _request_order_status(self, tracked_order: InFlightOrder, creation_tx_resp: Dict = None) -> OrderUpdate: + # await self._make_network_check_request() + new_order_state = tracked_order.current_state + latest_status = "UNKNOWN" + + if tracked_order.exchange_order_id is None: + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=tracked_order.exchange_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=time.time(), + new_state=new_order_state, + ) - return order_update + return order_update - sequence, ledger_index = tracked_order.exchange_order_id.split("-") + sequence, ledger_index = tracked_order.exchange_order_id.split("-") - if tracked_order.order_type is OrderType.MARKET: + if tracked_order.order_type is OrderType.MARKET: + if creation_tx_resp is None: transactions = await self._fetch_account_transactions(ledger_index) + else: + transactions = [creation_tx_resp] - for transaction in transactions: - tx = transaction.get("tx") + for transaction in transactions: + if "result" in transaction: + data_result = transaction.get("result", {}) + meta = data_result.get("meta", {}) + tx = data_result + else: meta = transaction.get("meta", {}) - tx_sequence = tx.get("Sequence") + if "tx" in transaction: + tx = transaction.get("tx", None) + elif "transaction" in transaction: + tx = transaction.get("transaction", None) + elif "tx_json" in transaction: + tx = transaction.get("tx_json", None) + else: + tx = transaction - if int(tx_sequence) == int(sequence): - tx_status = 
meta.get("TransactionResult") - if tx_status != "tesSUCCESS": - new_order_state = OrderState.FAILED - update_timestamp = time.time() - self.logger().error( - f"Order {tracked_order.client_order_id} ({tracked_order.exchange_order_id}) failed: {tx_status}, data: {transaction}" - ) - else: - update_time = tx.get("date") - update_timestamp = ripple_time_to_posix(update_time) - new_order_state = OrderState.FILLED + tx_sequence = tx.get("Sequence") - order_update = OrderUpdate( - client_order_id=tracked_order.client_order_id, - exchange_order_id=tracked_order.exchange_order_id, - trading_pair=tracked_order.trading_pair, - update_timestamp=update_timestamp, - new_state=new_order_state, + if int(tx_sequence) == int(sequence): + tx_status = meta.get("TransactionResult") + update_timestamp = time.time() + if tx_status != "tesSUCCESS": + new_order_state = OrderState.FAILED + self.logger().error( + f"Order {tracked_order.client_order_id} ({tracked_order.exchange_order_id}) failed: {tx_status}, data: {transaction}" ) + else: + new_order_state = OrderState.FILLED - return order_update + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=tracked_order.exchange_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=update_timestamp, + new_state=new_order_state, + ) - update_timestamp = time.time() - self.logger().error( - f"Order {tracked_order.client_order_id} ({sequence}) not found in transaction history, tx history: {transactions}" - ) + return order_update - order_update = OrderUpdate( - client_order_id=tracked_order.client_order_id, - exchange_order_id=tracked_order.exchange_order_id, - trading_pair=tracked_order.trading_pair, - update_timestamp=update_timestamp, - new_state=new_order_state, - ) + update_timestamp = time.time() + self.logger().debug( + f"Order {tracked_order.client_order_id} ({sequence}) not found in transaction history, tx history: {transactions}" + ) - return order_update - else: + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=tracked_order.exchange_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=update_timestamp, + new_state=new_order_state, + ) + + return order_update + else: + if creation_tx_resp is None: transactions = await self._fetch_account_transactions(ledger_index, is_forward=True) + else: + transactions = [creation_tx_resp] - found = False - update_timestamp = time.time() + found = False + update_timestamp = time.time() - for transaction in transactions: - if found: - break + for transaction in transactions: + if found: + break + + if "result" in transaction: + data_result = transaction.get("result", {}) + meta = data_result.get("meta", {}) + else: meta = transaction.get("meta", {}) - changes_array = get_order_book_changes(meta) - # Filter out change that is not from this account - changes_array = [x for x in changes_array if x.get("maker_account") == self._auth.get_account()] - - for offer_change in changes_array: - changes = offer_change.get("offer_changes", []) - - for change in changes: - if int(change.get("sequence")) == int(sequence): - tx = transaction.get("tx") - update_time = tx.get("date") - update_timestamp = ripple_time_to_posix(update_time) - latest_status = change.get("status") - found = True - - if latest_status == "UNKNOWN": - current_state = tracked_order.current_state - if current_state is OrderState.PENDING_CREATE or current_state is OrderState.PENDING_CANCEL: - # give order at least 60 seconds to be processed - if 
time.time() - tracked_order.last_update_timestamp > 60: - new_order_state = OrderState.FAILED - self.logger().error( - f"Order status not found for order {tracked_order.client_order_id} ({sequence}), tx history: {transactions}" - ) - else: - new_order_state = current_state - update_timestamp = tracked_order.last_update_timestamp - else: + + changes_array = get_order_book_changes(meta) + # Filter out change that is not from this account + changes_array = [x for x in changes_array if x.get("maker_account") == self._auth.get_account()] + + for offer_change in changes_array: + changes = offer_change.get("offer_changes", []) + + for change in changes: + if int(change.get("sequence")) == int(sequence): + latest_status = change.get("status") + found = True + + if latest_status == "UNKNOWN": + current_state = tracked_order.current_state + if current_state is OrderState.PENDING_CREATE or current_state is OrderState.PENDING_CANCEL: + # give order at least 120 seconds to be processed + if time.time() - tracked_order.last_update_timestamp > CONSTANTS.PENDING_ORDER_STATUS_CHECK_TIMEOUT: new_order_state = OrderState.FAILED self.logger().error( f"Order status not found for order {tracked_order.client_order_id} ({sequence}), tx history: {transactions}" ) - elif latest_status == "filled": - new_order_state = OrderState.FILLED - elif latest_status == "partially-filled": - new_order_state = OrderState.PARTIALLY_FILLED - elif latest_status == "cancelled": - new_order_state = OrderState.CANCELED - elif latest_status == "created": - new_order_state = OrderState.OPEN - - order_update = OrderUpdate( - client_order_id=tracked_order.client_order_id, - exchange_order_id=tracked_order.exchange_order_id, - trading_pair=tracked_order.trading_pair, - update_timestamp=update_timestamp, - new_state=new_order_state, - ) + else: + new_order_state = current_state + else: + new_order_state = current_state + elif latest_status == "filled": + new_order_state = OrderState.FILLED + elif latest_status == "partially-filled": + new_order_state = OrderState.PARTIALLY_FILLED + elif latest_status == "cancelled": + new_order_state = OrderState.CANCELED + elif latest_status == "created": + new_order_state = OrderState.OPEN + + order_update = OrderUpdate( + client_order_id=tracked_order.client_order_id, + exchange_order_id=tracked_order.exchange_order_id, + trading_pair=tracked_order.trading_pair, + update_timestamp=update_timestamp, + new_state=new_order_state, + ) - return order_update + return order_update async def _fetch_account_transactions(self, ledger_index: int, is_forward: bool = False) -> list: """ @@ -1084,94 +1222,145 @@ async def _fetch_account_transactions(self, ledger_index: int, is_forward: bool :return: A list of transactions. 
""" try: - request = AccountTx( - account=self._auth.get_account(), - ledger_index="validated", - ledger_index_min=int(ledger_index) - CONSTANTS.LEDGER_OFFSET, - forward=is_forward, - ) + async with self._xrpl_fetch_trades_client_lock: + request = AccountTx( + account=self._auth.get_account(), + ledger_index_min=int(ledger_index) - CONSTANTS.LEDGER_OFFSET, + forward=is_forward, + ) + + client_one = AsyncWebsocketClient(self._wss_node_url) + client_two = AsyncWebsocketClient(self._wss_second_node_url) + tasks = [ + self.request_with_retry(client_one, request, 5), + self.request_with_retry(client_two, request, 5), + ] + task_results = await safe_gather(*tasks, return_exceptions=True) + + return_transactions = [] + + for task_id, task_result in enumerate(task_results): + if isinstance(task_result, Response): + result = task_result.result + if result is not None: + transactions = result.get("transactions", []) + + if len(transactions) > len(return_transactions): + return_transactions = transactions + await self._sleep(3) - resp = await self._xrpl_client.request(request) - transactions = resp.result.get("transactions", []) except Exception as e: self.logger().error(f"Failed to fetch account transactions: {e}") - transactions = [] + return_transactions = [] - return transactions + return return_transactions async def _update_balances(self): + await self._client_health_check() account_address = self._auth.get_account() - async with self._xrpl_client_lock: - await self._make_network_check_request() - account_info = await self._xrpl_client.request( - AccountInfo( - account=account_address, - ledger_index="validated", - ) - ) - objects = await self._xrpl_client.request( - AccountObjects( - account=account_address, - ) - ) - open_offers = [x for x in objects.result.get("account_objects", []) if x.get("LedgerEntryType") == "Offer"] - balances = [ - x.get("Balance") - for x in objects.result.get("account_objects", []) - if x.get("LedgerEntryType") == "RippleState" - ] - - xrp_balance = account_info.result.get("account_data", {}).get("Balance", "0") - total_xrp = drops_to_xrp(xrp_balance) - total_ledger_objects = len(objects.result.get("account_objects", [])) - fixed_wallet_reserve = 10 - available_xrp = total_xrp - fixed_wallet_reserve - total_ledger_objects * 2 - - account_balances = { - "XRP": Decimal(total_xrp), - } + account_info = await self.request_with_retry( + self._xrpl_query_client, + AccountInfo(account=account_address, ledger_index="validated"), + 5, + self._xrpl_query_client_lock, + 0.3, + ) - # update balance for each token - for balance in balances: - currency = balance.get("currency") - if len(currency) > 3: - currency = hex_to_str(currency) - - token = currency.strip("\x00") - amount = balance.get("value") - account_balances[token] = abs(Decimal(amount)) - - account_available_balances = account_balances.copy() - account_available_balances["XRP"] = Decimal(available_xrp) - - for offer in open_offers: - taker_gets = offer.get("TakerGets") - taker_gets_funded = offer.get("taker_gets_funded", None) - - if taker_gets_funded is not None: - if isinstance(taker_gets_funded, dict): - token = taker_gets_funded.get("currency") - if len(token) > 3: - token = hex_to_str(token).strip("\x00") - amount = Decimal(taker_gets_funded.get("value")) - else: - amount = drops_to_xrp(taker_gets_funded) - token = "XRP" + objects = await self.request_with_retry( + self._xrpl_query_client, + AccountObjects( + account=account_address, + ), + 5, + self._xrpl_query_client_lock, + 0.3, + ) + + open_offers = [x for x 
in objects.result.get("account_objects", []) if x.get("LedgerEntryType") == "Offer"] + + account_lines = await self.request_with_retry( + self._xrpl_query_client, + AccountLines( + account=account_address, + ), + 5, + self._xrpl_query_client_lock, + 0.3, + ) + + if account_lines is not None: + balances = account_lines.result.get("lines", []) + else: + balances = [] + + xrp_balance = account_info.result.get("account_data", {}).get("Balance", "0") + total_xrp = drops_to_xrp(xrp_balance) + total_ledger_objects = len(objects.result.get("account_objects", [])) + fixed_wallet_reserve = 10 + available_xrp = total_xrp - fixed_wallet_reserve - total_ledger_objects * 2 + + account_balances = { + "XRP": Decimal(total_xrp), + } + + # update balance for each token + for balance in balances: + currency = balance.get("currency") + if len(currency) > 3: + currency = hex_to_str(currency) + + token = currency.strip("\x00").upper() + token_issuer = balance.get("account") + token_symbol = self.get_token_symbol_from_all_markets(token, token_issuer) + + amount = balance.get("balance") + + if token_symbol is None: + continue + + account_balances[token_symbol] = abs(Decimal(amount)) + + if self._account_balances is not None and len(balances) == 0: + account_balances = self._account_balances.copy() + + account_available_balances = account_balances.copy() + account_available_balances["XRP"] = Decimal(available_xrp) + + for offer in open_offers: + taker_gets = offer.get("TakerGets") + taker_gets_funded = offer.get("taker_gets_funded", None) + + if taker_gets_funded is not None: + if isinstance(taker_gets_funded, dict): + token = taker_gets_funded.get("currency") + token_issuer = taker_gets_funded.get("issuer") + if len(token) > 3: + token = hex_to_str(token).strip("\x00").upper() + token_symbol = self.get_token_symbol_from_all_markets(token, token_issuer) + amount = Decimal(taker_gets_funded.get("value")) else: - if isinstance(taker_gets, dict): - token = taker_gets.get("currency") - if len(token) > 3: - token = hex_to_str(token).strip("\x00") - amount = Decimal(taker_gets.get("value")) - else: - amount = drops_to_xrp(taker_gets) - token = "XRP" + amount = drops_to_xrp(taker_gets_funded) + token_symbol = "XRP" + else: + if isinstance(taker_gets, dict): + token = taker_gets.get("currency") + token_issuer = taker_gets.get("issuer") + if len(token) > 3: + token = hex_to_str(token).strip("\x00").upper() + token_symbol = self.get_token_symbol_from_all_markets(token, token_issuer) + amount = Decimal(taker_gets.get("value")) + else: + amount = drops_to_xrp(taker_gets) + token_symbol = "XRP" + + if token_symbol is None: + continue - account_available_balances[token] -= amount + account_available_balances[token_symbol] -= amount - self._account_balances = account_balances - self._account_available_balances = account_available_balances + self._account_balances = account_balances + self._account_available_balances = account_available_balances def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, XRPLMarket]): markets = exchange_info @@ -1285,78 +1474,84 @@ async def _initialize_trading_pair_symbol_map(self): self.logger().exception(f"There was an error requesting exchange info: {e}") async def _make_network_check_request(self): - if not self._xrpl_client.is_open(): - await self._xrpl_client.open() + await self._xrpl_query_client.open() - if not self._xrpl_place_order_client.is_open(): - await self._xrpl_place_order_client.open() + async def _client_health_check(self): + # Clear client memory to 
prevent memory leak + if time.time() - self._last_clients_refresh_time > CONSTANTS.CLIENT_REFRESH_INTERVAL: + async with self._xrpl_query_client_lock: + await self._xrpl_query_client.close() + + self._last_clients_refresh_time = time.time() + + await self._xrpl_query_client.open() async def _make_trading_rules_request(self) -> Dict[str, Any]: + await self._client_health_check() zeroTransferRate = 1000000000 trading_rules_info = {} - async with self._xrpl_client_lock: - await self._make_network_check_request() - - for trading_pair in self._trading_pairs: - base_currency, quote_currency = self.get_currencies_from_trading_pair(trading_pair) + for trading_pair in self._trading_pairs: + base_currency, quote_currency = self.get_currencies_from_trading_pair(trading_pair) - if base_currency.currency == XRP().currency: - baseTickSize = 6 - baseTransferRate = 0 - else: - base_info = await self._xrpl_client.request( - AccountInfo( - account=base_currency.issuer, - ledger_index="validated", - ) - ) + if base_currency.currency == XRP().currency: + baseTickSize = 6 + baseTransferRate = 0 + else: + base_info = await self.request_with_retry( + self._xrpl_query_client, + AccountInfo(account=base_currency.issuer, ledger_index="validated"), + 3, + self._xrpl_query_client_lock, + 1, + ) - if base_info.status == ResponseStatus.ERROR: - error_message = base_info.result.get("error_message") - raise ValueError(f"Base currency {base_currency} not found in ledger: {error_message}") + if base_info.status == ResponseStatus.ERROR: + error_message = base_info.result.get("error_message") + raise ValueError(f"Base currency {base_currency} not found in ledger: {error_message}") - baseTickSize = base_info.result.get("account_data", {}).get("TickSize", 15) - rawTransferRate = base_info.result.get("account_data", {}).get("TransferRate", zeroTransferRate) - baseTransferRate = float(rawTransferRate / zeroTransferRate) - 1 + baseTickSize = base_info.result.get("account_data", {}).get("TickSize", 15) + rawTransferRate = base_info.result.get("account_data", {}).get("TransferRate", zeroTransferRate) + baseTransferRate = float(rawTransferRate / zeroTransferRate) - 1 - if quote_currency.currency == XRP().currency: - quoteTickSize = 6 - quoteTransferRate = 0 - else: - quote_info = await self._xrpl_client.request( - AccountInfo( - account=quote_currency.issuer, - ledger_index="validated", - ) - ) + if quote_currency.currency == XRP().currency: + quoteTickSize = 6 + quoteTransferRate = 0 + else: + quote_info = await self.request_with_retry( + self._xrpl_query_client, + AccountInfo(account=quote_currency.issuer, ledger_index="validated"), + 3, + self._xrpl_query_client_lock, + 1, + ) - if quote_info.status == ResponseStatus.ERROR: - error_message = quote_info.result.get("error_message") - raise ValueError(f"Quote currency {quote_currency} not found in ledger: {error_message}") + if quote_info.status == ResponseStatus.ERROR: + error_message = quote_info.result.get("error_message") + raise ValueError(f"Quote currency {quote_currency} not found in ledger: {error_message}") - quoteTickSize = quote_info.result.get("account_data", {}).get("TickSize", 15) - rawTransferRate = quote_info.result.get("account_data", {}).get("TransferRate", zeroTransferRate) - quoteTransferRate = float(rawTransferRate / zeroTransferRate) - 1 + quoteTickSize = quote_info.result.get("account_data", {}).get("TickSize", 15) + rawTransferRate = quote_info.result.get("account_data", {}).get("TransferRate", zeroTransferRate) + quoteTransferRate = float(rawTransferRate / 
zeroTransferRate) - 1 - if baseTickSize is None or quoteTickSize is None: - raise ValueError(f"Tick size not found for trading pair {trading_pair}") + if baseTickSize is None or quoteTickSize is None: + raise ValueError(f"Tick size not found for trading pair {trading_pair}") - if baseTransferRate is None or quoteTransferRate is None: - raise ValueError(f"Transfer rate not found for trading pair {trading_pair}") + if baseTransferRate is None or quoteTransferRate is None: + raise ValueError(f"Transfer rate not found for trading pair {trading_pair}") - smallestTickSize = min(baseTickSize, quoteTickSize) - minimumOrderSize = float(10) ** -smallestTickSize + smallestTickSize = min(baseTickSize, quoteTickSize) + minimumOrderSize = float(10) ** -smallestTickSize - trading_rules_info[trading_pair] = { - "base_currency": base_currency, - "quote_currency": quote_currency, - "base_tick_size": baseTickSize, - "quote_tick_size": quoteTickSize, - "base_transfer_rate": baseTransferRate, - "quote_transfer_rate": quoteTransferRate, - "minimum_order_size": minimumOrderSize, - } + trading_rules_info[trading_pair] = { + "base_currency": base_currency, + "quote_currency": quote_currency, + "base_tick_size": baseTickSize, + "quote_tick_size": quoteTickSize, + "base_transfer_rate": baseTransferRate, + "quote_transfer_rate": quoteTransferRate, + "minimum_order_size": minimumOrderSize, + } return trading_rules_info @@ -1372,6 +1567,7 @@ def _make_trading_pairs_request(self) -> Dict[str, XRPLMarket]: base_issuer=v["base_issuer"], quote=v["quote"], quote_issuer=v["quote_issuer"], + trading_pair_symbol=k, ) # Merge default markets with custom markets @@ -1429,9 +1625,59 @@ async def tx_submit( *, fail_hard: bool = False, ) -> Response: - return await submit(transaction, client, fail_hard=fail_hard) - async def wait_for_final_transaction_outcome(self, transaction, prelim_result) -> Response: - return await _wait_for_final_transaction_outcome( - transaction.get_hash(), self._xrpl_client, prelim_result, transaction.last_ledger_sequence + transaction_blob = encode(transaction.to_xrpl()) + response = await client._request_impl( + SubmitOnly(tx_blob=transaction_blob, fail_hard=fail_hard), timeout=CONSTANTS.REQUEST_TIMEOUT ) + if response.is_successful(): + return response + + raise XRPLRequestFailureException(response.result) + + async def wait_for_final_transaction_outcome(self, transaction, prelim_result) -> Response: + async with AsyncWebsocketClient(self._wss_node_url) as client: + resp = await _wait_for_final_transaction_outcome( + transaction.get_hash(), client, prelim_result, transaction.last_ledger_sequence + ) + return resp + + async def request_with_retry( + self, + client: AsyncWebsocketClient, + request: Request, + max_retries: int = 3, + lock: Lock = None, + delay_time: float = 0.0, + ) -> Response: + try: + await client.open() + + if lock is not None: + async with lock: + async with client: + resp = await client.request(request) + else: + async with client: + resp = await client.request(request) + + await self._sleep(delay_time) + return resp + except (TimeoutError, asyncio.exceptions.TimeoutError) as e: + self.logger().debug(f"Request {request} timeout error: {e}") + if max_retries > 0: + await self._sleep(CONSTANTS.REQUEST_RETRY_INTERVAL) + return await self.request_with_retry(client, request, max_retries - 1, lock, delay_time) + else: + self.logger().error(f"Max retries reached. 
Request {request} failed due to timeout.") + except Exception as e: + self.logger().error(f"Request {request} failed: {e}") + + def get_token_symbol_from_all_markets(self, code: str, issuer: str) -> Optional[str]: + all_markets = self._make_trading_pairs_request() + for market in all_markets.values(): + token_symbol = market.get_token_symbol(code, issuer) + + if token_symbol is not None: + return token_symbol.upper() + return None diff --git a/hummingbot/connector/exchange/xrpl/xrpl_utils.py b/hummingbot/connector/exchange/xrpl/xrpl_utils.py index 051661e394..4e02b85315 100644 --- a/hummingbot/connector/exchange/xrpl/xrpl_utils.py +++ b/hummingbot/connector/exchange/xrpl/xrpl_utils.py @@ -1,9 +1,23 @@ +import asyncio import binascii +from dataclasses import dataclass, field from decimal import Decimal -from typing import Any, Dict, List, Optional +from random import randrange +from typing import Any, Dict, Final, List, Optional, cast from pydantic import BaseModel, Field, SecretStr, validator -from xrpl.models import TransactionMetadata +from xrpl.asyncio.account import get_next_valid_seq_number +from xrpl.asyncio.clients import Client, XRPLRequestFailureException +from xrpl.asyncio.transaction import XRPLReliableSubmissionException +from xrpl.asyncio.transaction.main import ( + _LEDGER_OFFSET, + _calculate_fee_per_transaction_type, + _get_network_id_and_build_version, + _tx_needs_networkID, +) +from xrpl.models import Request, Response, Transaction, TransactionMetadata, Tx +from xrpl.models.requests.request import LookupByLedgerRequest, RequestMethod +from xrpl.models.utils import require_kwargs_on_init from xrpl.utils.txn_parser.utils import NormalizedNode, normalize_nodes from xrpl.utils.txn_parser.utils.order_book_parser import ( _get_change_amount, @@ -17,6 +31,7 @@ from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData from hummingbot.client.config.config_validators import validate_with_regex +from hummingbot.connector.exchange.xrpl import xrpl_constants as CONSTANTS from hummingbot.core.data_type.trade_fee import TradeFeeSchema CENTRALIZED = True @@ -27,6 +42,7 @@ taker_percent_fee_decimal=Decimal("0"), buy_percent_fee_deducted_from_returns=True, ) +_REQ_ID_MAX: Final[int] = 1_000_000 def get_order_book_changes(metadata: TransactionMetadata) -> List[AccountOfferChanges]: @@ -121,10 +137,23 @@ class XRPLMarket(BaseModel): quote: str base_issuer: str quote_issuer: str + trading_pair_symbol: Optional[str] = None def __repr__(self): return str(self.dict()) + def get_token_symbol(self, code: str, issuer: str) -> Optional[str]: + if self.trading_pair_symbol is None: + return None + + if code.upper() == self.base.upper() and issuer.upper() == self.base_issuer.upper(): + return self.trading_pair_symbol.split("-")[0] + + if code.upper() == self.quote.upper() and issuer.upper() == self.quote_issuer.upper(): + return self.trading_pair_symbol.split("-")[1] + + return None + def represent_xrpl_market(dumper, data): return dumper.represent_dict(data.dict()) @@ -133,6 +162,138 @@ def represent_xrpl_market(dumper, data): SafeRepresenter.add_representer(XRPLMarket, represent_xrpl_market) +@require_kwargs_on_init +@dataclass(frozen=True) +class Ledger(Request, LookupByLedgerRequest): + """ + Retrieve information about the public ledger. 
+ `See ledger `_ + """ + + method: RequestMethod = field(default=RequestMethod.LEDGER, init=False) + transactions: bool = False + expand: bool = False + owner_funds: bool = False + binary: bool = False + queue: bool = False + + +async def autofill( + transaction: Transaction, client: Client, signers_count: Optional[int] = None, try_count: int = 0 +) -> Transaction: + """ + Autofills fields in a transaction. This will set `sequence`, `fee`, and + `last_ledger_sequence` according to the current state of the server this Client is + connected to. It also converts all X-Addresses to classic addresses. + + Args: + transaction: the transaction to be signed. + client: a network client. + signers_count: the expected number of signers for this transaction. + Only used for multisigned transactions. + + Returns: + The autofilled transaction. + """ + try: + transaction_json = transaction.to_dict() + if not client.network_id: + await _get_network_id_and_build_version(client) + if "network_id" not in transaction_json and _tx_needs_networkID(client): + transaction_json["network_id"] = client.network_id + if "sequence" not in transaction_json: + sequence = await get_next_valid_seq_number(transaction_json["account"], client) + transaction_json["sequence"] = sequence + if "fee" not in transaction_json: + fee = int(await _calculate_fee_per_transaction_type(transaction, client, signers_count)) + fee = fee * CONSTANTS.FEE_MULTIPLIER + transaction_json["fee"] = str(fee) + if "last_ledger_sequence" not in transaction_json: + ledger_sequence = await get_latest_validated_ledger_sequence(client) + transaction_json["last_ledger_sequence"] = ledger_sequence + _LEDGER_OFFSET + return Transaction.from_dict(transaction_json) + except Exception as e: + if try_count < CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY: + return await autofill(transaction, client, signers_count, try_count + 1) + else: + raise Exception(f"Autofill failed: {e}") + + +async def get_latest_validated_ledger_sequence(client: Client) -> int: + """ + Returns the sequence number of the latest validated ledger. + + Args: + client: The network client to use to send the request. + + Returns: + The sequence number of the latest validated ledger. + + Raises: + XRPLRequestFailureException: if the rippled API call fails. + """ + + request = Ledger(ledger_index="validated") + request_dict = request.to_dict() + request_dict["id"] = f"{request.method}_{randrange(_REQ_ID_MAX)}" + request_with_id = Ledger.from_dict(request_dict) + + response = await client._request_impl(request_with_id) + if response.is_successful(): + return cast(int, response.result["ledger_index"]) + + raise XRPLRequestFailureException(response.result) + + +_LEDGER_CLOSE_TIME: Final[int] = 1 + + +async def _wait_for_final_transaction_outcome( + transaction_hash: str, client: Client, prelim_result: str, last_ledger_sequence: int +) -> Response: + """ + The core logic of reliable submission. Polls the ledger until the result of the + transaction can be considered final, meaning it has either been included in a + validated ledger, or the transaction's LastLedgerSequence has been surpassed by the + latest ledger sequence (meaning it will never be included in a validated ledger). 
+ """ + await asyncio.sleep(_LEDGER_CLOSE_TIME) + + current_ledger_sequence = await get_latest_validated_ledger_sequence(client) + + if current_ledger_sequence >= last_ledger_sequence: + raise XRPLReliableSubmissionException( + f"The latest validated ledger sequence {current_ledger_sequence} is " + f"greater than LastLedgerSequence {last_ledger_sequence} in " + f"the transaction. Prelim result: {prelim_result}" + ) + + # query transaction by hash + transaction_response = await client._request_impl(Tx(transaction=transaction_hash)) + if not transaction_response.is_successful(): + if transaction_response.result["error"] == "txnNotFound": + """ + For the case if a submitted transaction is still + in queue and not processed on the ledger yet. + """ + return await _wait_for_final_transaction_outcome( + transaction_hash, client, prelim_result, last_ledger_sequence + ) + else: + raise XRPLRequestFailureException(transaction_response.result) + + result = transaction_response.result + if "validated" in result and result["validated"]: + # result is in a validated ledger, outcome is final + return_code = result["meta"]["TransactionResult"] + if return_code != "tesSUCCESS": + raise XRPLReliableSubmissionException(f"Transaction failed: {return_code}") + return transaction_response + + # outcome is not yet final + return await _wait_for_final_transaction_outcome(transaction_hash, client, prelim_result, last_ledger_sequence) + + class XRPLConfigMap(BaseConnectorConfigMap): connector: str = Field(default="xrpl", const=True, client_data=None) xrpl_secret_key: SecretStr = Field( @@ -146,7 +307,7 @@ class XRPLConfigMap(BaseConnectorConfigMap): ) wss_node_url = Field( - default="wss://s1.ripple.com/", + default="wss://xrplcluster.com/", client_data=ClientFieldData( prompt=lambda cm: "Enter your XRPL Websocket Node URL", is_secure=False, @@ -165,6 +326,16 @@ class XRPLConfigMap(BaseConnectorConfigMap): ), ) + wss_third_node_url = Field( + default="wss://s2.ripple.com/", + client_data=ClientFieldData( + prompt=lambda cm: "Enter your third XRPL Websocket Node URL", + is_secure=False, + is_connect_key=True, + prompt_on_new=True, + ), + ) + custom_markets: Dict[str, XRPLMarket] = Field( default={ "SOLO-XRP": XRPLMarket( @@ -209,5 +380,14 @@ def validate_wss_second_node_url(cls, v: str): raise ValueError(ret) return v + @validator("wss_third_node_url", pre=True) + def validate_wss_third_node_url(cls, v: str): + pattern = r"^(wss://)[\w.-]+(:\d+)?(/[\w.-]*)*$" + error_message = "Invalid node url. Node url should be in websocket format." + ret = validate_with_regex(v, pattern, error_message) + if ret is not None: + raise ValueError(ret) + return v + KEYS = XRPLConfigMap.construct() diff --git a/hummingbot/connector/gateway/amm/gateway_telos_amm.py b/hummingbot/connector/gateway/amm/gateway_telos_amm.py new file mode 100644 index 0000000000..c5b70e3880 --- /dev/null +++ b/hummingbot/connector/gateway/amm/gateway_telos_amm.py @@ -0,0 +1,81 @@ +import asyncio +from typing import TYPE_CHECKING, List, Optional + +from hummingbot.connector.gateway.amm.gateway_evm_amm import GatewayEVMAMM +from hummingbot.core.data_type.cancellation_result import CancellationResult + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + + +class GatewayTelosAMM(GatewayEVMAMM): + """ + Defines basic functions common to connectors that interact with Gateway. 
+ """ + + API_CALL_TIMEOUT = 60.0 + POLL_INTERVAL = 15.0 + + def __init__(self, + client_config_map: "ClientConfigAdapter", + connector_name: str, + chain: str, + network: str, + address: str, + trading_pairs: List[str] = [], + additional_spenders: List[str] = [], # not implemented + trading_required: bool = True + ): + """ + :param connector_name: name of connector on gateway + :param chain: refers to a block chain, e.g. ethereum or avalanche + :param network: refers to a network of a particular blockchain e.g. mainnet or kovan + :param address: the address of the eth wallet which has been added on gateway + :param trading_pairs: a list of trading pairs + :param trading_required: Whether actual trading is needed. Useful for some functionalities or commands like the balance command + """ + super().__init__(client_config_map=client_config_map, + connector_name=connector_name, + chain=chain, + network=network, + address=address, + trading_pairs=trading_pairs, + additional_spenders=additional_spenders, + trading_required=trading_required) + + async def get_chain_info(self): + """ + Calls the base endpoint of the connector on Gateway to know basic info about chain being used. + """ + try: + self._chain_info = await self._get_gateway_instance().get_network_status( + chain=self.chain, network=self.network + ) + if not isinstance(self._chain_info, list): + self._native_currency = self._chain_info.get("nativeCurrency", "TLOS") + except asyncio.CancelledError: + raise + except Exception as e: + self.logger().network( + "Error fetching chain info", + exc_info=True, + app_warning_msg=str(e) + ) + + async def cancel_all(self, timeout_seconds: float) -> List[CancellationResult]: + """ + This is intentionally left blank, because cancellation is not supported for telos blockchain. + """ + return [] + + async def _execute_cancel(self, order_id: str, cancel_age: int) -> Optional[str]: + """ + This is intentionally left blank, because cancellation is not supported for telos blockchain. + """ + pass + + async def cancel_outdated_orders(self, cancel_age: int) -> List[CancellationResult]: + """ + This is intentionally left blank, because cancellation is not supported for telos blockchain. + """ + return [] diff --git a/hummingbot/connector/gateway/common_types.py b/hummingbot/connector/gateway/common_types.py index ff70776518..c2301fbcfd 100644 --- a/hummingbot/connector/gateway/common_types.py +++ b/hummingbot/connector/gateway/common_types.py @@ -6,6 +6,7 @@ class Chain(Enum): ETHEREUM = ('ethereum', 'ETH') TEZOS = ('tezos', 'XTZ') + TELOS = ('telos', 'TLOS') def __init__(self, chain: str, native_currency: str): self.chain = chain diff --git a/hummingbot/connector/test_support/exchange_connector_test.py b/hummingbot/connector/test_support/exchange_connector_test.py index d3fd1f0e0a..bdbda65371 100644 --- a/hummingbot/connector/test_support/exchange_connector_test.py +++ b/hummingbot/connector/test_support/exchange_connector_test.py @@ -691,7 +691,7 @@ def test_create_buy_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}." + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." ) ) @@ -731,7 +731,7 @@ def test_create_sell_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}." 
+ f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." ) ) @@ -1048,19 +1048,21 @@ def test_update_order_status_when_filled(self, mock_api): ) order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] - urls = self.configure_completely_filled_order_status_response( - order=order, - mock_api=mock_api, - callback=lambda *args, **kwargs: request_sent_event.set()) - if self.is_order_fill_http_update_included_in_status_update: trade_url = self.configure_full_fill_trade_response( order=order, - mock_api=mock_api) + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) else: # If the fill events will not be requested with the order status, we need to manually set the event # to allow the ClientOrderTracker to process the last status update order.completely_filled_event.set() + + urls = self.configure_completely_filled_order_status_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + self.async_run_with_timeout(self.exchange._update_order_status()) # Execute one more synchronization to ensure the async task that processes the update is finished self.async_run_with_timeout(request_sent_event.wait()) @@ -1233,15 +1235,15 @@ def test_update_order_status_when_order_has_not_changed_and_one_partial_fill(sel ) order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] - order_url = self.configure_partially_filled_order_status_response( - order=order, - mock_api=mock_api) - if self.is_order_fill_http_update_included_in_status_update: trade_url = self.configure_partial_fill_trade_response( order=order, mock_api=mock_api) + order_url = self.configure_partially_filled_order_status_response( + order=order, + mock_api=mock_api) + self.assertTrue(order.is_open) self.async_run_with_timeout(self.exchange._update_order_status()) @@ -1289,15 +1291,15 @@ def test_update_order_status_when_filled_correctly_processed_even_when_trade_fil ) order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] - urls = self.configure_completely_filled_order_status_response( - order=order, - mock_api=mock_api) - if self.is_order_fill_http_update_included_in_status_update: trade_url = self.configure_erroneous_http_fill_trade_response( order=order, mock_api=mock_api) + urls = self.configure_completely_filled_order_status_response( + order=order, + mock_api=mock_api) + # Since the trade fill update will fail we need to manually set the event # to allow the ClientOrderTracker to process the last status update order.completely_filled_event.set() @@ -1560,11 +1562,6 @@ def test_lost_order_included_in_order_fills_update_and_not_in_order_status_updat self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) - self.configure_completely_filled_order_status_response( - order=order, - mock_api=mock_api, - callback=lambda *args, **kwargs: request_sent_event.set()) - if self.is_order_fill_http_update_included_in_status_update: trade_url = self.configure_full_fill_trade_response( order=order, @@ -1576,6 +1573,11 @@ def test_lost_order_included_in_order_fills_update_and_not_in_order_status_updat order.completely_filled_event.set() request_sent_event.set() + self.configure_completely_filled_order_status_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + self.async_run_with_timeout(self.exchange._update_order_status()) # Execute one more synchronization to ensure the async task that 
processes the update is finished self.async_run_with_timeout(request_sent_event.wait()) diff --git a/hummingbot/connector/test_support/perpetual_derivative_test.py b/hummingbot/connector/test_support/perpetual_derivative_test.py index a3e5820646..d524a084be 100644 --- a/hummingbot/connector/test_support/perpetual_derivative_test.py +++ b/hummingbot/connector/test_support/perpetual_derivative_test.py @@ -248,7 +248,8 @@ def test_create_buy_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) @@ -292,7 +293,8 @@ def test_create_sell_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) @@ -338,7 +340,8 @@ def test_create_order_to_close_short_position(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) @@ -381,7 +384,8 @@ def test_create_order_to_close_long_position(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." 
) ) diff --git a/hummingbot/connector/time_synchronizer.py b/hummingbot/connector/time_synchronizer.py index de3f1157a2..599df2c421 100644 --- a/hummingbot/connector/time_synchronizer.py +++ b/hummingbot/connector/time_synchronizer.py @@ -22,6 +22,7 @@ class TimeSynchronizer: def __init__(self): self._time_offset_ms: Deque[float] = deque(maxlen=5) + self._lock = asyncio.Lock() @classmethod def logger(cls) -> HummingbotLogger: @@ -80,11 +81,12 @@ async def update_server_time_if_not_initialized(self, time_provider: Awaitable): :param time_provider: Awaitable object that returns the current time """ - if not self._time_offset_ms: - await self.update_server_time_offset_with_time_provider(time_provider) - else: - # This is done to avoid the warning message from asyncio framework saying a coroutine was not awaited - time_provider.close() + async with self._lock: + if not self._time_offset_ms: + await self.update_server_time_offset_with_time_provider(time_provider) + else: + # This is done to avoid the warning message from asyncio framework saying a coroutine was not awaited + time_provider.close() def _current_seconds_counter(self): return time.perf_counter() diff --git a/hummingbot/core/clock.pyx b/hummingbot/core/clock.pyx index 071b8f1839..42c2954300 100644 --- a/hummingbot/core/clock.pyx +++ b/hummingbot/core/clock.pyx @@ -30,9 +30,9 @@ cdef class Clock: """ self._clock_mode = clock_mode self._tick_size = tick_size - self._start_time = start_time + self._start_time = start_time if clock_mode is ClockMode.BACKTEST else (time.time() // tick_size) * tick_size self._end_time = end_time - self._current_tick = start_time if clock_mode is ClockMode.BACKTEST else (time.time() // tick_size) * tick_size + self._current_tick = self._start_time self._child_iterators = [] self._current_context = None self._started = False diff --git a/hummingbot/core/data_type/in_flight_order.py b/hummingbot/core/data_type/in_flight_order.py index 23da7c20e8..531e6d3aca 100644 --- a/hummingbot/core/data_type/in_flight_order.py +++ b/hummingbot/core/data_type/in_flight_order.py @@ -296,7 +296,7 @@ async def get_exchange_order_id(self): def cumulative_fee_paid(self, token: str, exchange: Optional['ExchangeBase'] = None) -> Decimal: """ - Returns the total amount of fee paid for each traid update, expressed in the specified token + Returns the total amount of fee paid for each trade update, expressed in the specified token :param token: The token all partial fills' fees should be transformed to before summing them :param exchange: The exchange being used. If specified the logic will try to use the order book to get the rate :return: the cumulative fee paid for all partial fills in the specified token @@ -375,7 +375,8 @@ async def wait_until_processed_by_exchange(self): def build_order_created_message(self) -> str: return ( f"Created {self.order_type.name.upper()} {self.trade_type.name.upper()} order " - f"{self.client_order_id} for {self.amount} {self.trading_pair}." + f"{self.client_order_id} for {self.amount} {self.trading_pair} " + f"at {self.price}." ) @@ -383,5 +384,6 @@ class PerpetualDerivativeInFlightOrder(InFlightOrder): def build_order_created_message(self) -> str: return ( f"Created {self.order_type.name.upper()} {self.trade_type.name.upper()} order " - f"{self.client_order_id} for {self.amount} to {self.position.name.upper()} a {self.trading_pair} position." + f"{self.client_order_id} for {self.amount} to {self.position.name.upper()} a {self.trading_pair} position " + f"at {self.price}." 
) diff --git a/hummingbot/core/network_base.py b/hummingbot/core/network_base.py index aad76de240..8fcb8d7c2d 100644 --- a/hummingbot/core/network_base.py +++ b/hummingbot/core/network_base.py @@ -1,9 +1,10 @@ import asyncio import logging from typing import Optional -from hummingbot.logger import HummingbotLogger -from hummingbot.core.utils.async_utils import safe_ensure_future + from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.logger import HummingbotLogger NaN = float("nan") nb_logger = None @@ -79,38 +80,27 @@ async def check_network(self) -> NetworkStatus: async def _check_network_loop(self): while True: last_status = self._network_status - has_unexpected_error = False - try: new_status = await asyncio.wait_for(self.check_network(), timeout=self._check_network_timeout) - except asyncio.CancelledError: - raise - except asyncio.TimeoutError: - self.logger().debug("Check network call has timed out. Network status is not connected.") - new_status = NetworkStatus.NOT_CONNECTED - except Exception: - self.logger().error("Unexpected error while checking for network status.", exc_info=True) - new_status = NetworkStatus.NOT_CONNECTED - has_unexpected_error = True - - try: - self._network_status = new_status if new_status != last_status: - if new_status is NetworkStatus.CONNECTED: + self._network_status = new_status + if self._network_status is NetworkStatus.CONNECTED: self.logger().info(f"Network status has changed to {new_status}. Starting networking...") await self.start_network() else: self.logger().info(f"Network status has changed to {new_status}. Stopping networking...") await self.stop_network() - - if not has_unexpected_error: - await asyncio.sleep(self._check_network_interval) - else: - await asyncio.sleep(self._network_error_wait_time) + await self._sleep(self._check_network_interval) except asyncio.CancelledError: raise - except Exception: - self.logger().error("Unexpected error starting or stopping network.", exc_info=True) + except asyncio.TimeoutError: + self.logger().debug("Check network call has timed out. Network status is not connected.") + self._network_status = NetworkStatus.NOT_CONNECTED + await self._sleep(self._check_network_interval) + except Exception as e: + self.logger().error(f"Unexpected error while checking for network status: {e}", exc_info=True) + self._network_status = NetworkStatus.NOT_CONNECTED + await self._sleep(self._network_error_wait_time) def start(self): self._check_network_task = safe_ensure_future(self._check_network_loop()) @@ -124,3 +114,6 @@ def stop(self): self._network_status = NetworkStatus.STOPPED safe_ensure_future(self.stop_network()) self._started = False + + async def _sleep(self, seconds: float): + await asyncio.sleep(seconds) diff --git a/hummingbot/core/network_iterator.pyx b/hummingbot/core/network_iterator.pyx index 1f2253e3f6..18a1da144f 100644 --- a/hummingbot/core/network_iterator.pyx +++ b/hummingbot/core/network_iterator.pyx @@ -89,36 +89,30 @@ cdef class NetworkIterator(TimeIterator): async def _check_network_loop(self): while True: - new_status = self._network_status last_status = self._network_status - has_unexpected_error = False - try: new_status = await asyncio.wait_for(self.check_network(), timeout=self._check_network_timeout) + if new_status != last_status: + self._network_status = new_status + if self._network_status is NetworkStatus.CONNECTED: + self.logger().info(f"Network status has changed to {new_status}. 
Starting networking...") + await self.start_network() + else: + self.logger().info(f"Network status has changed to {new_status}. Stopping networking...") + await self.stop_network() + await asyncio.sleep(self._check_network_interval) except asyncio.CancelledError: raise except asyncio.TimeoutError: - self.logger().debug(f"Check network call has timed out. Network status is not connected.") - new_status = NetworkStatus.NOT_CONNECTED - except Exception: - self.logger().error("Unexpected error while checking for network status.", exc_info=True) - new_status = NetworkStatus.NOT_CONNECTED - has_unexpected_error = True - - self._network_status = new_status - if new_status != last_status: - if new_status is NetworkStatus.CONNECTED: - self.logger().info(f"Network status has changed to {new_status}. Starting networking...") - await self.start_network() - else: - self.logger().info(f"Network status has changed to {new_status}. Stopping networking...") - await self.stop_network() - - if not has_unexpected_error: + self.logger().debug("Check network call has timed out. Network status is not connected.") + self._network_status = NetworkStatus.NOT_CONNECTED await asyncio.sleep(self._check_network_interval) - else: + except Exception as e: + self.logger().error(f"Unexpected error while checking for network status: {e}", exc_info=True) + self._network_status = NetworkStatus.NOT_CONNECTED await asyncio.sleep(self._network_error_wait_time) + cdef c_start(self, Clock clock, double timestamp): TimeIterator.c_start(self, clock, timestamp) self._check_network_task = safe_ensure_future(self._check_network_loop()) diff --git a/hummingbot/core/utils/gateway_config_utils.py b/hummingbot/core/utils/gateway_config_utils.py index b48b653a53..84120e6266 100644 --- a/hummingbot/core/utils/gateway_config_utils.py +++ b/hummingbot/core/utils/gateway_config_utils.py @@ -8,6 +8,7 @@ "avalanche": "AVAX", "algorand": "ALGO", "cosmos": "ATOM", + "celo": "CELO", "osmosis": "OSMO", "polygon": "MATIC", "harmony": "ONE", @@ -18,7 +19,8 @@ "xdc": "XDC", "tezos": "XTZ", "xrpl": "XRP", - "kujira": "KUJI" + "kujira": "KUJI", + "telos": "TLOS" } SUPPORTED_CHAINS = set(native_tokens.keys()) diff --git a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py index acab2f8ff9..931423bdf6 100644 --- a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py @@ -1,12 +1,8 @@ -import asyncio import logging -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional -import numpy as np - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles import constants as CONSTANTS from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.logger import HummingbotLogger @@ -44,6 +40,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + 
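+ # Note: these two hooks are consumed by the shared CandlesBase.fetch_candles flow, e.g.
+ #     params = self._get_rest_candles_params(start_time, end_time)
+ #     raw = await rest_assistant.execute_request(url=self.candles_url, throttler_limit_id=self.candles_endpoint, params=params)
+ #     candles = self._parse_rest_candles(raw, end_time)
+ # with every request capped at candles_max_result_per_rest_request - 1 rows.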
@property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -61,22 +65,30 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair.replace("-", "/") - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 500): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"symbol": self._ex_trading_pair, "interval": CONSTANTS.INTERVALS[self.interval], "n": limit} + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://ascendex.github.io/ascendex-pro-api/#historical-bar-data + """ + params = { + "symbol": self._ex_trading_pair, + "interval": CONSTANTS.INTERVALS[self.interval], + "n": limit, + } if start_time: - params["from"] = start_time - if end_time: - params["to"] = end_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) + params["from"] = start_time * 1000 + params["to"] = (start_time + self.interval_in_seconds * limit) * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: new_hb_candles = [] - for i in candles["data"]: - timestamp_ms = i["data"]["ts"] + for i in data["data"]: + timestamp = self.ensure_timestamp_in_seconds(i["data"]["ts"]) + if timestamp >= end_time: + continue open = i["data"]["o"] high = i["data"]["h"] low = i["data"]["l"] @@ -87,89 +99,30 @@ async def fetch_candles(self, n_trades = 0 taker_buy_base_volume = 0 taker_buy_quote_volume = 0 - new_hb_candles.append([timestamp_ms, open, high, low, close, volume, + new_hb_candles.append([timestamp, open, high, low, close, volume, quote_asset_volume, n_trades, taker_buy_base_volume, taker_buy_quote_volume]) - return np.array(new_hb_candles).astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1000) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(self._candles[0][0]) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - payload = {"op": CONSTANTS.SUB_ENDPOINT_NAME, - "ch": f"bar:{CONSTANTS.INTERVALS[self.interval]}:{self._ex_trading_pair}"} - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data.get("m") == "ping": - pong_payloads = {"op": "pong"} - pong_request = WSJSONRequest(payload=pong_payloads) - await websocket_assistant.send(request=pong_request) - if data is not None and data.get("m") == "bar": # data will be None when the websocket is disconnected - timestamp = data["data"]["ts"] - open = data["data"]["o"] - high = data["data"]["h"] - low = data["data"]["l"] - close = data["data"]["c"] - quote_asset_volume = data["data"]["v"] - volume = 0 - n_trades = 0 - taker_buy_base_volume = 0 - taker_buy_quote_volume = 0 - if len(self._candles) == 0: - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 1M interval). - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + return new_hb_candles + + def ws_subscription_payload(self): + payload = {"op": CONSTANTS.SUB_ENDPOINT_NAME, + "ch": f"bar:{CONSTANTS.INTERVALS[self.interval]}:{self._ex_trading_pair}"} + return payload + + def _parse_websocket_message(self, data: dict): + if data.get("m") == "ping": + pong_payloads = {"op": "pong"} + return WSJSONRequest(payload=pong_payloads) + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("m") == "bar": + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(data["data"]["ts"]) + candles_row_dict["open"] = data["data"]["o"] + candles_row_dict["low"] = data["data"]["l"] + candles_row_dict["high"] = data["data"]["h"] + candles_row_dict["close"] = data["data"]["c"] + candles_row_dict["volume"] = 0 + candles_row_dict["quote_asset_volume"] = data["data"]["v"] + candles_row_dict["n_trades"] = 0 + candles_row_dict["taker_buy_base_volume"] = 0 + candles_row_dict["taker_buy_quote_volume"] = 0 + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py index 2fdbbe5b64..960f517432 100644 --- a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py +++ b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py @@ -25,7 +25,7 @@ "1w": "1w", "1M": "1m" }) - +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 500 ALL_ENDPOINTS_LIMIT = "All" RATE_LIMITS = [ diff --git 
a/hummingbot/data_feed/candles_feed/binance_perpetual_candles/binance_perpetual_candles.py b/hummingbot/data_feed/candles_feed/binance_perpetual_candles/binance_perpetual_candles.py index 451919f5de..4b89eb1b76 100644 --- a/hummingbot/data_feed/candles_feed/binance_perpetual_candles/binance_perpetual_candles.py +++ b/hummingbot/data_feed/candles_feed/binance_perpetual_candles/binance_perpetual_candles.py @@ -1,12 +1,7 @@ -import asyncio import logging -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional -import numpy as np - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.binance_perpetual_candles import constants as CONSTANTS from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.logger import HummingbotLogger @@ -44,6 +39,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -61,101 +64,51 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair.replace("-", "") - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 500): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"symbol": self._ex_trading_pair, "interval": self.interval, "limit": limit} - if start_time: - params["startTime"] = start_time - if end_time: - params["endTime"] = end_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - - return np.array(candles)[:, [0, 1, 2, 3, 4, 5, 7, 8, 9, 10]].astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1000) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(self._candles[0][0]) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=min(1000, missing_records + 1)) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. 
Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: """ - Subscribes to the candles events through the provided websocket connection. - :param ws: the websocket assistant used to connect to the exchange + For API documentation, please refer to: + https://binance-docs.github.io/apidocs/futures/en/#kline-candlestick-data """ - try: - candle_params = [] - candle_params.append(f"{self._ex_trading_pair.lower()}@kline_{self.interval}") - payload = { - "method": "SUBSCRIBE", - "params": candle_params, - "id": 1 - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data is not None and data.get("e") == "kline": # data will be None when the websocket is disconnected - timestamp = data["k"]["t"] - open = data["k"]["o"] - low = data["k"]["l"] - high = data["k"]["h"] - close = data["k"]["c"] - volume = data["k"]["v"] - quote_asset_volume = data["k"]["q"] - n_trades = data["k"]["n"] - taker_buy_base_volume = data["k"]["V"] - taker_buy_quote_volume = data["k"]["Q"] - if len(self._candles) == 0: - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 1M interval). 
- self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + params = { + "symbol": self._ex_trading_pair, + "interval": self.interval, + "limit": limit + } + if start_time: + params["startTime"] = start_time * 1000 + if end_time: + params["endTime"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [ + [self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], row[7], + row[8], row[9], row[10]] + for row in data if self.ensure_timestamp_in_seconds(row[0]) < end_time] + + def ws_subscription_payload(self): + candle_params = [f"{self._ex_trading_pair.lower()}@kline_{self.interval}"] + payload = { + "method": "SUBSCRIBE", + "params": candle_params, + "id": 1 + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("e") == "kline": # data will be None when the websocket is disconnected + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(data["k"]["t"]) + candles_row_dict["open"] = data["k"]["o"] + candles_row_dict["low"] = data["k"]["l"] + candles_row_dict["high"] = data["k"]["h"] + candles_row_dict["close"] = data["k"]["c"] + candles_row_dict["volume"] = data["k"]["v"] + candles_row_dict["quote_asset_volume"] = data["k"]["q"] + candles_row_dict["n_trades"] = data["k"]["n"] + candles_row_dict["taker_buy_base_volume"] = data["k"]["V"] + candles_row_dict["taker_buy_quote_volume"] = data["k"]["Q"] + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/binance_perpetual_candles/constants.py b/hummingbot/data_feed/candles_feed/binance_perpetual_candles/constants.py index 3d7132399f..54ec1fe376 100644 --- a/hummingbot/data_feed/candles_feed/binance_perpetual_candles/constants.py +++ b/hummingbot/data_feed/candles_feed/binance_perpetual_candles/constants.py @@ -25,7 +25,7 @@ "1w": 604800, "1M": 2592000 }) - +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1500 REQUEST_WEIGHT = "REQUEST_WEIGHT" RATE_LIMITS = [ diff --git a/hummingbot/data_feed/candles_feed/binance_spot_candles/binance_spot_candles.py b/hummingbot/data_feed/candles_feed/binance_spot_candles/binance_spot_candles.py index e8d41b53de..bef4583a51 100644 --- a/hummingbot/data_feed/candles_feed/binance_spot_candles/binance_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/binance_spot_candles/binance_spot_candles.py @@ -1,12 +1,7 @@ -import asyncio import logging -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.binance_spot_candles import constants as CONSTANTS from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.logger import HummingbotLogger @@ -44,6 +39,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def 
candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -61,101 +64,45 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair.replace("-", "") - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 500): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"symbol": self._ex_trading_pair, "interval": self.interval, "limit": limit} - if start_time: - params["startTime"] = start_time + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + params = { + "symbol": self._ex_trading_pair, + "interval": self.interval, + "limit": limit + } if end_time: - params["endTime"] = end_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - - return np.array(candles)[:, [0, 1, 2, 3, 4, 5, 7, 8, 9, 10]].astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1000) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(self._candles[0][0]) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - candle_params = [] - candle_params.append(f"{self._ex_trading_pair.lower()}@kline_{self.interval}") - payload = { - "method": "SUBSCRIBE", - "params": candle_params, - "id": 1 - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data is not None and data.get("e") == "kline": # data will be None when the websocket is disconnected - timestamp = data["k"]["t"] - open = data["k"]["o"] - high = data["k"]["h"] - low = data["k"]["l"] - close = data["k"]["c"] - volume = data["k"]["v"] - quote_asset_volume = data["k"]["q"] - n_trades = data["k"]["n"] - taker_buy_base_volume = data["k"]["V"] - taker_buy_quote_volume = data["k"]["Q"] - if len(self._candles) == 0: - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 1M interval). - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + params["endTime"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [ + [self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], + row[7], row[8], row[9], row[10]] + for row in data if self.ensure_timestamp_in_seconds(row[0]) < end_time] + + def ws_subscription_payload(self): + candle_params = [f"{self._ex_trading_pair.lower()}@kline_{self.interval}"] + payload = { + "method": "SUBSCRIBE", + "params": candle_params, + "id": 1 + } + return payload + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data is not None and data.get("e") == "kline": # data will be None when the websocket is disconnected + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(data["k"]["t"]) + candles_row_dict["open"] = data["k"]["o"] + candles_row_dict["high"] = data["k"]["h"] + candles_row_dict["low"] = data["k"]["l"] + candles_row_dict["close"] = data["k"]["c"] + candles_row_dict["volume"] = data["k"]["v"] + candles_row_dict["quote_asset_volume"] = data["k"]["q"] + candles_row_dict["n_trades"] = data["k"]["n"] + candles_row_dict["taker_buy_base_volume"] = data["k"]["V"] + candles_row_dict["taker_buy_quote_volume"] = data["k"]["Q"] + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/binance_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/binance_spot_candles/constants.py index ac3bb6650d..659ea0c35e 100644 --- a/hummingbot/data_feed/candles_feed/binance_spot_candles/constants.py +++ 
b/hummingbot/data_feed/candles_feed/binance_spot_candles/constants.py @@ -26,7 +26,7 @@ "1w": "1w", "1M": "1M" }) - +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1000 REQUEST_WEIGHT = "REQUEST_WEIGHT" RATE_LIMITS = [ diff --git a/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/__init__.py b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/__init__.py new file mode 100644 index 0000000000..758dea35a6 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.bybit_perpetual_candles.bybit_perpetual_candles import BybitPerpetualCandles + +__all__ = ["BybitPerpetualCandles"] diff --git a/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/bybit_perpetual_candles.py b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/bybit_perpetual_candles.py new file mode 100644 index 0000000000..63c35e5f73 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/bybit_perpetual_candles.py @@ -0,0 +1,120 @@ +import logging +from typing import Any, Dict, List, Optional + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.data_feed.candles_feed.bybit_perpetual_candles import constants as CONSTANTS +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.logger import HummingbotLogger + + +class BybitPerpetualCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"bybit_perpetual_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "") + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://bybit-exchange.github.io/docs/v5/market/kline + + startTime and endTime must be used at the same time. 
+ """ + params = { + "category": "linear", + "symbol": self._ex_trading_pair, + "interval": CONSTANTS.INTERVALS[self.interval], + "limit": limit + } + if start_time: + params["startTime"] = start_time * 1000 + if end_time: + params["endTime"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + if data is not None and data.get("result") is not None: + candles = data["result"].get("list") + if candles is not None: + return [[self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], + 0., 0., 0., 0.] for row in candles if self.ensure_timestamp_in_seconds(row[0]) < end_time][::-1] + + def ws_subscription_payload(self): + interval = CONSTANTS.INTERVALS[self.interval] + trading_pair = self.get_exchange_trading_pair(self._ex_trading_pair) + candle_params = [f"kline.{interval}.{trading_pair}"] + payload = { + "op": "subscribe", + "args": candle_params, + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("data") is not None: + candle = data["data"][0] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["start"]) + candles_row_dict["open"] = candle["open"] + candles_row_dict["low"] = candle["low"] + candles_row_dict["high"] = candle["high"] + candles_row_dict["close"] = candle["close"] + candles_row_dict["volume"] = candle["volume"] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/constants.py b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/constants.py new file mode 100644 index 0000000000..bcee48c955 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/constants.py @@ -0,0 +1,31 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api.bybit.com" +HEALTH_CHECK_ENDPOINT = "/v5/market/time" +CANDLES_ENDPOINT = "/v5/market/kline" + +WSS_URL = "wss://stream.bybit.com/v5/public/linear" + +INTERVALS = bidict({ + "1m": 1, + "3m": 3, + "5m": 5, + "15m": 15, + "30m": 30, + "1h": 60, + "2h": 120, + "4h": 240, + "6h": 360, + "12h": 720, + "1d": "D", + "1w": "W", + "1M": "M" +}) + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1000 + +RATE_LIMITS = [ + RateLimit(CANDLES_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)]), + RateLimit(HEALTH_CHECK_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)])] diff --git a/hummingbot/data_feed/candles_feed/bybit_spot_candles/__init__.py b/hummingbot/data_feed/candles_feed/bybit_spot_candles/__init__.py new file mode 100644 index 0000000000..c2416d81ae --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_spot_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.bybit_spot_candles.bybit_spot_candles import BybitSpotCandles + +__all__ = ["BybitSpotCandles"] diff --git a/hummingbot/data_feed/candles_feed/bybit_spot_candles/bybit_spot_candles.py b/hummingbot/data_feed/candles_feed/bybit_spot_candles/bybit_spot_candles.py new file mode 100644 index 0000000000..1576420fac --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_spot_candles/bybit_spot_candles.py @@ -0,0 +1,121 @@ +import logging +from typing 
import Any, Dict, List, Optional + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.data_feed.candles_feed.bybit_spot_candles import constants as CONSTANTS +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.logger import HummingbotLogger + + +class BybitSpotCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"bybit_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "") + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://bybit-exchange.github.io/docs/v5/market/kline + + startTime and endTime must be used at the same time. + """ + params = { + "category": "spot", + "symbol": self._ex_trading_pair, + "interval": CONSTANTS.INTERVALS[self.interval], + "limit": limit + } + if start_time is not None or end_time is not None: + params["startTime"] = start_time if start_time is not None else end_time - limit * self.interval_in_seconds + params["startTime"] = params["startTime"] * 1000 + params["endTime"] = end_time if end_time is not None else start_time + limit * self.interval_in_seconds + params["endTime"] = params["endTime"] * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + if data is not None and data.get("result") is not None: + candles = data["result"].get("list") + if candles is not None: + return [[self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], + 0., 0., 0., 0.] 
for row in candles if self.ensure_timestamp_in_seconds(row[0]) < end_time][::-1] + + def ws_subscription_payload(self): + interval = CONSTANTS.INTERVALS[self.interval] + trading_pair = self.get_exchange_trading_pair(self._trading_pair) + candle_params = [f"kline.{interval}.{trading_pair}"] + payload = { + "op": "subscribe", + "args": candle_params, + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("data") is not None: + candle = data["data"][0] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["start"]) + candles_row_dict["open"] = candle["open"] + candles_row_dict["low"] = candle["low"] + candles_row_dict["high"] = candle["high"] + candles_row_dict["close"] = candle["close"] + candles_row_dict["volume"] = candle["volume"] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/bybit_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/bybit_spot_candles/constants.py new file mode 100644 index 0000000000..0928547575 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/bybit_spot_candles/constants.py @@ -0,0 +1,31 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api.bybit.com" +HEALTH_CHECK_ENDPOINT = "/v5/market/time" +CANDLES_ENDPOINT = "/v5/market/kline" + +WSS_URL = "wss://stream.bybit.com/v5/public/spot" + +INTERVALS = bidict({ + "1m": 1, + "3m": 3, + "5m": 5, + "15m": 15, + "30m": 30, + "1h": 60, + "2h": 120, + "4h": 240, + "6h": 360, + "12h": 720, + "1d": "D", + "1w": "W", + "1M": "M" +}) + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1000 + +RATE_LIMITS = [ + RateLimit(CANDLES_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)]), + RateLimit(HEALTH_CHECK_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)])] diff --git a/hummingbot/data_feed/candles_feed/candles_base.py b/hummingbot/data_feed/candles_feed/candles_base.py index 969c0bd6f0..bd2591e1f9 100644 --- a/hummingbot/data_feed/candles_feed/candles_base.py +++ b/hummingbot/data_feed/candles_feed/candles_base.py @@ -1,7 +1,7 @@ import asyncio import os from collections import deque -from typing import Optional +from typing import List, Optional import numpy as np import pandas as pd @@ -11,6 +11,7 @@ from hummingbot.core.network_base import NetworkBase from hummingbot.core.network_iterator import NetworkStatus from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory from hummingbot.core.web_assistant.ws_assistant import WSAssistant from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig @@ -53,6 +54,8 @@ def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 1 self._listen_candles_task: Optional[asyncio.Task] = None self._trading_pair = trading_pair self._ex_trading_pair = self.get_exchange_trading_pair(trading_pair) + self._ws_candle_available = asyncio.Event() + self._ping_timeout = None if interval in self.intervals.keys(): self.interval = interval else: @@ -65,6 +68,7 @@ async def start_network(self): This method starts the 
network and starts a task for listen_for_subscriptions. """ await self.stop_network() + await self.initialize_exchange_data() self._listen_candles_task = safe_ensure_future(self.listen_for_subscriptions()) async def stop_network(self): @@ -75,6 +79,14 @@ async def stop_network(self): self._listen_candles_task.cancel() self._listen_candles_task = None + async def initialize_exchange_data(self): + """ + This method is used to set up the exchange data before starting the network. + + (I.E. get the trading pair quanto multiplier, special trading pair or symbol notation, etc.) + """ + pass + @property def ready(self): """ @@ -98,6 +110,14 @@ def health_check_url(self): def candles_url(self): raise NotImplementedError + @property + def candles_endpoint(self): + raise NotImplementedError + + @property + def candles_max_result_per_rest_request(self): + raise NotImplementedError + @property def wss_url(self): raise NotImplementedError @@ -113,6 +133,10 @@ def intervals(self): async def check_network(self) -> NetworkStatus: raise NotImplementedError + @property + def interval_in_seconds(self): + return self.get_seconds_from_interval(self.interval) + @property def candles_df(self) -> pd.DataFrame: """ @@ -138,42 +162,131 @@ def load_candles_from_csv(self, data_path: str): async def get_historical_candles(self, config: HistoricalCandlesConfig): try: + await self.initialize_exchange_data() all_candles = [] - current_start_time = config.start_time - while current_start_time <= config.end_time: - fetched_candles = await self.fetch_candles(start_time=current_start_time) + current_end_time = config.end_time + self.interval_in_seconds + current_start_time = config.start_time - self.interval_in_seconds + while current_end_time >= current_start_time: + missing_records = int((current_end_time - current_start_time) / self.interval_in_seconds) + fetched_candles = await self.fetch_candles(end_time=current_end_time, limit=missing_records) if fetched_candles.size <= 1: break all_candles.append(fetched_candles) - last_timestamp = fetched_candles[-1][0] # Assuming the first column is the timestamp - current_start_time = int(last_timestamp) - - final_candles = np.concatenate(all_candles, axis=0) if all_candles else np.array([]) + last_timestamp = self.ensure_timestamp_in_seconds( + fetched_candles[0][0]) # Assuming the first column is the timestamp + current_end_time = last_timestamp - self.interval_in_seconds + self.check_candles_sorted_and_equidistant(all_candles) + final_candles = np.concatenate(all_candles[::-1], axis=0) if all_candles else np.array([]) candles_df = pd.DataFrame(final_candles, columns=self.columns) candles_df.drop_duplicates(subset=["timestamp"], inplace=True) + candles_df = candles_df[ + (candles_df["timestamp"] <= config.end_time) & (candles_df["timestamp"] >= config.start_time)] return candles_df except Exception as e: self.logger().exception(f"Error fetching historical candles: {str(e)}") + def check_candles_sorted_and_equidistant(self, candles: np.ndarray): + """ + This method checks if the given candles are sorted by timestamp in ascending order and equidistant. 
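+ If either check fails, a warning is logged and the cached candles are cleared so they can be rebuilt.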
+ :param candles: numpy array with the candles + """ + timestamps = [candle[0] for candle in candles] + if len(self._candles) <= 1: + return + if not np.all(np.diff(timestamps) >= 0): + self.logger().warning("Candles are not sorted by timestamp in ascending order.") + self._reset_candles() + return + timestamp_steps = np.unique(np.diff(timestamps)) + interval_in_seconds = self.get_seconds_from_interval(self.interval) + if not np.all(timestamp_steps == interval_in_seconds): + self.logger().warning("Candles are malformed. Restarting...") + self._reset_candles() + return + + def _reset_candles(self): + self._ws_candle_available.clear() + self._candles.clear() + async def fetch_candles(self, start_time: Optional[int] = None, end_time: Optional[int] = None, - limit: Optional[int] = 500): + limit: Optional[int] = None): + if start_time is None and end_time is None: + raise ValueError("Either the start time or end time must be specified.") + if limit is None: + limit = self.candles_max_result_per_rest_request - 1 + + candles_to_fetch = min(self.candles_max_result_per_rest_request - 1, limit) + if end_time is None: + end_time = start_time + self.interval_in_seconds * candles_to_fetch + if start_time is None: + start_time = end_time - self.interval_in_seconds * candles_to_fetch + + params = self._get_rest_candles_params(start_time, end_time) + headers = self._get_rest_candles_headers() + rest_assistant = await self._api_factory.get_rest_assistant() + candles = await rest_assistant.execute_request(url=self.candles_url, + throttler_limit_id=self.candles_endpoint, + params=params, + headers=headers) + arr = self._parse_rest_candles(candles, end_time) + return np.array(arr).astype(float) + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = None) -> dict: """ - This is an abstract method that must be implemented by a subclass to fetch candles from the exchange API. - :param start_time: start time to fetch candles - :param end_time: end time to fetch candles - :param limit: quantity of candles - :return: numpy array with the candlesticks + This method returns the parameters for the candles REST request. + + :param start_time: the start time of the candles data to fetch + :param end_time: the end time of the candles data to fetch """ raise NotImplementedError - async def fill_historical_candles(self): + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: """ - This is an abstract method that must be implemented by a subclass to fill the _candles deque with historical candles. + This method parses the candles data fetched from the REST API. + + - Timestamp must be in seconds + - The array must be sorted by timestamp in ascending order. Oldest first, newest last. + - The array must be in the format: [timestamp, open, high, low, close, volume, quote_asset_volume, n_trades, + taker_buy_base_volume, taker_buy_quote_volume] + + :param data: the candles data fetched from the REST API """ raise NotImplementedError + def _get_rest_candles_headers(self): + """ + This method returns the headers for the candles REST request. + """ + pass + + async def fill_historical_candles(self): + """ + This method fills the historical candles in the _candles deque until it reaches the maximum length. 
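+ It waits until the first candle arrives over the websocket, then repeatedly fetches batches ending at the oldest cached timestamp until the deque reaches its maximum length.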
+ """ + while not self.ready: + await self._ws_candle_available.wait() + try: + end_timestamp = int(self._candles[0][0]) + missing_records = self._candles.maxlen - len(self._candles) + candles: np.ndarray = await self.fetch_candles(end_time=end_timestamp, limit=missing_records) + records_to_add = min(missing_records, len(candles)) + self._candles.extendleft(candles[-records_to_add:][::-1]) + except asyncio.CancelledError: + raise + except ValueError: + raise + except Exception: + self.logger().exception( + "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", + ) + await self._sleep(1.0) + self.check_candles_sorted_and_equidistant(self._candles) + async def listen_for_subscriptions(self): """ Connects to the candlestick websocket endpoint and listens to the messages sent by the @@ -199,21 +312,102 @@ async def listen_for_subscriptions(self): async def _connected_websocket_assistant(self) -> WSAssistant: ws: WSAssistant = await self._api_factory.get_ws_assistant() - await ws.connect(ws_url=self.wss_url, - ping_timeout=30) + await ws.connect(ws_url=self.wss_url, ping_timeout=self._ping_timeout) return ws + @property + def _ping_payload(self): + return None + async def _subscribe_channels(self, ws: WSAssistant): """ Subscribes to the candles events through the provided websocket connection. :param ws: the websocket assistant used to connect to the exchange """ + try: + subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=self.ws_subscription_payload()) + await ws.send(subscribe_candles_request) + self.logger().info("Subscribed to public klines...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to public klines...", + exc_info=True + ) + raise + + def ws_subscription_payload(self): + """ + This method returns the subscription payload for the websocket connection. 
+ """ raise NotImplementedError + async def _process_websocket_messages_task(self, websocket_assistant: WSAssistant): + # TODO: Isolate ping pong logic + async for ws_response in websocket_assistant.iter_messages(): + data = ws_response.data + parsed_message = self._parse_websocket_message(data) + # parsed messages may be ping or pong messages + if isinstance(parsed_message, WSJSONRequest): + await websocket_assistant.send(request=parsed_message) + elif isinstance(parsed_message, dict): + candles_row = np.array([parsed_message["timestamp"], + parsed_message["open"], + parsed_message["high"], + parsed_message["low"], + parsed_message["close"], + parsed_message["volume"], + parsed_message["quote_asset_volume"], + parsed_message["n_trades"], + parsed_message["taker_buy_base_volume"], + parsed_message["taker_buy_quote_volume"]]).astype(float) + if len(self._candles) == 0: + self._candles.append(candles_row) + self._ws_candle_available.set() + safe_ensure_future(self.fill_historical_candles()) + else: + latest_timestamp = int(self._candles[-1][0]) + current_timestamp = int(parsed_message["timestamp"]) + if current_timestamp > latest_timestamp: + self._candles.append(candles_row) + elif current_timestamp == latest_timestamp: + self._candles[-1] = candles_row + async def _process_websocket_messages(self, websocket_assistant: WSAssistant): + while True: + try: + await asyncio.wait_for(self._process_websocket_messages_task(websocket_assistant=websocket_assistant), + timeout=self._ping_timeout) + except asyncio.TimeoutError: + if self._ping_timeout is not None: + ping_request = WSJSONRequest(payload=self._ping_payload) + await websocket_assistant.send(request=ping_request) + + def _parse_websocket_message(self, data: dict): + """ + This method must be implemented by a subclass to parse the websocket message into a dictionary with the + candlestick data. + + The extracted data is stored in a dict with the following keys: + - timestamp: The timestamp of the candlestick in seconds. + - open: The opening price of the candlestick. + - high: The highest price of the candlestick. + - low: The lowest price of the candlestick. + - close: The closing price of the candlestick. + - volume: The volume of the candlestick. + - quote_asset_volume: The quote asset volume of the candlestick. + - n_trades: The number of trades of the candlestick. + - taker_buy_base_volume: The taker buy base volume of the candlestick. + - taker_buy_quote_volume: The taker buy quote volume of the candlestick. + + :param data: the websocket message data + :return: dictionary with the candlestick data + """ raise NotImplementedError - async def _sleep(self, delay): + @staticmethod + async def _sleep(delay): """ Function added only to facilitate patching the sleep in unit tests without affecting the asyncio module """ @@ -230,3 +424,30 @@ def get_seconds_from_interval(self, interval: str) -> int: :return: number of seconds """ return self.interval_to_seconds[interval] + + @staticmethod + def ensure_timestamp_in_seconds(timestamp: float) -> float: + """ + Ensure the given timestamp is in seconds. + + Args: + - timestamp (int): The input timestamp which could be in seconds, milliseconds, or microseconds. + + Returns: + - int: The timestamp in seconds. + + Raises: + - ValueError: If the timestamp is not in a recognized format. 
+ """ + timestamp_int = int(float(timestamp)) + if timestamp_int >= 1e18: # Nanoseconds + return timestamp_int / 1e9 + elif timestamp_int >= 1e15: # Microseconds + return timestamp_int / 1e6 + elif timestamp_int >= 1e12: # Milliseconds + return timestamp_int / 1e3 + elif timestamp_int >= 1e9: # Seconds + return timestamp_int + else: + raise ValueError( + "Timestamp is not in a recognized format. Must be in seconds, milliseconds, microseconds or nanoseconds.") diff --git a/hummingbot/data_feed/candles_feed/candles_factory.py b/hummingbot/data_feed/candles_feed/candles_factory.py index 492218de3c..f8d61ee3ab 100644 --- a/hummingbot/data_feed/candles_feed/candles_factory.py +++ b/hummingbot/data_feed/candles_feed/candles_factory.py @@ -3,12 +3,21 @@ from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.ascend_ex_spot_candles import AscendExSpotCandles from hummingbot.data_feed.candles_feed.binance_perpetual_candles import BinancePerpetualCandles from hummingbot.data_feed.candles_feed.binance_spot_candles import BinanceSpotCandles +from hummingbot.data_feed.candles_feed.bybit_perpetual_candles.bybit_perpetual_candles import BybitPerpetualCandles +from hummingbot.data_feed.candles_feed.bybit_spot_candles.bybit_spot_candles import BybitSpotCandles from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.data_feed.candles_feed.gate_io_perpetual_candles import GateioPerpetualCandles from hummingbot.data_feed.candles_feed.gate_io_spot_candles import GateioSpotCandles +from hummingbot.data_feed.candles_feed.hyperliquid_perpetual_candles.hyperliquid_perpetual_candles import ( + HyperliquidPerpetualCandles, +) +from hummingbot.data_feed.candles_feed.hyperliquid_spot_candles.hyperliquid_spot_candles import HyperliquidSpotCandles from hummingbot.data_feed.candles_feed.kraken_spot_candles.kraken_spot_candles import KrakenSpotCandles +from hummingbot.data_feed.candles_feed.kucoin_perpetual_candles.kucoin_perpetual_candles import KucoinPerpetualCandles from hummingbot.data_feed.candles_feed.kucoin_spot_candles.kucoin_spot_candles import KucoinSpotCandles +from hummingbot.data_feed.candles_feed.mexc_perpetual_candles.mexc_perpetual_candles import MexcPerpetualCandles +from hummingbot.data_feed.candles_feed.mexc_spot_candles.mexc_spot_candles import MexcSpotCandles from hummingbot.data_feed.candles_feed.okx_perpetual_candles.okx_perpetual_candles import OKXPerpetualCandles from hummingbot.data_feed.candles_feed.okx_spot_candles.okx_spot_candles import OKXSpotCandles @@ -33,10 +42,17 @@ class CandlesFactory: "gate_io": GateioSpotCandles, "gate_io_perpetual": GateioPerpetualCandles, "kucoin": KucoinSpotCandles, + "kucoin_perpetual": KucoinPerpetualCandles, "ascend_ex": AscendExSpotCandles, "okx_perpetual": OKXPerpetualCandles, "okx": OKXSpotCandles, - "kraken": KrakenSpotCandles + "kraken": KrakenSpotCandles, + "mexc": MexcSpotCandles, + "mexc_perpetual": MexcPerpetualCandles, + "bybit": BybitSpotCandles, + "bybit_perpetual": BybitPerpetualCandles, + "hyperliquid": HyperliquidSpotCandles, + "hyperliquid_perpetual": HyperliquidPerpetualCandles } @classmethod diff --git a/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/constants.py b/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/constants.py index ab8e935bd3..14f1afd2c0 100644 --- a/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/constants.py +++ 
b/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/constants.py @@ -22,6 +22,7 @@ "1d": "1d", "7d": "7d", }) +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 2000 PUBLIC_URL_POINTS_LIMIT_ID = "PublicPoints" RATE_LIMITS = [ diff --git a/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/gate_io_perpetual_candles.py b/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/gate_io_perpetual_candles.py index 4ddf6bf851..56cbd4060b 100644 --- a/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/gate_io_perpetual_candles.py +++ b/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/gate_io_perpetual_candles.py @@ -1,13 +1,8 @@ -import asyncio import logging import time -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.data_feed.candles_feed.gate_io_perpetual_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -46,6 +41,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -54,13 +57,8 @@ def rate_limits(self): def intervals(self): return CONSTANTS.INTERVALS - async def start_network(self): - """ - This method starts the network and starts a task for listen_for_subscriptions. 
- """ - await self.stop_network() + async def initialize_exchange_data(self): await self.get_exchange_trading_pair_quanto_multiplier() - self._listen_candles_task = safe_ensure_future(self.listen_for_subscriptions()) async def check_network(self) -> NetworkStatus: rest_assistant = await self._api_factory.get_rest_assistant() @@ -81,117 +79,60 @@ async def get_exchange_trading_pair_quanto_multiplier(self): self.quanto_multiplier = quanto_multiplier return quanto_multiplier - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 500): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"contract": self._ex_trading_pair, "interval": self.interval, "limit": limit} - - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://www.gate.io/docs/developers/apiv4/#get-futures-candlesticks + """ + return { + "contract": self._ex_trading_pair, + "interval": self.interval, + "from": start_time, + "to": end_time + } + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: new_hb_candles = [] - for i in candles: - timestamp_ms = i.get("t") * 1e3 + for i in data: + timestamp = i.get("t") + if timestamp == end_time: + continue open = i.get("o") high = i.get("h") low = i.get("l") close = i.get("c") volume = i.get("v") * self.quanto_multiplier quote_asset_volume = i.get("sum") - # no data field n_trades = 0 taker_buy_base_volume = 0 taker_buy_quote_volume = 0 - new_hb_candles.append([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume]) - return np.array(new_hb_candles).astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1000) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(int(self._candles[0][0]) * 1e-3) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - payload = { - "time": int(time.time()), - "channel": CONSTANTS.WS_CANDLES_ENDPOINT, - "event": "subscribe", - "payload": [self.interval, self._ex_trading_pair] - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - if self.interval == '1m': - self.logger().warning("The 1m K-line on gateioperpetual is currently not accurate due to discrepancies between the official ws and rs data...") - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - - if data.get("event") == "update" and data.get("channel") == "futures.candlesticks": - for i in data["result"]: - timestamp_ms = int(i["t"] * 1e3) - open = i["o"] - high = i["h"] - low = i["l"] - close = i["c"] - volume = i["v"] * self.quanto_multiplier - # no data field - quote_asset_volume = 0 - n_trades = 0 - taker_buy_base_volume = 0 - taker_buy_quote_volume = 0 - if len(self._candles) == 0: - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp_ms > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 1w, 30d interval). - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp_ms == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + new_hb_candles.append([self.ensure_timestamp_in_seconds(timestamp), open, high, low, close, volume, + quote_asset_volume, n_trades, taker_buy_base_volume, taker_buy_quote_volume]) + return new_hb_candles + + def ws_subscription_payload(self): + return { + "time": int(time.time()), + "channel": CONSTANTS.WS_CANDLES_ENDPOINT, + "event": "subscribe", + "payload": [self.interval, self._ex_trading_pair] + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data.get("event") == "update" and data.get("channel") == "futures.candlesticks": + for i in data["result"]: + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(i["t"]) + candles_row_dict["open"] = i["o"] + candles_row_dict["high"] = i["h"] + candles_row_dict["low"] = i["l"] + candles_row_dict["close"] = i["c"] + candles_row_dict["volume"] = i["v"] * self.quanto_multiplier + candles_row_dict["quote_asset_volume"] = i.get("sum", 0) + candles_row_dict["n_trades"] = 0 + candles_row_dict["taker_buy_base_volume"] = 0 + candles_row_dict["taker_buy_quote_volume"] = 0 + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/gate_io_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/gate_io_spot_candles/constants.py index a6a936aa54..2881dbced9 100644 --- a/hummingbot/data_feed/candles_feed/gate_io_spot_candles/constants.py +++ b/hummingbot/data_feed/candles_feed/gate_io_spot_candles/constants.py @@ 
-23,12 +23,15 @@ "7d": "7d", "30d": "30d", }) +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1000 +MAX_CANDLES_AGO = 10_000 PUBLIC_URL_POINTS_LIMIT_ID = "PublicPoints" +PUBLIC_ENDPOINT_LIMIT = 200 RATE_LIMITS = [ - RateLimit(limit_id=PUBLIC_URL_POINTS_LIMIT_ID, limit=900, time_interval=1), - RateLimit(limit_id=HEALTH_CHECK_ENDPOINT, limit=900, time_interval=1, + RateLimit(limit_id=PUBLIC_URL_POINTS_LIMIT_ID, limit=PUBLIC_ENDPOINT_LIMIT, time_interval=2), + RateLimit(limit_id=HEALTH_CHECK_ENDPOINT, limit=PUBLIC_ENDPOINT_LIMIT, time_interval=2, linked_limits=[LinkedLimitWeightPair(PUBLIC_URL_POINTS_LIMIT_ID)]), - RateLimit(limit_id=CANDLES_ENDPOINT, limit=900, time_interval=1, + RateLimit(limit_id=CANDLES_ENDPOINT, limit=PUBLIC_ENDPOINT_LIMIT, time_interval=2, linked_limits=[LinkedLimitWeightPair(PUBLIC_URL_POINTS_LIMIT_ID)]), ] diff --git a/hummingbot/data_feed/candles_feed/gate_io_spot_candles/gate_io_spot_candles.py b/hummingbot/data_feed/candles_feed/gate_io_spot_candles/gate_io_spot_candles.py index 2a73eb5905..ad0a709edc 100644 --- a/hummingbot/data_feed/candles_feed/gate_io_spot_candles/gate_io_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/gate_io_spot_candles/gate_io_spot_candles.py @@ -1,13 +1,8 @@ -import asyncio import logging import time -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.data_feed.candles_feed.gate_io_spot_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -45,6 +40,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -62,22 +65,36 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair.replace("-", "_") - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 500): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"currency_pair": self._ex_trading_pair, "interval": self.interval, "limit": limit} - if start_time: - params["from"] = start_time - if end_time: - params["to"] = end_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://www.gate.io/docs/developers/apiv4/en/#market-candlesticks + + This API only accepts a limit of 10000 candles ago. 
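+
+        Illustrative example: with interval "1m", passing a start_time more than 10,000 minutes
+        (MAX_CANDLES_AGO candles) in the past raises ValueError before any request is sent;
+        otherwise the requested window is forwarded unchanged as the "from"/"to" parameters.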
+ """ + if start_time is None: + start_time = end_time + if end_time is None: + end_time = start_time + candles_ago = (int(time.time()) - start_time) // self.interval_in_seconds + if candles_ago > CONSTANTS.MAX_CANDLES_AGO: + raise ValueError("Gate.io REST API does not support fetching more than 10000 candles ago.") + return { + "currency_pair": self._ex_trading_pair, + "interval": self.interval, + "from": start_time, + "to": end_time + } + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: new_hb_candles = [] - for i in candles: - timestamp_ms = i[0] + "000" + for i in data: + timestamp = self.ensure_timestamp_in_seconds(i[0]) + if timestamp == end_time: + continue open = i[5] high = i[3] low = i[4] @@ -88,90 +105,30 @@ async def fetch_candles(self, n_trades = 0 taker_buy_base_volume = 0 taker_buy_quote_volume = 0 - new_hb_candles.append([timestamp_ms, open, high, low, close, volume, + new_hb_candles.append([timestamp, open, high, low, close, volume, quote_asset_volume, n_trades, taker_buy_base_volume, taker_buy_quote_volume]) - return np.array(new_hb_candles).astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1000) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(int(self._candles[0][0]) * 1e-3) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - payload = { - "time": int(time.time()), - "channel": CONSTANTS.WS_CANDLES_ENDPOINT, - "event": "subscribe", - "payload": [self.interval, self._ex_trading_pair] - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data.get("event") == "update" and data.get("channel") == "spot.candlesticks": - timestamp_ms = int(data["result"]["t"] + "000") - open = data["result"]["o"] - high = data["result"]["h"] - low = data["result"]["l"] - close = data["result"]["c"] - volume = data["result"]["v"] - quote_asset_volume = data["result"]["a"] - # no data field - n_trades = 0 - taker_buy_base_volume = 0 - taker_buy_quote_volume = 0 - if len(self._candles) == 0: - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp_ms > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 30d interval). - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp_ms == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp_ms, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + return new_hb_candles + + def ws_subscription_payload(self): + return { + "time": int(time.time()), + "channel": CONSTANTS.WS_CANDLES_ENDPOINT, + "event": "subscribe", + "payload": [self.interval, self._ex_trading_pair] + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data.get("event") == "update" and data.get("channel") == "spot.candlesticks": + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(data["result"]["t"]) + candles_row_dict["open"] = data["result"]["o"] + candles_row_dict["high"] = data["result"]["h"] + candles_row_dict["low"] = data["result"]["l"] + candles_row_dict["close"] = data["result"]["c"] + candles_row_dict["volume"] = data["result"]["a"] + candles_row_dict["quote_asset_volume"] = data["result"]["v"] + candles_row_dict["n_trades"] = 0 + candles_row_dict["taker_buy_base_volume"] = 0 + candles_row_dict["taker_buy_quote_volume"] = 0 + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/__init__.py b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/__init__.py new file mode 100644 index 0000000000..7bcc2f4bc3 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/__init__.py @@ -0,0 +1,5 @@ +from hummingbot.data_feed.candles_feed.hyperliquid_perpetual_candles.hyperliquid_perpetual_candles import ( + HyperliquidPerpetualCandles, +) + +__all__ = ["HyperliquidPerpetualCandles"] diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/constants.py 
b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/constants.py new file mode 100644 index 0000000000..d5d5168c3e --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/constants.py @@ -0,0 +1,31 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api.hyperliquid.xyz/info" +HEALTH_CHECK_PAYLOAD = {"type": "meta"} +CANDLES_ENDPOINT = "candleSnapshot" + +WSS_URL = "wss://api.hyperliquid.xyz/ws" + +INTERVALS = bidict({ + "1m": "1m", + "3m": "3m", + "5m": "5m", + "15m": "15m", + "30m": "30m", + "1h": "1h", + "2h": "2h", + "4h": "4h", + "6h": "6h", + "12h": "12h", + "1d": "1d", + "1w": "1w", + "1M": "1M", +}) + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 500 + +RATE_LIMITS = [ + RateLimit(REST_URL, limit=1200, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)]) +] diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/hyperliquid_perpetual_candles.py b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/hyperliquid_perpetual_candles.py new file mode 100644 index 0000000000..c62fea6b4b --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/hyperliquid_perpetual_candles.py @@ -0,0 +1,145 @@ +import logging +from typing import Any, Dict, List, Optional + +import numpy as np + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.data_feed.candles_feed.hyperliquid_spot_candles import constants as CONSTANTS +from hummingbot.logger import HummingbotLogger + + +class HyperliquidPerpetualCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + self._tokens = None + self._base_asset = trading_pair.split("-")[0] + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"hyperliquid_perpetual_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + + @property + def candles_url(self): + return self.rest_url + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + self._tokens = await rest_assistant.execute_request(url=self.rest_url, + method=RESTMethod.POST, + throttler_limit_id=self.rest_url, + data=CONSTANTS.HEALTH_CHECK_PAYLOAD) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "") + + async def fetch_candles(self, start_time: Optional[int] = None, end_time: Optional[int] = None, + limit: Optional[int] = None) -> List[List[float]]: + if limit is None: + limit = self.candles_max_result_per_rest_request - 1 + + 
candles_to_fetch = min(self.candles_max_result_per_rest_request - 1, limit) + reqs = { + "interval": CONSTANTS.INTERVALS[self.interval], + "coin": self._base_asset, + } + if start_time is not None or end_time is not None: + reqs["startTime"] = start_time if start_time is not None else end_time - candles_to_fetch * self.interval_in_seconds + reqs["startTime"] = reqs["startTime"] * 1000 + reqs["endTime"] = end_time if end_time is not None else start_time + candles_to_fetch * self.interval_in_seconds + reqs["endTime"] = reqs["endTime"] * 1000 + payload = { + "type": "candleSnapshot", + "req": reqs + } + headers = self._get_rest_candles_headers() + rest_assistant = await self._api_factory.get_rest_assistant() + candles = await rest_assistant.execute_request(url=self.candles_url, + throttler_limit_id=self.rest_url, + data=payload, + headers=headers, + method=RESTMethod.POST) + arr = self._parse_rest_candles(candles, end_time) + return np.array(arr).astype(float) + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = None) -> dict: + pass + + def _get_rest_candles_headers(self): + return {"Content-Type": "application/json"} + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + if len(data) > 0: + return [ + [self.ensure_timestamp_in_seconds(row["t"]), row["o"], row["h"], row["l"], row["c"], row["v"], 0., + row["n"], 0., 0.] for row in data if self.ensure_timestamp_in_seconds(row["t"]) < end_time + ] + + def ws_subscription_payload(self): + interval = CONSTANTS.INTERVALS[self.interval] + payload = { + "method": "subscribe", + "subscription": { + "type": "candle", + "coin": self._base_asset, + "interval": interval + }, + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("channel") == "candle": + candle = data["data"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["t"]) + candles_row_dict["open"] = candle["o"] + candles_row_dict["low"] = candle["l"] + candles_row_dict["high"] = candle["h"] + candles_row_dict["close"] = candle["c"] + candles_row_dict["volume"] = candle["v"] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = candle["n"] + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. 
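+        # Note: this dict is consumed by CandlesBase._process_websocket_messages_task, which appends a
+        # new row when the parsed timestamp advances and overwrites the last row while the same candle
+        # is still being updated.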
+ return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/__init__.py b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/__init__.py new file mode 100644 index 0000000000..70cd3ceeeb --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.hyperliquid_spot_candles.hyperliquid_spot_candles import HyperliquidSpotCandles + +__all__ = ["HyperliquidSpotCandles"] diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/constants.py new file mode 100644 index 0000000000..18532a1561 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/constants.py @@ -0,0 +1,31 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api.hyperliquid.xyz/info" +HEALTH_CHECK_PAYLOAD = {"type": "spotMeta"} +CANDLES_ENDPOINT = "candleSnapshot" + +WSS_URL = "wss://api.hyperliquid.xyz/ws" + +INTERVALS = bidict({ + "1m": "1m", + "3m": "3m", + "5m": "5m", + "15m": "15m", + "30m": "30m", + "1h": "1h", + "2h": "2h", + "4h": "4h", + "6h": "6h", + "12h": "12h", + "1d": "1d", + "1w": "1w", + "1M": "1M", +}) + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 500 + +RATE_LIMITS = [ + RateLimit(REST_URL, limit=1200, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)]) +] diff --git a/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/hyperliquid_spot_candles.py b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/hyperliquid_spot_candles.py new file mode 100644 index 0000000000..c1bc995ead --- /dev/null +++ b/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/hyperliquid_spot_candles.py @@ -0,0 +1,163 @@ +import asyncio +import logging +from typing import Any, Dict, List, Optional + +import numpy as np + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.data_feed.candles_feed.hyperliquid_spot_candles import constants as CONSTANTS +from hummingbot.logger import HummingbotLogger + + +class HyperliquidSpotCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + self._universe = None + self._coins_dict = None + self._base_asset = trading_pair.split("-")[0] + self._universe_ready = asyncio.Event() + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"hyperliquid_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + + @property + def candles_url(self): + return self.rest_url + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> 
NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.rest_url, + method=RESTMethod.POST, + throttler_limit_id=self.rest_url, + data=CONSTANTS.HEALTH_CHECK_PAYLOAD) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "") + + async def fetch_candles(self, start_time: Optional[int] = None, end_time: Optional[int] = None, + limit: Optional[int] = None) -> List[List[float]]: + if start_time is None and end_time is None: + raise ValueError("Either the start time or end time must be specified.") + + if limit is None: + limit = self.candles_max_result_per_rest_request - 1 + candles_to_fetch = min(self.candles_max_result_per_rest_request - 1, limit) + + reqs = { + "interval": CONSTANTS.INTERVALS[self.interval], + "coin": self._coins_dict[self._base_asset], + } + if start_time: + reqs["startTime"] = start_time * 1000 + else: + reqs["startTime"] = (end_time - candles_to_fetch * self.interval_in_seconds) * 1000 + payload = { + "type": "candleSnapshot", + "req": reqs + } + headers = self._get_rest_candles_headers() + rest_assistant = await self._api_factory.get_rest_assistant() + candles = await rest_assistant.execute_request(url=self.candles_url, + throttler_limit_id=self.rest_url, + data=payload, + headers=headers, + method=RESTMethod.POST) + arr = self._parse_rest_candles(candles, end_time) + return np.array(arr).astype(float) + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = None) -> dict: + pass + + def _get_rest_candles_headers(self): + return {"Content-Type": "application/json"} + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + if len(data) > 0: + return [ + [self.ensure_timestamp_in_seconds(row["t"]), row["o"], row["h"], row["l"], row["c"], row["v"], 0., + row["n"], 0., 0.] for row in data if self.ensure_timestamp_in_seconds(row["t"]) < end_time + ] + + def ws_subscription_payload(self): + interval = CONSTANTS.INTERVALS[self.interval] + payload = { + "method": "subscribe", + "subscription": { + "type": "candle", + "coin": self._coins_dict[self._base_asset], + "interval": interval + }, + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("channel") == "candle": + candle = data["data"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["t"]) + candles_row_dict["open"] = candle["o"] + candles_row_dict["low"] = candle["l"] + candles_row_dict["high"] = candle["h"] + candles_row_dict["close"] = candle["c"] + candles_row_dict["volume"] = candle["v"] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = candle["n"] + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. 
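+        # Only prices, base volume ("v") and trade count ("n") are mapped from the payload; the
+        # quote-asset and taker-side fields are zero-filled so the row matches the shared candle schema.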
+ return candles_row_dict + + async def initialize_exchange_data(self): + await self._initialize_coins_dict() + + async def _initialize_coins_dict(self): + rest_assistant = await self._api_factory.get_rest_assistant() + self._universe = await rest_assistant.execute_request(url=self.rest_url, + method=RESTMethod.POST, + throttler_limit_id=self.rest_url, + data=CONSTANTS.HEALTH_CHECK_PAYLOAD) + universe = {token["tokens"][0]: token["name"] for token in self._universe["universe"]} + tokens = {token["index"]: token["name"] for token in self._universe["tokens"]} + self._coins_dict = {tokens[index]: universe[index] for index in universe.keys()} diff --git a/hummingbot/data_feed/candles_feed/kraken_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/kraken_spot_candles/constants.py index 837875b8b7..a017c4d6f1 100644 --- a/hummingbot/data_feed/candles_feed/kraken_spot_candles/constants.py +++ b/hummingbot/data_feed/candles_feed/kraken_spot_candles/constants.py @@ -23,6 +23,7 @@ "1d": "1440", "1w": "10080", }) +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = MAX_CANDLES_AGO = 720 PUBLIC_ENDPOINT_LIMIT_ID = "PublicPoints" RATE_LIMITS = [ RateLimit( diff --git a/hummingbot/data_feed/candles_feed/kraken_spot_candles/kraken_spot_candles.py b/hummingbot/data_feed/candles_feed/kraken_spot_candles/kraken_spot_candles.py index fbfe11f459..e82c86a63d 100644 --- a/hummingbot/data_feed/candles_feed/kraken_spot_candles/kraken_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/kraken_spot_candles/kraken_spot_candles.py @@ -1,14 +1,8 @@ -import asyncio import logging -from copy import deepcopy +import time from typing import List, Optional -import numpy as np -import pandas as pd - from hummingbot.core.network_iterator import NetworkStatus -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.data_feed.candles_feed.kraken_spot_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -48,6 +42,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -56,19 +58,14 @@ def rate_limits(self): def intervals(self): return CONSTANTS.INTERVALS - @property - def candles_df(self) -> pd.DataFrame: - df = pd.DataFrame(self._candles, columns=self.columns, dtype=float) - df["timestamp"] = df["timestamp"] * 1000 - return df.sort_values(by="timestamp", ascending=True) - async def check_network(self) -> NetworkStatus: rest_assistant = await self._api_factory.get_rest_assistant() await rest_assistant.execute_request(url=self.health_check_url, throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) return NetworkStatus.CONNECTED - def convert_to_exchange_symbol(self, symbol: str) -> str: + @staticmethod + def convert_to_exchange_symbol(symbol: str) -> str: inverted_kraken_to_hb_map = {v: k for k, v in CONSTANTS.KRAKEN_TO_HB_MAP.items()} return inverted_kraken_to_hb_map.get(symbol, symbol) @@ -89,128 +86,62 @@ def get_exchange_trading_pair(self, hb_trading_pair: str, delimiter: str = "") - exchange_trading_pair = f"{base}{delimiter}{quote}" return exchange_trading_pair - async def fetch_candles(self, - 
start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 720): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"pair": self._ex_trading_pair, "interval": CONSTANTS.INTERVALS[self.interval], "since": start_time} - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - - data: List = next(iter(candles["result"].values())) + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://docs.kraken.com/rest/#tag/Spot-Market-Data/operation/getOHLCData + This endpoint allows you to return up to 3600 candles ago. + """ + candles_ago = (int(time.time()) - start_time) // self.interval_in_seconds + if candles_ago > CONSTANTS.MAX_CANDLES_AGO: + raise ValueError("Kraken REST API does not support fetching more than 720 candles ago.") + return {"pair": self._ex_trading_pair, "interval": CONSTANTS.INTERVALS[self.interval], + "since": start_time} + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + data: List = next(iter(data["result"].values())) new_hb_candles = [] for i in data: - timestamp = int(float(i[0])) + timestamp = self.ensure_timestamp_in_seconds(float(i[0])) open = i[1] high = i[2] low = i[3] close = i[4] volume = i[6] - # vwap = i[5] Volume weighted average price within interval quote_asset_volume = float(volume) * float(i[5]) - # no data field n_trades = 0 taker_buy_base_volume = 0 taker_buy_quote_volume = 0 new_hb_candles.append([timestamp, open, high, low, close, volume, quote_asset_volume, n_trades, taker_buy_base_volume, taker_buy_quote_volume]) - return np.array(new_hb_candles).astype(float) - - async def fill_historical_candles(self): - # Note: the last entry in the OHLC array is for the current, not-yet-committed frame and will always be present, regardless of the value of since. - max_request_needed = (self._candles.maxlen // 720) + 1 - requests_executed = 0 - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - end_timestamp = int(self._candles[0][0]) + 1 - start_time = end_timestamp - (720 * self.get_seconds_from_interval(self.interval)) + 1 - candles = await self.fetch_candles(start_time=start_time, end_time=end_timestamp) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - # self._candles.extendleft(candles[::-1][-(missing_records + 1):-1]) - self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - payload = { - "event": "subscribe", - "pair": [self.get_exchange_trading_pair(self._trading_pair, '/')], - "subscription": {"name": CONSTANTS.WS_CANDLES_ENDPOINT, - "interval": int(CONSTANTS.INTERVALS[self.interval])} - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: List = ws_response.data - if not (type(data) is dict and "event" in data.keys() and - data["event"] in ["heartbeat", "systemStatus", "subscriptionStatus"]): - if data[-2][:4] == "ohlc": - timestamp = int(float(data[1][1])) - int(CONSTANTS.INTERVALS[self.interval]) * 60 - open = data[1][2] - high = data[1][3] - low = data[1][4] - close = data[1][5] - volume = data[1][7] - # vwap = data[1][6] Volume weighted average price within interval - quote_asset_volume = float(volume) * float(data[1][6]) - # no data field - n_trades = 0 - taker_buy_base_volume = 0 - taker_buy_quote_volume = 0 - if len(self._candles) == 0: - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - await self.fill_historical_candles() - elif timestamp > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 30d interval). - interval = int(CONSTANTS.INTERVALS[self.interval]) * 60 - total_interval_time = timestamp - int(self._candles[-1][0]) - the_number_of_interval = total_interval_time // interval - if the_number_of_interval >= 2: - for i in range(1, the_number_of_interval): - old_data = deepcopy(self._candles[-1]) - new_timestamp = int(self._candles[-1][0]) + interval - old_data[0] = new_timestamp - self._candles.append(old_data) - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) - elif timestamp == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume])) + return [candle for candle in new_hb_candles if candle[0] < end_time] + + def ws_subscription_payload(self): + return { + "event": "subscribe", + "pair": [self.get_exchange_trading_pair(self._trading_pair, '/')], + "subscription": {"name": CONSTANTS.WS_CANDLES_ENDPOINT, + "interval": int(CONSTANTS.INTERVALS[self.interval])} + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if not (type(data) is dict and "event" in data.keys() and + data["event"] in ["heartbeat", "systemStatus", "subscriptionStatus"]): + if data[-2][:4] == "ohlc": + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(data[1][1]) - self.interval_in_seconds + candles_row_dict["open"] = data[1][2] + candles_row_dict["high"] = data[1][3] + candles_row_dict["low"] = data[1][4] + candles_row_dict["close"] = data[1][5] + candles_row_dict["volume"] = data[1][7] + candles_row_dict["quote_asset_volume"] = float(data[1][7]) * float(data[1][6]) + 
candles_row_dict["n_trades"] = 0 + candles_row_dict["taker_buy_base_volume"] = 0 + candles_row_dict["taker_buy_quote_volume"] = 0 + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/__init__.py b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/__init__.py new file mode 100644 index 0000000000..d689a9132f --- /dev/null +++ b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.kucoin_perpetual_candles.kucoin_perpetual_candles import KucoinPerpetualCandles + +__all__ = ["KucoinPerpetualCandles"] diff --git a/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/constants.py b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/constants.py new file mode 100644 index 0000000000..662ee808f7 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/constants.py @@ -0,0 +1,64 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api-futures.kucoin.com" +HEALTH_CHECK_ENDPOINT = "/api/v1/timestamp" +CANDLES_ENDPOINT = "/api/v1/kline/query" +SYMBOLS_ENDPOINT = "/api/v1/contracts/active" + +HB_TO_KUCOIN_MAP = { + "BTC": "XBT", +} + +PUBLIC_WS_DATA_PATH_URL = "/api/v1/bullet-public" + +KLINE_PUSH_WEB_SOCKET_TOPIC = "/contractMarket/limitCandle" + +INTERVALS = bidict({ + "1m": "1min", + "3m": "3min", + "5m": "5min", + "15m": "15min", + "30m": "30min", + "1h": "1hour", + "2h": "2hour", + "4h": "4hour", + "6h": "6hour", + "8h": "8hour", + "12h": "12hour", + "1d": "1day", + "1w": "1week", + "1M": "1month", +}) + +GRANULARITIES = bidict({ + "1m": 1, + "5m": 5, + "15m": 15, + "30m": 30, + "1h": 60, + "2h": 120, + "4h": 240, + "6h": 480, + "8h": 720, + "12h": 1440, + "1d": 10080, +}) +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 500 +REQUEST_WEIGHT = "REQUEST_WEIGHT" +MAX_REQUEST = 2000 +TIME_INTERVAL = 30 + + +RATE_LIMITS = [ + RateLimit(limit_id=REQUEST_WEIGHT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL), + RateLimit(limit_id=CANDLES_ENDPOINT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=3)]), + RateLimit(limit_id=SYMBOLS_ENDPOINT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=3)]), + RateLimit(limit_id=HEALTH_CHECK_ENDPOINT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=2)]), + RateLimit(limit_id=PUBLIC_WS_DATA_PATH_URL, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=10)]), +] diff --git a/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/kucoin_perpetual_candles.py b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/kucoin_perpetual_candles.py new file mode 100644 index 0000000000..d670e4d09e --- /dev/null +++ b/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/kucoin_perpetual_candles.py @@ -0,0 +1,185 @@ +import logging +import time +from typing import Any, Dict, List, Optional + +import pandas as pd + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.utils.tracking_nonce import get_tracking_nonce +from hummingbot.core.web_assistant.connections.data_types import RESTMethod +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.data_feed.candles_feed.kucoin_perpetual_candles import constants as CONSTANTS +from 
hummingbot.logger import HummingbotLogger + + +class KucoinPerpetualCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + _last_ws_message_sent_timestamp = 0 + _ping_interval = 0 + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1min", max_records: int = 150): + self.symbols_dict = {} + self.hb_base_asset = trading_pair.split("-")[0] + self.quote_asset = trading_pair.split("-")[1] + self.kucoin_base_asset = self.get_kucoin_base_asset() + super().__init__(trading_pair, interval, max_records) + + def get_kucoin_base_asset(self): + for hb_asset, kucoin_value in CONSTANTS.HB_TO_KUCOIN_MAP.items(): + return kucoin_value if hb_asset == self.hb_base_asset else self.hb_base_asset + + @property + def name(self): + return f"kucoin_perpetual_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return f"{self._ws_url}?token={self._ws_token}" + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def symbols_url(self): + return self.rest_url + CONSTANTS.SYMBOLS_ENDPOINT + + @property + def public_ws_url(self): + return self.rest_url + CONSTANTS.PUBLIC_WS_DATA_PATH_URL + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + @property + def candles_df(self) -> pd.DataFrame: + df = pd.DataFrame(self._candles, columns=self.columns, dtype=float) + return df.sort_values(by="timestamp", ascending=True) + + @property + def _ping_payload(self): + return { + "type": "ping", + } + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return f"{self.kucoin_base_asset}-{self.quote_asset}" + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = None) -> dict: + """ + For API documentation, please refer to: + https://www.kucoin.com/docs/rest/futures-trading/market-data/get-klines + """ + params = { + "symbol": self.symbols_dict[f"{self.kucoin_base_asset}-{self.quote_asset}"], + "granularity": CONSTANTS.GRANULARITIES[self.interval], + } + if end_time: + params["to"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [[self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], 0., 0., 0., 0.] 
+ for row in data['data'] if self.ensure_timestamp_in_seconds(row[0]) < end_time] + + def ws_subscription_payload(self): + topic_candle = f"{self.symbols_dict[self._ex_trading_pair]}_{CONSTANTS.INTERVALS[self.interval]}" + payload = { + "id": str(get_tracking_nonce()), + "type": "subscribe", + "topic": f"/contractMarket/limitCandle:{topic_candle}", + "privateChannel": False, + "response": True, + } + return payload + + def _parse_websocket_message(self, data: dict): + candles_row_dict: Dict[str, Any] = {} + if data.get("data") is not None: + if "candles" in data["data"]: + candles = data["data"]["candles"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(int(candles[0])) + candles_row_dict["open"] = candles[1] + candles_row_dict["close"] = candles[2] + candles_row_dict["high"] = candles[3] + candles_row_dict["low"] = candles[4] + candles_row_dict["volume"] = candles[5] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict + + @staticmethod + def _time(): + return time.time() + + async def initialize_exchange_data(self) -> Dict[str, Any]: + await self._get_symbols_dict() + await self._get_ws_token() + + async def _get_symbols_dict(self): + try: + rest_assistant = await self._api_factory.get_rest_assistant() + response = await rest_assistant.execute_request(url=self.symbols_url, + throttler_limit_id=CONSTANTS.SYMBOLS_ENDPOINT) + symbols = response["data"] + symbols_dict = {} + for symbol in symbols: + symbols_dict[f"{symbol['baseCurrency']}-{symbol['quoteCurrency']}"] = symbol["symbol"] + self.symbols_dict = symbols_dict + except Exception: + self.logger().error("Error fetching symbols from Kucoin.") + raise + + async def _get_ws_token(self): + try: + rest_assistant = await self._api_factory.get_rest_assistant() + connection_info = await rest_assistant.execute_request( + url=self.public_ws_url, + method=RESTMethod.POST, + throttler_limit_id=CONSTANTS.PUBLIC_WS_DATA_PATH_URL, + ) + + self._ws_url = connection_info["data"]["instanceServers"][0]["endpoint"] + self._ping_timeout = int(connection_info["data"]["instanceServers"][0]["pingTimeout"]) * 1e-3 + self._ws_token = connection_info["data"]["token"] + except Exception: + self.logger().error("Error fetching WS token from Kucoin.") + raise diff --git a/hummingbot/data_feed/candles_feed/kucoin_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/kucoin_spot_candles/constants.py index 0ce530ad89..22aa65f08f 100644 --- a/hummingbot/data_feed/candles_feed/kucoin_spot_candles/constants.py +++ b/hummingbot/data_feed/candles_feed/kucoin_spot_candles/constants.py @@ -1,8 +1,6 @@ -import sys - from bidict import bidict -from hummingbot.core.api_throttler.data_types import RateLimit +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit REST_URL = "https://api.kucoin.com" HEALTH_CHECK_ENDPOINT = "/api/v1/timestamp" @@ -13,6 +11,7 @@ INTERVALS = bidict({ "1m": "1min", "3m": "3min", + "5m": "5min", "15m": "15min", "30m": "30min", "1h": "1hour", @@ -23,12 +22,20 @@ "12h": "12hour", "1d": "1day", "1w": "1week", + "1M": "1month" }) +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1500 +MAX_REQUEST = 4000 +TIME_INTERVAL = 30 REQUEST_WEIGHT = "REQUEST_WEIGHT" -NO_LIMIT = sys.maxsize RATE_LIMITS = [ - RateLimit(limit_id=PUBLIC_WS_DATA_PATH_URL, limit=NO_LIMIT, time_interval=1), - RateLimit(CANDLES_ENDPOINT, limit=30, time_interval=60), - 
RateLimit(HEALTH_CHECK_ENDPOINT, limit=30, time_interval=60)] + RateLimit(limit_id=REQUEST_WEIGHT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL), + RateLimit(limit_id=CANDLES_ENDPOINT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=3)]), + RateLimit(limit_id=HEALTH_CHECK_ENDPOINT, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=3)]), + RateLimit(limit_id=PUBLIC_WS_DATA_PATH_URL, limit=MAX_REQUEST, time_interval=TIME_INTERVAL, + linked_limits=[LinkedLimitWeightPair(REQUEST_WEIGHT, weight=10)]), +] diff --git a/hummingbot/data_feed/candles_feed/kucoin_spot_candles/kucoin_spot_candles.py b/hummingbot/data_feed/candles_feed/kucoin_spot_candles/kucoin_spot_candles.py index d497d4872d..f7f47061d9 100644 --- a/hummingbot/data_feed/candles_feed/kucoin_spot_candles/kucoin_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/kucoin_spot_candles/kucoin_spot_candles.py @@ -1,15 +1,12 @@ -import asyncio import logging import time -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np import pandas as pd -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.core.utils.tracking_nonce import get_tracking_nonce -from hummingbot.core.web_assistant.connections.data_types import RESTMethod, WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.web_assistant.connections.data_types import RESTMethod from hummingbot.data_feed.candles_feed.candles_base import CandlesBase from hummingbot.data_feed.candles_feed.kucoin_spot_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -28,6 +25,8 @@ def logger(cls) -> HummingbotLogger: def __init__(self, trading_pair: str, interval: str = "1min", max_records: int = 150): super().__init__(trading_pair, interval, max_records) + self._ws_url = None + self._ws_token = None @property def name(self): @@ -39,7 +38,7 @@ def rest_url(self): @property def wss_url(self): - return None + return f"{self._ws_url}?token={self._ws_token}" @property def health_check_url(self): @@ -49,6 +48,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def public_ws_url(self): return self.rest_url + CONSTANTS.PUBLIC_WS_DATA_PATH_URL @@ -66,6 +73,12 @@ def candles_df(self) -> pd.DataFrame: df = pd.DataFrame(self._candles, columns=self.columns, dtype=float) return df.sort_values(by="timestamp", ascending=True) + @property + def _ping_payload(self): + return { + "type": "ping" + } + async def check_network(self) -> NetworkStatus: rest_assistant = await self._api_factory.get_rest_assistant() await rest_assistant.execute_request(url=self.health_check_url, @@ -75,124 +88,52 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 1500): - rest_assistant = await self._api_factory.get_rest_assistant() + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + 
limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://www.kucoin.com/docs/rest/spot-trading/market-data/get-klines + """ params = {"symbol": self._ex_trading_pair, "type": CONSTANTS.INTERVALS[self.interval]} if start_time: - params["startAt"] = start_time // 1000 + params["startAt"] = start_time if end_time: - params["endAt"] = end_time // 1000 - else: - params["endAt"] = start_time // 1000 + (limit * self.get_seconds_from_interval(self.interval)) - - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - candles = np.array([[row[0], row[1], row[3], row[4], row[2], row[5], row[6], 0., 0., 0.] for row in candles['data']]).astype(float) - candles[:, 0] = candles[:, 0] * 1000 - return candles[::-1] - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // 1500) + 1 - requests_executed = 0 - while not self.ready: - # missing_records = self._candles.maxlen - len(self._candles) - try: - if requests_executed < max_request_needed: - end_timestamp = int(self._candles[-1][0] + 1000) - # we have to add one more since, the last row is not going to be included - start_time = (end_timestamp - (1500 * self.get_seconds_from_interval(self.interval)) * 1000) + 1000 - candles = await self.fetch_candles(end_time=end_timestamp, start_time=start_time, limit=1500) - # we are computing agaefin the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. 
- :param ws: the websocket assistant used to connect to the exchange - """ - try: - payload = { - "id": str(get_tracking_nonce()), - "type": "subscribe", - "topic": f"/market/candles:{self._ex_trading_pair}_{CONSTANTS.INTERVALS[self.interval]}", - "privateChannel": False, - "response": False, - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - while True: - try: - seconds_until_next_ping = self._ping_interval - (self._time() - self._last_ws_message_sent_timestamp) - await asyncio.wait_for(self._process_websocket_messages_from_candles(websocket_assistant=websocket_assistant), - timeout=seconds_until_next_ping) - except asyncio.TimeoutError: - payload = { - "id": str(get_tracking_nonce()), - "type": "ping", - } - ping_request = WSJSONRequest(payload=payload) - self._last_ws_message_sent_timestamp = self._time() - await websocket_assistant.send(request=ping_request) - - async def _process_websocket_messages_from_candles(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data is not None and data.get( - "subject") == "trade.candles.update": # data will be None when the websocket is disconnected - candles = data["data"]["candles"] - timestamp = int(candles[0]) * 1000 - open = candles[1] - close = candles[2] - high = candles[3] - low = candles[4] - volume = candles[5] - quote_asset_volume = candles[6] - n_trades = 0. - taker_buy_base_volume = 0. - taker_buy_quote_volume = 0. - candles_array = np.array([timestamp, open, high, low, close, volume, quote_asset_volume, n_trades, - taker_buy_base_volume, taker_buy_quote_volume]).astype(float) - if len(self._candles) == 0: - self._candles.append(candles_array) - safe_ensure_future(self.fill_historical_candles()) - elif timestamp > int(self._candles[-1][0]): - # TODO: validate also that the diff of timestamp == interval (issue with 1M interval). - self._candles.append(candles_array) - elif timestamp == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(candles_array) - - async def _connected_websocket_assistant(self) -> WSAssistant: + params["endAt"] = end_time + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [[self.ensure_timestamp_in_seconds(row[0]), row[1], row[3], row[4], row[2], row[5], row[6], 0., 0., 0.] 
+ for row in data['data']][::-1] + + def ws_subscription_payload(self): + return { + "id": str(get_tracking_nonce()), + "type": "subscribe", + "topic": f"/market/candles:{self._ex_trading_pair}_{CONSTANTS.INTERVALS[self.interval]}", + "privateChannel": False, + "response": False, + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data is not None and data.get( + "subject") == "trade.candles.update": # data will be None when the websocket is disconnected + candles = data["data"]["candles"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candles[0]) + candles_row_dict["open"] = candles[1] + candles_row_dict["close"] = candles[2] + candles_row_dict["high"] = candles[3] + candles_row_dict["low"] = candles[4] + candles_row_dict["volume"] = candles[5] + candles_row_dict["quote_asset_volume"] = candles[6] + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict + + async def initialize_exchange_data(self): rest_assistant = await self._api_factory.get_rest_assistant() connection_info = await rest_assistant.execute_request( url=self.public_ws_url, @@ -200,13 +141,10 @@ async def _connected_websocket_assistant(self) -> WSAssistant: throttler_limit_id=CONSTANTS.PUBLIC_WS_DATA_PATH_URL, ) - ws_url = connection_info["data"]["instanceServers"][0]["endpoint"] - self._ping_interval = int(connection_info["data"]["instanceServers"][0]["pingInterval"]) * 0.8 * 1e-3 - token = connection_info["data"]["token"] - - ws: WSAssistant = await self._api_factory.get_ws_assistant() - await ws.connect(ws_url=f"{ws_url}?token={token}", message_timeout=self._ping_interval) - return ws + self._ws_url = connection_info["data"]["instanceServers"][0]["endpoint"] + self._ping_timeout = int(connection_info["data"]["instanceServers"][0]["pingTimeout"]) * 1e-3 + self._ws_token = connection_info["data"]["token"] - def _time(self): + @staticmethod + def _time(): return time.time() diff --git a/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/__init__.py b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/__init__.py new file mode 100644 index 0000000000..142c1e1dd0 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.mexc_perpetual_candles.mexc_perpetual_candles import MexcPerpetualCandles + +__all__ = ["MexcPerpetualCandles"] diff --git a/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/constants.py b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/constants.py new file mode 100644 index 0000000000..2d038e38b6 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/constants.py @@ -0,0 +1,26 @@ +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://contract.mexc.com" +HEALTH_CHECK_ENDPOINT = "/api/v1/contract/ping" +CANDLES_ENDPOINT = "/api/v1/contract/kline" + +WSS_URL = "wss://contract.mexc.com/edge" + +INTERVALS = { + "1m": "Min1", + "5m": "Min5", + "15m": "Min15", + "30m": "Min30", + "1h": "Min60", + "4h": "Hour4", + "8h": "Hour8", + "1d": "Day1", + "1w": "Week1", + "1M": "Month1" +} + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 2000 + +RATE_LIMITS = [ + RateLimit(CANDLES_ENDPOINT, limit=20, time_interval=2, linked_limits=[LinkedLimitWeightPair("raw", 1)]), + RateLimit(HEALTH_CHECK_ENDPOINT, limit=20, time_interval=2, linked_limits=[LinkedLimitWeightPair("raw", 1)])] diff 
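Editor's note on the KuCoin hunk above: `initialize_exchange_data` now caches the bullet-public websocket endpoint, ping timeout, and token, and the `wss_url` property assembles them on demand. A small sketch with an abbreviated, hypothetical response payload (field names follow the hunk):

```python
# Sketch of how the KuCoin websocket URL is assembled from the bullet-public response.
sample_connection_info = {
    "data": {
        "token": "abc123",
        "instanceServers": [
            {"endpoint": "wss://ws-api-spot.kucoin.com/", "pingInterval": 18000, "pingTimeout": 10000}
        ],
    }
}

ws_url = sample_connection_info["data"]["instanceServers"][0]["endpoint"]
ping_timeout_s = int(sample_connection_info["data"]["instanceServers"][0]["pingTimeout"]) * 1e-3
ws_token = sample_connection_info["data"]["token"]

wss_url = f"{ws_url}?token={ws_token}"
print(wss_url)          # wss://ws-api-spot.kucoin.com/?token=abc123
print(ping_timeout_s)   # 10.0
```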
--git a/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/mexc_perpetual_candles.py b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/mexc_perpetual_candles.py new file mode 100644 index 0000000000..1df7f7a029 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/mexc_perpetual_candles.py @@ -0,0 +1,116 @@ +import logging +from typing import Any, Dict, List, Optional + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.data_feed.candles_feed.mexc_perpetual_candles import constants as CONSTANTS +from hummingbot.logger import HummingbotLogger + + +class MexcPerpetualCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"mexc_perpetual_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + "/" + self._ex_trading_pair + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "_") + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://mexcdevelop.github.io/apidocs/spot_v3_en/#kline-candlestick-data + + startTime and endTime must be used at the same time. + """ + params = { + "interval": CONSTANTS.INTERVALS[self.interval], + } + if start_time: + params["startTime"] = start_time * 1000 + if end_time: + params["endTime"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + content = data.get("data") + if content is not None: + ohlc = list(zip(content["time"], content["open"], content["high"], content["low"], content["close"], + content["vol"], content["amount"])) + return [[self.ensure_timestamp_in_seconds(c[0]), c[1], c[2], c[3], c[4], c[5], c[6], 0., 0., 0.] 
for c in ohlc if c[0] < end_time] + + def ws_subscription_payload(self): + return { + "method": "sub.kline", + "param": { + "symbol": self._ex_trading_pair, + "interval": CONSTANTS.INTERVALS[self.interval], + } + } + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("data") is not None and data.get("channel", "") == "push.kline": + candle = data["data"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["t"]) + candles_row_dict["open"] = candle["o"] + candles_row_dict["low"] = candle["l"] + candles_row_dict["high"] = candle["h"] + candles_row_dict["close"] = candle["c"] + candles_row_dict["volume"] = candle["q"] + candles_row_dict["quote_asset_volume"] = candle["a"] + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/mexc_spot_candles/__init__.py b/hummingbot/data_feed/candles_feed/mexc_spot_candles/__init__.py new file mode 100644 index 0000000000..b6663fc6d6 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_spot_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.mexc_spot_candles.mexc_spot_candles import MexcSpotCandles + +__all__ = ["MexcSpotCandles"] diff --git a/hummingbot/data_feed/candles_feed/mexc_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/mexc_spot_candles/constants.py new file mode 100644 index 0000000000..2e8a945a4d --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_spot_candles/constants.py @@ -0,0 +1,40 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://api.mexc.com" +HEALTH_CHECK_ENDPOINT = "/api/v3/ping" +CANDLES_ENDPOINT = "/api/v3/klines" + +WSS_URL = "wss://wbs.mexc.com/ws" + +INTERVALS = bidict({ + "1m": "1m", + "5m": "5m", + "15m": "15m", + "30m": "30m", + "1h": "60m", + "4h": "4h", + "1d": "1d", + "1w": "1W", + "1M": "1M" +}) + +WS_INTERVALS = { + "1m": "Min1", + "5m": "Min5", + "15m": "Min15", + "30m": "Min30", + "1h": "Min60", + "4h": "Hour4", + "8h": "Hour8", + "1d": "Day1", + "1w": "Week1", + "1M": "Month1" +} + +MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST = 1000 + +RATE_LIMITS = [ + RateLimit(CANDLES_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)]), + RateLimit(HEALTH_CHECK_ENDPOINT, limit=20000, time_interval=60, linked_limits=[LinkedLimitWeightPair("raw", 1)])] diff --git a/hummingbot/data_feed/candles_feed/mexc_spot_candles/mexc_spot_candles.py b/hummingbot/data_feed/candles_feed/mexc_spot_candles/mexc_spot_candles.py new file mode 100644 index 0000000000..7f74cc312f --- /dev/null +++ b/hummingbot/data_feed/candles_feed/mexc_spot_candles/mexc_spot_candles.py @@ -0,0 +1,121 @@ +import logging +from typing import Any, Dict, List, Optional + +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.data_feed.candles_feed.mexc_spot_candles import constants as CONSTANTS +from hummingbot.logger import HummingbotLogger + + +class MexcSpotCandles(CandlesBase): + _logger: Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + 
super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"mexc_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "") + + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + """ + For API documentation, please refer to: + https://mexcdevelop.github.io/apidocs/spot_v3_en/#kline-candlestick-data + + startTime and endTime must be used at the same time. + """ + params = { + "symbol": self._ex_trading_pair, + "interval": CONSTANTS.INTERVALS[self.interval], + "limit": limit + } + if start_time: + params["startTime"] = start_time * 1000 + if end_time: + params["endTime"] = end_time * 1000 + return params + + def _get_rest_candles_headers(self): + return {"Content-Type": "application/json"} + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [ + [self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], row[7], + 0., 0., 0.] + for row in data if self.ensure_timestamp_in_seconds(row[0]) < end_time] + + def ws_subscription_payload(self): + trading_pair = self.get_exchange_trading_pair(self._trading_pair) + interval = CONSTANTS.WS_INTERVALS[self.interval] + candle_params = [f"spot@public.kline.v3.api@{trading_pair}@{interval}"] + payload = { + "method": "SUBSCRIPTION", + "params": candle_params, + } + return payload + + def _parse_websocket_message(self, data): + candles_row_dict: Dict[str, Any] = {} + if data is not None and data.get("d") is not None: + candle = data["d"]["k"] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candle["t"]) + candles_row_dict["open"] = candle["o"] + candles_row_dict["low"] = candle["l"] + candles_row_dict["high"] = candle["h"] + candles_row_dict["close"] = candle["c"] + candles_row_dict["volume"] = candle["v"] + candles_row_dict["quote_asset_volume"] = 0. + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. 
+ return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/okx_perpetual_candles/okx_perpetual_candles.py b/hummingbot/data_feed/candles_feed/okx_perpetual_candles/okx_perpetual_candles.py index e6809399aa..c54fb571d7 100644 --- a/hummingbot/data_feed/candles_feed/okx_perpetual_candles/okx_perpetual_candles.py +++ b/hummingbot/data_feed/candles_feed/okx_perpetual_candles/okx_perpetual_candles.py @@ -1,15 +1,8 @@ -import asyncio import logging -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np -import pandas as pd - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.candles_base import CandlesBase -from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig from hummingbot.data_feed.candles_feed.okx_perpetual_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -23,8 +16,7 @@ def logger(cls) -> HummingbotLogger: cls._logger = logging.getLogger(__name__) return cls._logger - def __init__(self, trading_pair: str, interval: str = "1m", - max_records: int = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST): + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): super().__init__(trading_pair, interval, max_records) self.interval_to_milliseconds_dict = { "1s": 1000, @@ -66,6 +58,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -83,120 +83,45 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return f"{trading_pair}-SWAP" - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 100): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"instId": self._ex_trading_pair, "bar": CONSTANTS.INTERVALS[self.interval], "limit": limit} + def _get_rest_candles_params(self, start_time: Optional[int] = None, end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: + params = { + "instId": self._ex_trading_pair, + "bar": CONSTANTS.INTERVALS[self.interval] + } if end_time: - params["after"] = end_time + params["after"] = end_time * 1000 if start_time: - params["before"] = start_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - - arr = [[row[0], row[1], row[2], row[3], row[4], row[6], row[7], 0., 0., 0.] 
for row in candles["data"]] - return np.array(arr).astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(self._candles[0][0]) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def get_historical_candles(self, config: HistoricalCandlesConfig): - try: - all_candles = [] - current_start_time = config.start_time - while current_start_time <= config.end_time: - current_end_time = current_start_time + self.interval_to_milliseconds_dict[config.interval] * CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST - fetched_candles = await self.fetch_candles(end_time=current_end_time) - if fetched_candles.size == 0: - break - - all_candles.append(fetched_candles[::-1]) - last_timestamp = fetched_candles[0][0] # Assuming the first column is the timestamp - current_start_time = int(last_timestamp) - - final_candles = np.concatenate(all_candles, axis=0) if all_candles else np.array([]) - candles_df = pd.DataFrame(final_candles, columns=self.columns) - candles_df.drop_duplicates(subset=["timestamp"], inplace=True) - return candles_df - except Exception as e: - self.logger().exception(f"Error fetching historical candles: {str(e)}") - - async def _subscribe_channels(self, ws: WSAssistant): - """ - Subscribes to the candles events through the provided websocket connection. - :param ws: the websocket assistant used to connect to the exchange - """ - try: - candle_args = [] - candle_args.append({"channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", "instId": self._ex_trading_pair}) - payload = { - "op": "subscribe", - "args": candle_args - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data is not None and "data" in data: # data will be None when the websocket is disconnected - candles = data["data"][0] - timestamp = candles[0] - open = candles[1] - high = candles[2] - low = candles[3] - close = candles[4] - volume = candles[6] - quote_asset_volume = candles[7] - n_trades = 0. - taker_buy_base_volume = 0. - taker_buy_quote_volume = 0. 
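Editor's note: the `fill_historical_candles` loop deleted here (its logic now lives in `CandlesBase`) pages backwards from the oldest stored candle until the deque is full. A simplified, self-contained illustration of that pattern, with `fetch_page` standing in for the exchange REST call:

```python
# Simplified backfill pattern: page backwards from the oldest stored candle.
from collections import deque

INTERVAL_S = 60       # 1m candles
MAX_PER_REQUEST = 3   # small page size for illustration


def fetch_page(end_ts: int, limit: int):
    """Pretend exchange response: `limit` candles ending just before `end_ts`, oldest first."""
    return [[end_ts - (limit - i) * INTERVAL_S, 1.0, 1.0, 1.0, 1.0, 0.0] for i in range(limit)]


candles = deque([[1_700_000_000, 1.0, 1.0, 1.0, 1.0, 0.0]], maxlen=7)

while len(candles) < candles.maxlen:
    missing = candles.maxlen - len(candles)
    oldest_ts = int(candles[0][0])
    page = fetch_page(end_ts=oldest_ts, limit=min(missing, MAX_PER_REQUEST))
    # prepend older rows while keeping chronological order inside the deque
    candles.extendleft(reversed(page))

print([row[0] for row in candles])  # 7 strictly increasing timestamps ending at 1_700_000_000
```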
- - candles_row = np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume]).astype(float) - if len(self._candles) == 0: - self._candles.append(candles_row) - safe_ensure_future(self.fill_historical_candles()) - elif int(timestamp) > int(self._candles[-1][0]): - self._candles.append(candles_row) - elif int(timestamp) == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(candles_row) + params["before"] = start_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [ + [ + self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[6], row[7], 0., 0., 0. + ] + for row in data["data"] if self.ensure_timestamp_in_seconds(row[0]) < end_time + ][::-1] + + def ws_subscription_payload(self): + candle_args = [{"channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", "instId": self._ex_trading_pair}] + return { + "op": "subscribe", + "args": candle_args + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data is not None and "data" in data: # data will be None when the websocket is disconnected + candles = data["data"][0] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candles[0]) + candles_row_dict["open"] = candles[1] + candles_row_dict["high"] = candles[2] + candles_row_dict["low"] = candles[3] + candles_row_dict["close"] = candles[4] + candles_row_dict["volume"] = candles[6] + candles_row_dict["quote_asset_volume"] = candles[7] + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict diff --git a/hummingbot/data_feed/candles_feed/okx_spot_candles/okx_spot_candles.py b/hummingbot/data_feed/candles_feed/okx_spot_candles/okx_spot_candles.py index 938edf2c25..4e4d6b1073 100644 --- a/hummingbot/data_feed/candles_feed/okx_spot_candles/okx_spot_candles.py +++ b/hummingbot/data_feed/candles_feed/okx_spot_candles/okx_spot_candles.py @@ -1,15 +1,8 @@ -import asyncio import logging -from typing import Any, Dict, Optional +from typing import List, Optional -import numpy as np -import pandas as pd - -from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future -from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest -from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.core.network_iterator import NetworkStatus from hummingbot.data_feed.candles_feed.candles_base import CandlesBase -from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig from hummingbot.data_feed.candles_feed.okx_spot_candles import constants as CONSTANTS from hummingbot.logger import HummingbotLogger @@ -23,28 +16,8 @@ def logger(cls) -> HummingbotLogger: cls._logger = logging.getLogger(__name__) return cls._logger - def __init__(self, trading_pair: str, interval: str = "1m", - max_records: int = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST): + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): super().__init__(trading_pair, interval, max_records) - self.interval_to_milliseconds_dict = { - "1s": 1000, - "1m": 60000, - "3m": 180000, - "5m": 300000, - "15m": 900000, - "30m": 1800000, - "1h": 3600000, - "2h": 7200000, - "4h": 14400000, - "6h": 21600000, - "8h": 28800000, - "12h": 43200000, - "1d": 86400000, - "3d": 259200000, - "1w": 604800000, - "1M": 
2592000000, - "3M": 7776000000 - } @property def name(self): @@ -66,6 +39,14 @@ def health_check_url(self): def candles_url(self): return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + @property + def candles_endpoint(self): + return CONSTANTS.CANDLES_ENDPOINT + + @property + def candles_max_result_per_rest_request(self): + return CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST + @property def rate_limits(self): return CONSTANTS.RATE_LIMITS @@ -83,120 +64,45 @@ async def check_network(self) -> NetworkStatus: def get_exchange_trading_pair(self, trading_pair): return trading_pair - async def fetch_candles(self, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = 100): - rest_assistant = await self._api_factory.get_rest_assistant() - params = {"instId": self._ex_trading_pair, "bar": CONSTANTS.INTERVALS[self.interval], "limit": limit} - if end_time: - params["after"] = end_time - if start_time: - params["before"] = start_time - candles = await rest_assistant.execute_request(url=self.candles_url, - throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, - params=params) - - arr = [[row[0], row[1], row[2], row[3], row[4], row[5], row[6], 0., 0., 0.] for row in candles["data"]] - return np.array(arr).astype(float) - - async def fill_historical_candles(self): - max_request_needed = (self._candles.maxlen // CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) + 1 - requests_executed = 0 - while not self.ready: - missing_records = self._candles.maxlen - len(self._candles) - end_timestamp = int(self._candles[0][0]) - try: - if requests_executed < max_request_needed: - # we have to add one more since, the last row is not going to be included - candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) - # we are computing again the quantity of records again since the websocket process is able to - # modify the deque and if we extend it, the new observations are going to be dropped. - missing_records = self._candles.maxlen - len(self._candles) - self._candles.extendleft(candles[-(missing_records + 1):-1]) - requests_executed += 1 - else: - self.logger().error(f"There is no data available for the quantity of " - f"candles requested for {self.name}.") - raise - except asyncio.CancelledError: - raise - except Exception: - self.logger().exception( - "Unexpected error occurred when getting historical klines. 
Retrying in 1 seconds...", - ) - await self._sleep(1.0) - - async def get_historical_candles(self, config: HistoricalCandlesConfig): - try: - all_candles = [] - current_start_time = config.start_time - while current_start_time <= config.end_time: - current_end_time = current_start_time + self.interval_to_milliseconds_dict[config.interval] * CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST - fetched_candles = await self.fetch_candles(end_time=current_end_time) - if fetched_candles.size == 0: - break - - all_candles.append(fetched_candles[::-1]) - last_timestamp = fetched_candles[0][0] # Assuming the first column is the timestamp - current_start_time = int(last_timestamp) - - final_candles = np.concatenate(all_candles, axis=0) if all_candles else np.array([]) - candles_df = pd.DataFrame(final_candles, columns=self.columns) - candles_df.drop_duplicates(subset=["timestamp"], inplace=True) - return candles_df - except Exception as e: - self.logger().exception(f"Error fetching historical candles: {str(e)}") - - async def _subscribe_channels(self, ws: WSAssistant): + def _get_rest_candles_params(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST) -> dict: """ - Subscribes to the candles events through the provided websocket connection. - :param ws: the websocket assistant used to connect to the exchange + For API documentation, please refer to: + https://www.okx.com/docs-v5/en/?shell#order-book-trading-market-data-get-candlesticks-history + + This endpoint allows you to return up to 3600 candles ago. """ - try: - candle_args = [] - candle_args.append({"channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", "instId": self._ex_trading_pair}) - payload = { - "op": "subscribe", - "args": candle_args - } - subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) - - await ws.send(subscribe_candles_request) - self.logger().info("Subscribed to public klines...") - except asyncio.CancelledError: - raise - except Exception: - self.logger().error( - "Unexpected error occurred subscribing to public klines...", - exc_info=True - ) - raise - - async def _process_websocket_messages(self, websocket_assistant: WSAssistant): - async for ws_response in websocket_assistant.iter_messages(): - data: Dict[str, Any] = ws_response.data - if data is not None and "data" in data: # data will be None when the websocket is disconnected - candles = data["data"][0] - timestamp = candles[0] - open = candles[1] - high = candles[2] - low = candles[3] - close = candles[4] - volume = candles[5] - quote_asset_volume = candles[6] - n_trades = 0. - taker_buy_base_volume = 0. - taker_buy_quote_volume = 0. 
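Editor's note for the OKX spot and perpetual hunks in this area: pagination now sends millisecond `before`/`after` bounds, and the REST rows come back newest-first, so `_parse_rest_candles` reverses them. A standalone sketch of both steps (function names and the sample payload are illustrative, not Hummingbot API):

```python
# Illustrative OKX candle request/response handling: ms bounds, newest-first rows reversed.
from typing import List, Optional


def okx_candle_params(inst_id: str, bar: str,
                      start_time: Optional[int] = None,
                      end_time: Optional[int] = None) -> dict:
    params = {"instId": inst_id, "bar": bar}
    if start_time:
        params["before"] = start_time * 1000   # seconds -> milliseconds
        params["after"] = end_time * 1000
    return params


def parse_okx_rows(data: dict) -> List[List[float]]:
    # row: [ts_ms, open, high, low, close, volume, quote volume, ...] -> oldest-first, seconds
    return [[float(r[0]) / 1000, float(r[1]), float(r[2]), float(r[3]), float(r[4]), float(r[5]), float(r[6])]
            for r in data["data"]][::-1]


sample = {"data": [["1700000060000", "1", "2", "0.5", "1.5", "10", "15"],
                   ["1700000000000", "1", "2", "0.5", "1.5", "10", "15"]]}
print(okx_candle_params("BTC-USDT", "1m", start_time=1_700_000_000, end_time=1_700_000_060))
print([row[0] for row in parse_okx_rows(sample)])  # [1700000000.0, 1700000060.0]
```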
- - candles_row = np.array([timestamp, open, high, low, close, volume, - quote_asset_volume, n_trades, taker_buy_base_volume, - taker_buy_quote_volume]).astype(float) - if len(self._candles) == 0: - self._candles.append(candles_row) - safe_ensure_future(self.fill_historical_candles()) - elif int(timestamp) > int(self._candles[-1][0]): - self._candles.append(candles_row) - elif int(timestamp) == int(self._candles[-1][0]): - self._candles.pop() - self._candles.append(candles_row) + params = {"instId": self._ex_trading_pair, "bar": CONSTANTS.INTERVALS[self.interval]} + if start_time: + params["before"] = start_time * 1000 + params["after"] = end_time * 1000 + return params + + def _parse_rest_candles(self, data: dict, end_time: Optional[int] = None) -> List[List[float]]: + return [[self.ensure_timestamp_in_seconds(row[0]), row[1], row[2], row[3], row[4], row[5], row[6], 0., 0., 0.] + for row in data["data"]][::-1] + + def ws_subscription_payload(self): + candle_args = [{"channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", "instId": self._ex_trading_pair}] + return { + "op": "subscribe", + "args": candle_args + } + + def _parse_websocket_message(self, data: dict): + candles_row_dict = {} + if data is not None and "data" in data: # data will be None when the websocket is disconnected + candles = data["data"][0] + candles_row_dict["timestamp"] = self.ensure_timestamp_in_seconds(candles[0]) + candles_row_dict["open"] = candles[1] + candles_row_dict["high"] = candles[2] + candles_row_dict["low"] = candles[3] + candles_row_dict["close"] = candles[4] + candles_row_dict["volume"] = candles[5] + candles_row_dict["quote_asset_volume"] = candles[6] + candles_row_dict["n_trades"] = 0. + candles_row_dict["taker_buy_base_volume"] = 0. + candles_row_dict["taker_buy_quote_volume"] = 0. + return candles_row_dict diff --git a/hummingbot/pmm_script/pmm_script_base.py b/hummingbot/pmm_script/pmm_script_base.py deleted file mode 100644 index b4815f0b3a..0000000000 --- a/hummingbot/pmm_script/pmm_script_base.py +++ /dev/null @@ -1,236 +0,0 @@ -import asyncio -import traceback -from decimal import Decimal -from multiprocessing import Queue -from operator import itemgetter -from statistics import mean, median -from typing import Any, Callable, Dict, List, Optional - -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - SellOrderCompletedEvent -) -from .pmm_script_interface import ( - CallLog, - CallNotify, - OnCommand, - OnStatus, - OnTick, - PMMParameters, - PMMMarketInfo, - ScriptError -) - - -class PMMScriptBase: - """ - PMMScriptBase provides functionality which a script can use to interact with the main HB application. - A user defined script should derive from this base class to get all its functionality. 
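Editor's note: the PMM script framework removed in this hunk communicated between the Hummingbot process and the user script through a pair of multiprocessing queues; the parent pushes tick/event messages, the child script polls its queue and replies with notifications, and a `None` sentinel shuts the child down. A stripped-down sketch of that protocol (the dict message is a simplified stand-in for `OnTick`, the string reply for `CallNotify`):

```python
# Minimal parent/child queue protocol, analogous to the removed PMM script plumbing.
from multiprocessing import Process, Queue


def child_loop(parent_queue: Queue, child_queue: Queue):
    while True:
        item = parent_queue.get()
        if item is None:          # sentinel -> shut down, mirroring the removed iterator's c_stop
            break
        child_queue.put(f"script saw mid price {item['mid_price']}")


if __name__ == "__main__":
    parent_q, child_q = Queue(), Queue()
    proc = Process(target=child_loop, args=(parent_q, child_q))
    proc.start()
    parent_q.put({"mid_price": 101.5})   # analogous to an OnTick message
    print(child_q.get())                 # "script saw mid price 101.5"
    parent_q.put(None)                   # stop the child process
    proc.join()
```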
- """ - def __init__(self): - self._parent_queue: Queue = None - self._child_queue: Queue = None - self._queue_check_interval: float = 0.0 - self.mid_prices: List[Decimal] = [] - self.max_mid_prices_length: int = 86400 # 60 * 60 * 24 = 1 day of prices - self.pmm_parameters: PMMParameters = None - self.pmm_market_info: PMMMarketInfo = None - # all_total_balances stores balances in {exchange: {token: balance}} format - # for example {"binance": {"BTC": Decimal("0.1"), "ETH": Decimal("20"}} - self.all_total_balances: Dict[str, Dict[str, Decimal]] = None - # all_available_balances has the same data structure as all_total_balances - self.all_available_balances: Dict[str, Dict[str, Decimal]] = None - - def assign_init(self, parent_queue: Queue, child_queue: Queue, queue_check_interval: float): - self._parent_queue = parent_queue - self._child_queue = child_queue - self._queue_check_interval = queue_check_interval - - @property - def mid_price(self): - """ - The current market mid price (the average of top bid and top ask) - """ - return self.mid_prices[-1] - - async def run(self): - asyncio.ensure_future(self.listen_to_parent()) - - async def listen_to_parent(self): - while True: - try: - if self._parent_queue.empty(): - await asyncio.sleep(self._queue_check_interval) - continue - item = self._parent_queue.get() - # print(f"child gets {str(item)}") - if item is None: - # print("child exiting..") - asyncio.get_event_loop().stop() - break - if isinstance(item, OnTick): - self.mid_prices.append(item.mid_price) - if len(self.mid_prices) > self.max_mid_prices_length: - self.mid_prices = self.mid_prices[len(self.mid_prices) - self.max_mid_prices_length:] - self.pmm_parameters = item.pmm_parameters - self.all_total_balances = item.all_total_balances - self.all_available_balances = item.all_available_balances - self.on_tick() - elif isinstance(item, BuyOrderCompletedEvent): - self.on_buy_order_completed(item) - elif isinstance(item, SellOrderCompletedEvent): - self.on_sell_order_completed(item) - elif isinstance(item, OnStatus): - status_msg = self.on_status() - if status_msg: - self.notify(f"Script status: {status_msg}") - elif isinstance(item, OnCommand): - self.on_command(item.cmd, item.args) - elif isinstance(item, PMMMarketInfo): - self.pmm_market_info = item - except asyncio.CancelledError: - raise - except Exception as e: - # Capturing traceback here and put it as part of ScriptError, which can then be reported in the parent - # process. - tb = "".join(traceback.TracebackException.from_exception(e).format()) - self._child_queue.put(ScriptError(e, tb)) - - def notify(self, msg: str): - """ - Notifies the user, the message will appear on top left panel of HB application. - If Telegram integration enabled, the message will also be sent to the telegram user. - :param msg: The message. - """ - self._child_queue.put(CallNotify(msg)) - - def log(self, msg: str): - """ - Logs message to the strategy log file and display it on Running Logs section of HB. - :param msg: The message. - """ - self._child_queue.put(CallLog(msg)) - - def avg_mid_price(self, interval: int, length: int) -> Optional[Decimal]: - """ - Calculates average (mean) of the stored mid prices. - Mid prices are stored for each tick (second). - Examples: To get the average of the last 100 minutes mid prices = avg_mid_price(60, 100) - :param interval: The interval (in seconds) in which to sample the mid prices. - :param length: The number of the samples to calculate the average. 
- :returns None if there is not enough samples, otherwise the average mid price. - """ - samples = self.take_samples(self.mid_prices, interval, length) - if samples is None: - return None - return mean(samples) - - def avg_price_volatility(self, interval: int, length: int) -> Optional[Decimal]: - """ - Calculates average (mean) price volatility, volatility is a price change compared to the previous - cycle regardless of its direction, e.g. if price changes -3% (or 3%), the volatility is 3%. - Examples: To get the average of the last 10 changes on a minute interval = avg_price_volatility(60, 10) - :param interval: The interval (in seconds) in which to sample the mid prices. - :param length: The number of the samples to calculate the average. - :returns None if there is not enough samples, otherwise the average mid price change. - """ - return self.locate_central_price_volatility(interval, length, mean) - - def median_price_volatility(self, interval: int, length: int) -> Optional[Decimal]: - """ - Calculates the median (middle value) price volatility, volatility is a price change compared to the previous - cycle regardless of its direction, e.g. if price changes -3% (or 3%), the volatility is 3%. - Examples: To get the median of the last 10 changes on a minute interval = median_price_volatility(60, 10) - :param interval: The interval (in seconds) in which to sample the mid prices. - :param length: The number of the samples to calculate the average. - :returns None if there is not enough samples, otherwise the median mid price change. - """ - return self.locate_central_price_volatility(interval, length, median) - - def locate_central_price_volatility(self, interval: int, length: int, locate_function: Callable) \ - -> Optional[Decimal]: - """ - Calculates central location of the price volatility, volatility is a price change compared to the previous cycle - regardless of its direction, e.g. if price changes -3% (or 3%), the volatility is 3%. - Examples: To get mean of the last 10 changes on a minute interval locate_central_price_volatility(60, 10, mean) - :param interval: The interval in which to sample the mid prices. - :param length: The number of the samples. - :param locate_function: The function used to calculate the central location, e.g. mean, median, geometric_mean - and many more which are supported by statistics library. - :returns None if there is not enough samples, otherwise the central location of mid price change. - """ - # We need sample size of length + 1, as we need a previous value to calculate the change - samples = self.take_samples(self.mid_prices, interval, length + 1) - if samples is None: - return None - changes = [] - for index in range(1, len(samples)): - changes.append(max(samples[index], samples[index - 1]) / min(samples[index], samples[index - 1]) - 1) - return locate_function(changes) - - @staticmethod - def round_by_step(a_number: Decimal, step_size: Decimal): - """ - Rounds the number down by the step size, e.g. round_by_step(1.8, 0.25) = 1.75 - :param a_number: A number to round - :param step_size: The step size. - :returns rounded number. 
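Editor's note: a worked check of the volatility helpers documented in this deleted block; each sample-to-sample change is `max(curr, prev) / min(curr, prev) - 1`, i.e. the magnitude of the move regardless of direction, and `avg_price_volatility` / `median_price_volatility` then apply `mean` / `median` to those changes.

```python
# Worked example of the deleted volatility calculation.
from statistics import mean, median

samples = [100.0, 103.0, 99.91]   # three sampled mid prices: +3% then -3%

changes = [max(samples[i], samples[i - 1]) / min(samples[i], samples[i - 1]) - 1
           for i in range(1, len(samples))]

print([round(c, 4) for c in changes])   # [0.03, 0.0309]
print(round(mean(changes), 4))          # 0.0305
print(round(median(changes), 4))        # 0.0305
```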
- """ - return (a_number // step_size) * step_size - - @staticmethod - def take_samples(a_list: List[Any], interval: int, length: int) -> Optional[List[any]]: - """ - Takes samples out of a given list where the last item is the most recent, - Examples: a list = [1, 2, 3, 4, 5, 6, 7] an interval of 3 and length of 2 will return you [4, 7], - for an interval of 2 and length of 4, you'll get [1, 3, 5, 7] - :param a_list: A list which to take samples from - :param interval: The interval at which to take sample, starting from the last item on the list. - :param length: The number of the samples. - :returns None if there is not enough samples to satisfy length, otherwise the sample list. - """ - index_list = list(range(len(a_list) - 1, -1, -1 * interval)) - index_list = sorted(index_list) - index_list = index_list[-1 * length:] - if len(index_list) < length: - return None - if len(index_list) == 1: - # return a list with just 1 item in it. - return [a_list[index_list[0]]] - samples = list(itemgetter(*index_list)(a_list)) - return samples - - def on_tick(self): - """ - Is called upon OnTick message received, which is every second on normal HB configuration. - It is intended to be implemented by the derived class of this class. - """ - pass - - def on_buy_order_completed(self, event: BuyOrderCompletedEvent): - """ - Is called upon a buy order is completely filled. - It is intended to be implemented by the derived class of this class. - """ - pass - - def on_sell_order_completed(self, event: SellOrderCompletedEvent): - """ - Is called upon a sell order is completely filled. - It is intended to be implemented by the derived class of this class. - """ - pass - - def on_status(self) -> str: - """ - Is called upon `status` command is issued on the Hummingbot application. - It is intended to be implemented by the derived class of this class. - :returns status message. - """ - return f"{self.__class__.__name__} is active." - - def on_command(self, cmd: str, args: List[str]): - """ - Called when 'script' command is issued on the Hummingbot application - """ - pass diff --git a/hummingbot/pmm_script/pmm_script_interface.py b/hummingbot/pmm_script/pmm_script_interface.py deleted file mode 100644 index e125bf0afc..0000000000 --- a/hummingbot/pmm_script/pmm_script_interface.py +++ /dev/null @@ -1,165 +0,0 @@ -from decimal import Decimal -from typing import Dict, List - -child_queue = None - - -def set_child_queue(queue): - global child_queue - child_queue = queue - - -class StrategyParameter(object): - """ - A strategy parameter class that is used as a property for the collection class with its get and set method. - The set method detects if there is a value change it will put itself into the child queue. - """ - def __init__(self, attr): - self.name = attr - self.attr = "_" + attr - self.updated_value = None - - def __get__(self, obj, objtype): - return getattr(obj, self.attr) - - def __set__(self, obj, value): - global child_queue - old_value = getattr(obj, self.attr) - if old_value is not None and old_value != value: - self.updated_value = value - child_queue.put(self) - setattr(obj, self.attr, value) - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class PMMParameters: - """ - A collection of pure market making strategy parameters which are configurable through script. - The members names need to match the property names of PureMarketMakingStrategy. 
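Editor's note: a quick check of the `take_samples` index arithmetic documented above (walk backwards from the last element in steps of `interval`, then keep the most recent `length` hits), reproducing the two examples from its docstring:

```python
# Reproduces the deleted take_samples examples: [1..7] with (interval, length) pairs.
from operator import itemgetter

a_list = [1, 2, 3, 4, 5, 6, 7]

for interval, length in [(3, 2), (2, 4)]:
    index_list = sorted(range(len(a_list) - 1, -1, -interval))[-length:]
    print(list(itemgetter(*index_list)(a_list)))
# [4, 7]
# [1, 3, 5, 7]
```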
- """ - def __init__(self): - self._buy_levels = None - self._sell_levels = None - self._order_levels = None - self._bid_spread = None - self._ask_spread = None - self._order_amount = None - self._order_level_spread = None - self._order_level_amount = None - self._order_refresh_time = None - self._order_refresh_tolerance_pct = None - self._filled_order_delay = None - self._hanging_orders_enabled = None - self._hanging_orders_cancel_pct = None - - # These below parameters are yet to open for the script - - self._inventory_skew_enabled = None - self._inventory_target_base_pct = None - self._inventory_range_multiplier = None - self._order_override = None - - # self._order_optimization_enabled = None - # self._ask_order_optimization_depth = None - # self._bid_order_optimization_depth = None - # self._add_transaction_costs_to_orders = None - # self._price_ceiling = None - # self._price_floor = None - # self._ping_pong_enabled = None - # self._minimum_spread = None - - buy_levels = StrategyParameter("buy_levels") - sell_levels = StrategyParameter("sell_levels") - order_levels = StrategyParameter("order_levels") - bid_spread = StrategyParameter("bid_spread") - ask_spread = StrategyParameter("ask_spread") - order_amount = StrategyParameter("order_amount") - order_level_spread = StrategyParameter("order_level_spread") - order_level_amount = StrategyParameter("order_level_amount") - order_refresh_time = StrategyParameter("order_refresh_time") - order_refresh_tolerance_pct = StrategyParameter("order_refresh_tolerance_pct") - filled_order_delay = StrategyParameter("filled_order_delay") - hanging_orders_enabled = StrategyParameter("hanging_orders_enabled") - hanging_orders_cancel_pct = StrategyParameter("hanging_orders_cancel_pct") - - inventory_skew_enabled = StrategyParameter("inventory_skew_enabled") - inventory_target_base_pct = StrategyParameter("inventory_target_base_pct") - inventory_range_multiplier = StrategyParameter("inventory_range_multiplier") - order_override = StrategyParameter("order_override") - - # order_optimization_enabled = PMMParameter("order_optimization_enabled") - # ask_order_optimization_depth = PMMParameter("ask_order_optimization_depth") - # bid_order_optimization_depth = PMMParameter("bid_order_optimization_depth") - # add_transaction_costs_to_orders = PMMParameter("add_transaction_costs_to_orders") - # price_ceiling = PMMParameter("price_ceiling") - # price_floor = PMMParameter("price_floor") - # ping_pong_enabled = PMMParameter("ping_pong_enabled") - # minimum_spread = PMMParameter("minimum_spread") - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class PMMMarketInfo: - def __init__(self, exchange: str, - trading_pair: str,): - self.exchange = exchange - self.trading_pair = trading_pair - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class OnTick: - def __init__(self, mid_price: Decimal, - pmm_parameters: PMMParameters, - all_total_balances: Dict[str, Dict[str, Decimal]], - all_available_balances: Dict[str, Dict[str, Decimal]], - ): - self.mid_price = mid_price - self.pmm_parameters = pmm_parameters - self.all_total_balances = all_total_balances - self.all_available_balances = all_available_balances - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class OnStatus: - pass - - -class CallNotify: - def __init__(self, msg): - self.msg = msg - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class CallLog: - def 
__init__(self, msg): - self.msg = msg - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class OnCommand: - def __init__(self, cmd: str, args: List[str]): - self.cmd = cmd - self.args = args - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.__dict__)}" - - -class ScriptError: - def __init__(self, error: Exception, traceback: str): - self.error = error - self.traceback = traceback - - def __repr__(self): - return f"{self.__class__.__name__} {str(self.error)} \nTrace back: {self.traceback}" diff --git a/hummingbot/pmm_script/pmm_script_iterator.pxd b/hummingbot/pmm_script/pmm_script_iterator.pxd deleted file mode 100644 index dff288a884..0000000000 --- a/hummingbot/pmm_script/pmm_script_iterator.pxd +++ /dev/null @@ -1,21 +0,0 @@ -# distutils: language=c++ - -from hummingbot.core.time_iterator cimport TimeIterator - - -cdef class PMMScriptIterator(TimeIterator): - cdef: - str _script_file_path - object _strategy - object _markets - double _queue_check_interval - object _event_pairs - object _did_complete_buy_order_forwarder - object _did_complete_sell_order_forwarder - object _script_module - object _parent_queue - object _child_queue - object _ev_loop - object _script_process - object _listen_to_child_task - bint _is_unit_testing_mode diff --git a/hummingbot/pmm_script/pmm_script_iterator.pyx b/hummingbot/pmm_script/pmm_script_iterator.pyx deleted file mode 100644 index 59b57439a1..0000000000 --- a/hummingbot/pmm_script/pmm_script_iterator.pyx +++ /dev/null @@ -1,159 +0,0 @@ -# distutils: language=c++ - -import asyncio -import logging -from multiprocessing import Process, Queue -from pathlib import Path -from typing import List - -from hummingbot.connector.exchange_base import ExchangeBase -from hummingbot.core.clock cimport Clock -from hummingbot.core.clock import Clock -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - MarketEvent, - SellOrderCompletedEvent, -) -from hummingbot.core.event.event_forwarder import SourceInfoEventForwarder -from hummingbot.core.utils.async_utils import safe_ensure_future -from hummingbot.pmm_script.pmm_script_interface import ( - CallLog, - CallNotify, - OnTick, - OnCommand, - OnStatus, - PMMParameters, - PMMMarketInfo, - ScriptError, - StrategyParameter, -) -from hummingbot.pmm_script.pmm_script_process import run_pmm_script -from hummingbot.strategy.pure_market_making import PureMarketMakingStrategy - -sir_logger = None - - -cdef class PMMScriptIterator(TimeIterator): - @classmethod - def logger(cls): - global sir_logger - if sir_logger is None: - sir_logger = logging.getLogger(__name__) - return sir_logger - - def __init__(self, - script_file_path: Path, - markets: List[ExchangeBase], - strategy: PureMarketMakingStrategy, - queue_check_interval: float = 0.01, - is_unit_testing_mode: bool = False): - super().__init__() - self._markets = markets - self._strategy = strategy - self._is_unit_testing_mode = is_unit_testing_mode - self._queue_check_interval = queue_check_interval - self._did_complete_buy_order_forwarder = SourceInfoEventForwarder(self._did_complete_buy_order) - self._did_complete_sell_order_forwarder = SourceInfoEventForwarder(self._did_complete_sell_order) - self._event_pairs = [ - (MarketEvent.BuyOrderCompleted, self._did_complete_buy_order_forwarder), - (MarketEvent.SellOrderCompleted, self._did_complete_sell_order_forwarder) - ] - self._ev_loop = asyncio.get_event_loop() - self._parent_queue = Queue() - self._child_queue = Queue() - self._listen_to_child_task = 
safe_ensure_future(self.listen_to_child_queue(), loop=self._ev_loop) - - self._script_process = Process( - target=run_pmm_script, - args=(str(script_file_path), self._parent_queue, self._child_queue, queue_check_interval,) - ) - self.logger().info(f"starting PMM script in {script_file_path}") - self._script_process.start() - - @property - def strategy(self): - return self._strategy - - cdef c_start(self, Clock clock, double timestamp): - TimeIterator.c_start(self, clock, timestamp) - for market in self._markets: - for event_pair in self._event_pairs: - market.add_listener(event_pair[0], event_pair[1]) - self._parent_queue.put(PMMMarketInfo(self._strategy.market_info.market.name, - self._strategy.trading_pair)) - - cdef c_stop(self, Clock clock): - TimeIterator.c_stop(self, clock) - self._parent_queue.put(None) - self._child_queue.put(None) - self._script_process.join() - if self._listen_to_child_task is not None: - self._listen_to_child_task.cancel() - - cdef c_tick(self, double timestamp): - TimeIterator.c_tick(self, timestamp) - if not self._strategy.all_markets_ready(): - return - cdef object pmm_strategy = PMMParameters() - for attr in PMMParameters.__dict__.keys(): - if attr[:1] != '_': - param_value = getattr(self._strategy, attr) - setattr(pmm_strategy, attr, param_value) - cdef object on_tick = OnTick(self.strategy.get_mid_price(), pmm_strategy, - self.all_total_balances(), self.all_available_balances()) - self._parent_queue.put(on_tick) - - def _did_complete_buy_order(self, - event_tag: int, - market: ExchangeBase, - event: BuyOrderCompletedEvent): - self._parent_queue.put(event) - - def _did_complete_sell_order(self, - event_tag: int, - market: ExchangeBase, - event: SellOrderCompletedEvent): - self._parent_queue.put(event) - - async def listen_to_child_queue(self): - while True: - try: - if self._child_queue.empty(): - await asyncio.sleep(self._queue_check_interval) - continue - item = self._child_queue.get() - if item is None: - break - if isinstance(item, StrategyParameter): - self.logger().info(f"received: {str(item)}") - setattr(self._strategy, item.name, item.updated_value) - elif isinstance(item, CallNotify) and not self._is_unit_testing_mode: - # ignore this on unit testing as the below import will mess up unit testing. 
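Editor's note: the `StrategyParameter` descriptor deleted above is what feeds this `listen_to_child_queue` loop; when a script assigns a new value, `__set__` records it and pushes the descriptor onto the child queue so the parent side can `setattr` it onto the live strategy. A minimal single-process sketch of that pattern (a plain `queue.Queue` stands in for the multiprocessing queue):

```python
# Minimal sketch of the queue-pushing descriptor used by the removed PMM script interface.
from queue import Queue

child_queue: Queue = Queue()


class StrategyParameter:
    def __init__(self, attr: str):
        self.name = attr
        self.attr = "_" + attr
        self.updated_value = None

    def __get__(self, obj, objtype=None):
        return getattr(obj, self.attr)

    def __set__(self, obj, value):
        old_value = getattr(obj, self.attr)
        if old_value is not None and old_value != value:
            self.updated_value = value
            child_queue.put(self)   # parent later applies setattr(strategy, item.name, item.updated_value)
        setattr(obj, self.attr, value)


class PMMParameters:
    bid_spread = StrategyParameter("bid_spread")

    def __init__(self):
        self._bid_spread = 0.01


params = PMMParameters()
params.bid_spread = 0.02                 # change detected -> descriptor queued
item = child_queue.get()
print(item.name, item.updated_value)     # bid_spread 0.02
```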
- from hummingbot.client.hummingbot_application import HummingbotApplication - HummingbotApplication.main_application().notify(item.msg) - elif isinstance(item, CallLog): - self.logger().info(f"script - {item.msg}") - elif isinstance(item, ScriptError): - self.logger().info(f"{item}") - except asyncio.CancelledError: - raise - except Exception: - self.logger().info("Unexpected error listening to child queue.", exc_info=True) - - def request_status(self): - self._parent_queue.put(OnStatus()) - - def request_command(self, cmd: str, args: List[str]): - self._parent_queue.put(OnCommand(cmd, args)) - - def all_total_balances(self): - all_bals = {m.name: m.get_all_balances() for m in self._markets} - return {exchange: {token: bal for token, bal in bals.items() if bal > 0} for exchange, bals in all_bals.items()} - - def all_available_balances(self): - all_bals = self.all_total_balances() - ret_val = {} - for exchange, balances in all_bals.items(): - connector = [c for c in self._markets if c.name == exchange][0] - ret_val[exchange] = {token: connector.get_available_balance(token) for token in balances.keys()} - return ret_val diff --git a/hummingbot/pmm_script/pmm_script_process.py b/hummingbot/pmm_script/pmm_script_process.py deleted file mode 100644 index 3d76ccb903..0000000000 --- a/hummingbot/pmm_script/pmm_script_process.py +++ /dev/null @@ -1,36 +0,0 @@ -import asyncio -import importlib -import inspect -import os - -from multiprocessing import Queue -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase -from hummingbot.pmm_script.pmm_script_interface import CallNotify, set_child_queue - - -def run_pmm_script(script_file_name: str, parent_queue: Queue, child_queue: Queue, queue_check_interval: float): - try: - script_class = import_pmm_script_sub_class(script_file_name) - script = script_class() - script.assign_init(parent_queue, child_queue, queue_check_interval) - set_child_queue(child_queue) - policy = asyncio.get_event_loop_policy() - policy.set_event_loop(policy.new_event_loop()) - ev_loop = asyncio.get_event_loop() - ev_loop.create_task(script.run()) - ev_loop.run_forever() - ev_loop.close() - except Exception as ex: - child_queue.put(CallNotify(f'Failed to start script {script_file_name}:')) - child_queue.put(CallNotify(f'{ex}')) - - -def import_pmm_script_sub_class(script_file_name: str): - name = os.path.basename(script_file_name).split(".")[0] - spec = importlib.util.spec_from_file_location(name, script_file_name) - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) - for x in dir(module): - obj = getattr(module, x) - if inspect.isclass(obj) and issubclass(obj, PMMScriptBase) and obj.__name__ != "PMMScriptBase": - return obj diff --git a/hummingbot/strategy/amm_arb/start.py b/hummingbot/strategy/amm_arb/start.py index 6061242c43..b2df205c22 100644 --- a/hummingbot/strategy/amm_arb/start.py +++ b/hummingbot/strategy/amm_arb/start.py @@ -3,6 +3,7 @@ from hummingbot.client.settings import AllConnectorSettings from hummingbot.connector.gateway.amm.gateway_evm_amm import GatewayEVMAMM +from hummingbot.connector.gateway.amm.gateway_telos_amm import GatewayTelosAMM from hummingbot.connector.gateway.amm.gateway_tezos_amm import GatewayTezosAMM from hummingbot.connector.gateway.common_types import Chain from hummingbot.connector.gateway.gateway_price_shim import GatewayPriceShim @@ -57,6 +58,8 @@ def start(self): amm_connector: GatewayEVMAMM = cast(GatewayEVMAMM, amm_market_info.market) elif Chain.TEZOS.chain == amm_market_info.market.chain: 
amm_connector: GatewayTezosAMM = cast(GatewayTezosAMM, amm_market_info.market) + elif Chain.TELOS.chain == amm_market_info.market.chain: + amm_connector: GatewayTelosAMM = cast(GatewayTelosAMM, amm_market_info.market) else: raise ValueError(f"Unsupported chain: {amm_market_info.market.chain}") GatewayPriceShim.get_instance().patch_prices( diff --git a/hummingbot/strategy/directional_strategy_base.py b/hummingbot/strategy/directional_strategy_base.py index dc27f843ea..6be2a6893a 100644 --- a/hummingbot/strategy/directional_strategy_base.py +++ b/hummingbot/strategy/directional_strategy_base.py @@ -33,7 +33,7 @@ class DirectionalStrategyBase(ScriptStrategyBase): stored_executors (List[PositionExecutor]): List of closed position executors that have been stored. stop_loss (float): The stop loss percentage. take_profit (float): The take profit percentage. - time_limit (int): The time limit for the position. + time_limit (int): The time limit for the position in seconds. open_order_type (OrderType): The order type for opening the position. open_order_slippage_buffer (float): The slippage buffer for the opening order. take_profit_order_type (OrderType): The order type for the take profit order. @@ -46,6 +46,7 @@ class DirectionalStrategyBase(ScriptStrategyBase): leverage (float): The leverage to be used. order_amount_usd (Decimal): The order amount in USD. markets (Dict[str, Set[str]]): Dictionary mapping exchanges to trading pairs. + cooldown_after_execution (int): Cooldown between position executions, in seconds. """ directional_strategy_name: str # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries diff --git a/hummingbot/strategy/script_strategy_base.py b/hummingbot/strategy/script_strategy_base.py index 5beb1632cd..ecc2654f52 100644 --- a/hummingbot/strategy/script_strategy_base.py +++ b/hummingbot/strategy/script_strategy_base.py @@ -81,7 +81,7 @@ def on_tick(self): """ pass - def on_stop(self): + async def on_stop(self): pass def buy(self, @@ -104,7 +104,7 @@ def buy(self, :return: The client assigned id for the new order """ market_pair = self._market_trading_pair_tuple(connector_name, trading_pair) - self.logger().info(f"Creating {trading_pair} buy order: price: {price} amount: {amount}.") + self.logger().debug(f"Creating {trading_pair} buy order: price: {price} amount: {amount}.") return self.buy_with_specific_market(market_pair, amount, order_type, price, position_action=position_action) def sell(self, @@ -127,7 +127,7 @@ def sell(self, :return: The client assigned id for the new order """ market_pair = self._market_trading_pair_tuple(connector_name, trading_pair) - self.logger().info(f"Creating {trading_pair} sell order: price: {price} amount: {amount}.") + self.logger().debug(f"Creating {trading_pair} sell order: price: {price} amount: {amount}.") return self.sell_with_specific_market(market_pair, amount, order_type, price, position_action=position_action) def cancel(self, diff --git a/hummingbot/strategy/strategy_v2_base.py b/hummingbot/strategy/strategy_v2_base.py index 7f6478aedf..c1813db0cb 100644 --- a/hummingbot/strategy/strategy_v2_base.py +++ b/hummingbot/strategy/strategy_v2_base.py @@ -176,6 +176,7 @@ class StrategyV2Base(ScriptStrategyBase): markets: Dict[str, Set[str]] _last_config_update_ts: float = 0 closed_executors_buffer: int = 100 + max_executors_close_attempts: int = 10 @classmethod def init_markets(cls, config: StrategyV2ConfigBase): @@ -272,12 +273,17 @@ def update_executors_info(self): def 
is_perpetual(connector: str) -> bool: return "perpetual" in connector - def on_stop(self): + async def on_stop(self): self.executor_orchestrator.stop() self.market_data_provider.stop() self.listen_to_executor_actions_task.cancel() for controller in self.controllers.values(): controller.stop() + for i in range(self.max_executors_close_attempts): + if all([executor.is_done for executor in self.get_all_executors()]): + continue + await asyncio.sleep(1) + self.executor_orchestrator.store_all_executors() def on_tick(self): self.update_executors_info() diff --git a/hummingbot/strategy_v2/backtesting/backtesting_engine_base.py b/hummingbot/strategy_v2/backtesting/backtesting_engine_base.py index 10b097cdae..e38f698803 100644 --- a/hummingbot/strategy_v2/backtesting/backtesting_engine_base.py +++ b/hummingbot/strategy_v2/backtesting/backtesting_engine_base.py @@ -47,9 +47,12 @@ def load_controller_config(cls, return config_data @classmethod - def get_controller_config_instance_from_yml(cls, config_path: str) -> ControllerConfigBase: - config_data = cls.load_controller_config(config_path) - return cls.get_controller_config_instance_from_dict(config_data) + def get_controller_config_instance_from_yml(cls, + config_path: str, + controllers_conf_dir_path: str = settings.CONTROLLERS_CONF_DIR_PATH, + controllers_module: str = settings.CONTROLLERS_MODULE) -> ControllerConfigBase: + config_data = cls.load_controller_config(config_path, controllers_conf_dir_path) + return cls.get_controller_config_instance_from_dict(config_data, controllers_module) @classmethod def get_controller_config_instance_from_dict(cls, @@ -88,7 +91,7 @@ async def run_backtesting(self, await self.initialize_backtesting_data_provider() await self.controller.update_processed_data() executors_info = self.simulate_execution(trade_cost=trade_cost) - results = self.summarize_results(executors_info) + results = self.summarize_results(executors_info, controller_config.total_amount_quote) return { "executors": executors_info, "results": results, @@ -247,7 +250,7 @@ def handle_stop_action(self, action: StopExecutorAction, timestamp: pd.Timestamp self.active_executor_simulations.remove(executor) @staticmethod - def summarize_results(executors_info, total_amount_quote=1000): + def summarize_results(executors_info: Dict, total_amount_quote: float = 1000): if len(executors_info) > 0: executors_df = pd.DataFrame([ei.to_dict() for ei in executors_info]) net_pnl_quote = executors_df["net_pnl_quote"].sum() diff --git a/hummingbot/strategy_v2/backtesting/executors_simulator/position_executor_simulator.py b/hummingbot/strategy_v2/backtesting/executors_simulator/position_executor_simulator.py index dda5b9df2e..8204d299dc 100644 --- a/hummingbot/strategy_v2/backtesting/executors_simulator/position_executor_simulator.py +++ b/hummingbot/strategy_v2/backtesting/executors_simulator/position_executor_simulator.py @@ -20,6 +20,11 @@ def simulate(self, df: pd.DataFrame, config: PositionExecutorConfig, trade_cost: # Set up barriers tp = Decimal(config.triple_barrier_config.take_profit) if config.triple_barrier_config.take_profit else None sl = Decimal(config.triple_barrier_config.stop_loss) if config.triple_barrier_config.stop_loss else None + trailing_sl_trigger_pct = None + trailing_sl_delta_pct = None + if config.triple_barrier_config.trailing_stop: + trailing_sl_trigger_pct = config.triple_barrier_config.trailing_stop.activation_price + trailing_sl_delta_pct = config.triple_barrier_config.trailing_stop.trailing_delta tl = 
config.triple_barrier_config.time_limit if config.triple_barrier_config.time_limit else None tl_timestamp = config.timestamp + tl if tl else last_timestamp @@ -45,16 +50,25 @@ def simulate(self, df: pd.DataFrame, config: PositionExecutorConfig, trade_cost: df_filtered['net_pnl_quote'] = df_filtered['net_pnl_pct'] * df_filtered['filled_amount_quote'] df_filtered['cum_fees_quote'] = trade_cost * df_filtered['filled_amount_quote'] + # Make sure the trailing stop pct rises linearly to the net p/l pct when above the trailing stop trigger pct (if any) + if trailing_sl_trigger_pct is not None and trailing_sl_delta_pct is not None: + df_filtered.loc[(df_filtered['net_pnl_pct'] > trailing_sl_trigger_pct).cummax(), 'ts'] = ( + df_filtered['net_pnl_pct'] - float(trailing_sl_delta_pct) + ).cummax() + # Determine the earliest close event first_tp_timestamp = df_filtered[df_filtered['net_pnl_pct'] > tp]['timestamp'].min() if tp else None first_sl_timestamp = df_filtered[df_filtered['net_pnl_pct'] < -sl]['timestamp'].min() if sl else None - close_timestamp = min([timestamp for timestamp in [first_tp_timestamp, first_sl_timestamp, tl_timestamp] if not pd.isna(timestamp)]) + first_trailing_sl_timestamp = df_filtered[(~df_filtered['ts'].isna()) & (df_filtered['net_pnl_pct'] < df_filtered['ts'])]['timestamp'].min() if trailing_sl_delta_pct and trailing_sl_trigger_pct else None + close_timestamp = min([timestamp for timestamp in [first_tp_timestamp, first_sl_timestamp, tl_timestamp, first_trailing_sl_timestamp] if not pd.isna(timestamp)]) # Determine the close type if close_timestamp == first_tp_timestamp: close_type = CloseType.TAKE_PROFIT elif close_timestamp == first_sl_timestamp: close_type = CloseType.STOP_LOSS + elif close_timestamp == first_trailing_sl_timestamp: + close_type = CloseType.TRAILING_STOP else: close_type = CloseType.TIME_LIMIT diff --git a/hummingbot/strategy_v2/controllers/controller_base.py b/hummingbot/strategy_v2/controllers/controller_base.py index 5c8ad90291..9d54abe328 100644 --- a/hummingbot/strategy_v2/controllers/controller_base.py +++ b/hummingbot/strategy_v2/controllers/controller_base.py @@ -3,6 +3,7 @@ import asyncio import importlib import inspect +from decimal import Decimal from typing import Callable, Dict, List, Set from pydantic import Field, validator @@ -36,6 +37,12 @@ class ControllerConfigBase(BaseClientModel): )) controller_name: str controller_type: str = "generic" + total_amount_quote: Decimal = Field( + default=100, + client_data=ClientFieldData( + is_updatable=True, + prompt_on_new=True, + prompt=lambda mi: "Enter the total amount in quote asset to use for trading (e.g., 1000):")) manual_kill_switch: bool = Field(default=None, client_data=ClientFieldData(is_updatable=True, prompt_on_new=False)) candles_config: List[CandlesConfig] = Field( default="binance_perpetual.WLD-USDT.1m.500", diff --git a/hummingbot/strategy_v2/controllers/directional_trading_controller_base.py b/hummingbot/strategy_v2/controllers/directional_trading_controller_base.py index c86f732d00..0005b51435 100644 --- a/hummingbot/strategy_v2/controllers/directional_trading_controller_base.py +++ b/hummingbot/strategy_v2/controllers/directional_trading_controller_base.py @@ -31,19 +31,11 @@ class DirectionalTradingControllerConfigBase(ControllerConfigBase): client_data=ClientFieldData( prompt_on_new=True, prompt=lambda mi: "Enter the trading pair to trade on (e.g., WLD-USDT):")) - - total_amount_quote: Decimal = Field( - default=100.0, - client_data=ClientFieldData( - prompt_on_new=True, - 
prompt=lambda mi: "Enter the amount of quote asset to use per executor (e.g., 100):")) - max_executors_per_side: int = Field( default=2, client_data=ClientFieldData( prompt_on_new=True, prompt=lambda mi: "Enter the maximum number of executors per side (e.g., 2):")) - cooldown_time: int = Field( default=60 * 5, gt=0, client_data=ClientFieldData( diff --git a/hummingbot/strategy_v2/controllers/market_making_controller_base.py b/hummingbot/strategy_v2/controllers/market_making_controller_base.py index 6c340fd34f..491db1ef28 100644 --- a/hummingbot/strategy_v2/controllers/market_making_controller_base.py +++ b/hummingbot/strategy_v2/controllers/market_making_controller_base.py @@ -26,12 +26,6 @@ class MarketMakingControllerConfigBase(ControllerConfigBase): client_data=ClientFieldData( prompt_on_new=True, prompt=lambda mi: "Enter the trading pair to trade on (e.g., WLD-USDT):")) - total_amount_quote: float = Field( - default=100, - client_data=ClientFieldData( - is_updatable=True, - prompt_on_new=True, - prompt=lambda mi: "Enter the total amount in quote asset to use for trading (e.g., 1000):")) buy_spreads: List[float] = Field( default="0.01,0.02", client_data=ClientFieldData( @@ -44,13 +38,13 @@ class MarketMakingControllerConfigBase(ControllerConfigBase): is_updatable=True, prompt_on_new=True, prompt=lambda mi: "Enter a comma-separated list of sell spreads (e.g., '0.01, 0.02'):")) - buy_amounts_pct: Union[List[float], None] = Field( + buy_amounts_pct: Union[List[Decimal], None] = Field( default=None, client_data=ClientFieldData( is_updatable=True, prompt_on_new=False, prompt=lambda mi: "Enter a comma-separated list of buy amounts as percentages (e.g., '50, 50'), or leave blank to distribute equally:")) - sell_amounts_pct: Union[List[float], None] = Field( + sell_amounts_pct: Union[List[Decimal], None] = Field( default=None, client_data=ClientFieldData( is_updatable=True, diff --git a/hummingbot/strategy_v2/executors/executor_base.py b/hummingbot/strategy_v2/executors/executor_base.py index bbeba31305..d400fea302 100644 --- a/hummingbot/strategy_v2/executors/executor_base.py +++ b/hummingbot/strategy_v2/executors/executor_base.py @@ -160,6 +160,7 @@ def stop(self): """ Stops the executor and unregisters the events. """ + self.close_timestamp = self._strategy.current_timestamp super().stop() self.unregister_events() @@ -234,9 +235,7 @@ def get_in_flight_order(self, connector_name: str, order_id: str): :param order_id: The ID of the order. :return: The in-flight order. 
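Returning to the trailing-stop handling added to `position_executor_simulator.py` in the hunk above: once `net_pnl_pct` has exceeded the activation threshold, the stop level trails the running maximum of `net_pnl_pct - trailing_delta`, and the simulated position closes on the first bar where PnL falls below that level. The following is a minimal standalone pandas sketch of that rule; the column names mirror the diff but the data and parameter values are made up for illustration.

```python
# Standalone sketch (made-up data) of the trailing-stop simulation logic shown above.
import pandas as pd

activation_pct = 0.01   # trailing stop arms once unrealized PnL exceeds 1%
trailing_delta = 0.005  # then trails 0.5% below the best PnL seen so far

df = pd.DataFrame({
    "timestamp": [1, 2, 3, 4, 5],
    "net_pnl_pct": [0.002, 0.012, 0.020, 0.018, 0.013],
})

# Arm the stop from the first bar where the activation threshold was crossed.
armed = (df["net_pnl_pct"] > activation_pct).cummax()
df.loc[armed, "ts"] = (df["net_pnl_pct"] - trailing_delta).cummax()

# The first bar where PnL drops below the trailing level closes the position.
hit = df[(~df["ts"].isna()) & (df["net_pnl_pct"] < df["ts"])]
print(df)
print("trailing stop hit at timestamp:", hit["timestamp"].min())  # 5 in this sample
```

With the sample series the stop arms on the second bar, trails up to 1.5%, and fires on the last bar when PnL dips to 1.3%.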
""" - connector = self.connectors[connector_name] - order = connector._order_tracker.fetch_order(client_order_id=order_id) - return order + return self.connectors[connector_name]._order_tracker.fetch_order(client_order_id=order_id) def register_events(self): """ diff --git a/hummingbot/strategy_v2/executors/executor_orchestrator.py b/hummingbot/strategy_v2/executors/executor_orchestrator.py index a3060fc1f5..f8546f814d 100644 --- a/hummingbot/strategy_v2/executors/executor_orchestrator.py +++ b/hummingbot/strategy_v2/executors/executor_orchestrator.py @@ -52,7 +52,8 @@ def stop(self): for executor in executors_list: if not executor.is_closed: executor.early_stop() - # then we store all executors + + def store_all_executors(self): for controller_id, executors_list in self.executors.items(): for executor in executors_list: MarketsRecorder.get_instance().store_or_update_executor(executor) @@ -167,6 +168,7 @@ def generate_performance_report(self, controller_id: str) -> PerformanceReport: for executor in combined_executors: close_type = executor.close_type if close_type == CloseType.FAILED: + close_type_counts[close_type] = close_type_counts.get(close_type, 0) + 1 continue elif close_type is not None: if close_type in close_type_counts: diff --git a/hummingbot/strategy_v2/executors/position_executor/position_executor.py b/hummingbot/strategy_v2/executors/position_executor/position_executor.py index 22ab3f945d..03670b54d4 100644 --- a/hummingbot/strategy_v2/executors/position_executor/position_executor.py +++ b/hummingbot/strategy_v2/executors/position_executor/position_executor.py @@ -48,8 +48,14 @@ def __init__(self, strategy: ScriptStrategyBase, config: PositionExecutorConfig, error = "Only market orders are supported for time_limit and stop_loss" self.logger().error(error) raise ValueError(error) - super().__init__(strategy=strategy, config=config, connectors=[config.connector_name], update_interval=update_interval) + super().__init__(strategy=strategy, config=config, connectors=[config.connector_name], + update_interval=update_interval) + if not config.entry_price: + open_order_price_type = PriceType.BestBid if config.side == TradeType.BUY else PriceType.BestAsk + config.entry_price = self.get_price(config.connector_name, config.trading_pair, + price_type=open_order_price_type) self.config: PositionExecutorConfig = config + self.trading_rules = self.get_trading_rules(self.config.connector_name, self.config.trading_pair) # Order tracking self._open_order: Optional[TrackedOrder] = None @@ -160,11 +166,15 @@ def entry_price(self) -> Decimal: """ if self._open_order and self._open_order.is_done: return self._open_order.average_executed_price - elif self.config.entry_price: - return self.config.entry_price + elif self.config.triple_barrier_config.open_order_type == OrderType.LIMIT_MAKER: + if self.config.side == TradeType.BUY: + best_bid = self.get_price(self.config.connector_name, self.config.trading_pair, PriceType.BestBid) + return min(self.config.entry_price, best_bid) + else: + best_ask = self.get_price(self.config.connector_name, self.config.trading_pair, PriceType.BestAsk) + return max(self.config.entry_price, best_ask) else: - price_type = PriceType.BestAsk if self.config.side == TradeType.BUY else PriceType.BestBid - return self.get_price(self.config.connector_name, self.config.trading_pair, price_type=price_type) + return self.config.entry_price @property def close_price(self) -> Decimal: @@ -226,7 +236,8 @@ def get_net_pnl_pct(self) -> Decimal: :return: The net pnl percentage. 
""" - return self.net_pnl_quote / self.open_filled_amount_quote if self.open_filled_amount_quote != Decimal("0") else Decimal("0") + return self.net_pnl_quote / self.open_filled_amount_quote if self.open_filled_amount_quote != Decimal( + "0") else Decimal("0") @property def end_time(self) -> Optional[float]: @@ -246,8 +257,18 @@ def take_profit_price(self): :return: The take profit price. """ - take_profit_price = self.entry_price * (1 + self.config.triple_barrier_config.take_profit) \ - if self.config.side == TradeType.BUY else self.entry_price * (1 - self.config.triple_barrier_config.take_profit) + if self.config.side == TradeType.BUY: + take_profit_price = self.entry_price * (1 + self.config.triple_barrier_config.take_profit) + if self.config.triple_barrier_config.take_profit_order_type == OrderType.LIMIT_MAKER: + take_profit_price = max(take_profit_price, + self.get_price(self.config.connector_name, self.config.trading_pair, + PriceType.BestAsk)) + else: + take_profit_price = self.entry_price * (1 - self.config.triple_barrier_config.take_profit) + if self.config.triple_barrier_config.take_profit_order_type == OrderType.LIMIT_MAKER: + take_profit_price = min(take_profit_price, + self.get_price(self.config.connector_name, self.config.trading_pair, + PriceType.BestBid)) return take_profit_price async def control_task(self): @@ -271,8 +292,7 @@ def open_orders_completed(self): """ open_order_condition = not self._open_order or self._open_order.is_done take_profit_condition = not self._take_profit_limit_order or self._take_profit_limit_order.is_done - failed_orders_condition = not self._failed_orders or all([order.is_done for order in self._failed_orders]) - return open_order_condition and take_profit_condition and failed_orders_condition + return open_order_condition and take_profit_condition async def control_shutdown_process(self): """ @@ -280,24 +300,44 @@ async def control_shutdown_process(self): :return: None """ - if math.isclose(self.open_filled_amount, self.close_filled_amount, rel_tol=1e-2): - if self.open_orders_completed(): - self.stop() - else: - self.cancel_open_orders() - self._current_retries += 1 - elif self._close_order: - if self._current_retries < self._max_retries / 2: - self.logger().info(f"Waiting for close order to be filled --> Filled amount: {self.close_filled_amount} | Open amount: {self.open_filled_amount}") + self.close_timestamp = self._strategy.current_timestamp + open_orders_completed = self.open_orders_completed() + order_execution_completed = self.open_and_close_volume_match() + if open_orders_completed and order_execution_completed: + self.stop() + else: + await self.control_close_order() + self.cancel_open_orders() + self._current_retries += 1 + await asyncio.sleep(2.0) + + def open_and_close_volume_match(self): + if self.open_filled_amount == Decimal("0"): + return True + else: + return self._close_order and self._close_order.is_filled + + async def control_close_order(self): + """ + This method is responsible for controlling the close order. If the close order is filled and the open orders are + completed, it stops the executor. If the close order is not placed, it places the close order. If the close order + is not filled, it waits for the close order to be filled and requests the order information to the connector. 
+ """ + if self._close_order: + in_flight_order = self.get_in_flight_order(self.config.connector_name, + self._close_order.order_id) if not self._close_order.order else self._close_order.order + if in_flight_order: + self._close_order.order = in_flight_order + connector = self.connectors[self.config.connector_name] + await connector._update_orders_with_error_handler( + orders=[in_flight_order], + error_handler=connector._handle_update_error_for_lost_order) + self.logger().info("Waiting for close order to be filled") else: - self.logger().info("No fill on close order, will be retried.") - self.cancel_close_order() - self._current_retries += 1 + self._failed_orders.append(self._close_order) + self._close_order = None else: - self.logger().info(f"Open amount: {self.open_filled_amount}, Close amount: {self.close_filled_amount}") self.place_close_order_and_cancel_open_orders(close_type=self.close_type) - self._current_retries += 1 - await asyncio.sleep(1.0) def evaluate_max_retries(self): """ @@ -381,9 +421,11 @@ def control_barriers(self): :return: None """ - self.control_stop_loss() - self.control_trailing_stop() - self.control_take_profit() + if self._open_order.is_filled and self.open_filled_amount >= self.trading_rules.min_order_size \ + and self.open_filled_amount_quote >= self.trading_rules.min_notional_size: + self.control_stop_loss() + self.control_trailing_stop() + self.control_take_profit() self.control_time_limit() def place_close_order_and_cancel_open_orders(self, close_type: CloseType, price: Decimal = Decimal("NaN")): @@ -396,9 +438,9 @@ def place_close_order_and_cancel_open_orders(self, close_type: CloseType, price: :param price: The price to be used in the close order. :return: None """ + self.cancel_open_orders() delta_amount_to_close = self.open_filled_amount - self.close_filled_amount - trading_rules = self.get_trading_rules(self.config.connector_name, self.config.trading_pair) - if delta_amount_to_close > trading_rules.min_order_size: + if delta_amount_to_close > self.trading_rules.min_order_size: order_id = self.place_order( connector_name=self.config.connector_name, trading_pair=self.config.trading_pair, @@ -410,9 +452,7 @@ def place_close_order_and_cancel_open_orders(self, close_type: CloseType, price: ) self._close_order = TrackedOrder(order_id=order_id) self.logger().debug(f"Placing close order --> Filled amount: {self.open_filled_amount}") - self.cancel_open_orders() self.close_type = close_type - self.close_timestamp = self._strategy.current_timestamp self._status = RunnableStatus.SHUTTING_DOWN def cancel_open_orders(self): @@ -446,12 +486,13 @@ def control_take_profit(self): :return: None """ - if self.open_filled_amount > Decimal("0") and self.config.triple_barrier_config.take_profit: + if self.config.triple_barrier_config.take_profit: if self.config.triple_barrier_config.take_profit_order_type.is_limit_type(): if not self._take_profit_limit_order: self.place_take_profit_limit_order() - elif self._take_profit_limit_order.order and not math.isclose(self._take_profit_limit_order.order.amount, - self._open_order.executed_amount_base): + elif self._take_profit_limit_order.order and not math.isclose( + self._take_profit_limit_order.order.amount, + self._open_order.executed_amount_base): self.renew_take_profit_order() elif self.net_pnl_pct >= self.config.triple_barrier_config.take_profit: self.place_close_order_and_cancel_open_orders(close_type=CloseType.TAKE_PROFIT) @@ -549,12 +590,13 @@ def update_tracked_orders_with_order_id(self, order_id: str): :param order_id: The 
order_id to be used as a reference. :return: None """ + in_flight_order = self.get_in_flight_order(self.config.connector_name, order_id) if self._open_order and self._open_order.order_id == order_id: - self._open_order.order = self.get_in_flight_order(self.config.connector_name, order_id) + self._open_order.order = in_flight_order elif self._close_order and self._close_order.order_id == order_id: - self._close_order.order = self.get_in_flight_order(self.config.connector_name, order_id) + self._close_order.order = in_flight_order elif self._take_profit_limit_order and self._take_profit_limit_order.order_id == order_id: - self._take_profit_limit_order.order = self.get_in_flight_order(self.config.connector_name, order_id) + self._take_profit_limit_order.order = in_flight_order def process_order_created_event(self, _, market, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent]): """ @@ -568,13 +610,12 @@ def process_order_completed_event(self, _, market, event: Union[BuyOrderComplete This method is responsible for processing the order completed event. Here we will check if the id is one of the tracked orders and update the state """ - if self._close_order and self._close_order.order_id == event.order_id: - self.close_timestamp = event.timestamp - elif self._take_profit_limit_order and self._take_profit_limit_order.order_id == event.order_id: + self._total_executed_amount_backup += event.base_asset_amount + self.update_tracked_orders_with_order_id(event.order_id) + + if self._take_profit_limit_order and self._take_profit_limit_order.order_id == event.order_id: self.close_type = CloseType.TAKE_PROFIT - self.close_timestamp = event.timestamp self._close_order = self._take_profit_limit_order - self.cancel_open_orders() self._status = RunnableStatus.SHUTTING_DOWN def process_order_filled_event(self, _, market, event: OrderFilledEvent): @@ -583,7 +624,6 @@ def process_order_filled_event(self, _, market, event: OrderFilledEvent): _total_executed_amount_backup, that can be used if the InFlightOrder is not available. 
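The `control_barriers` change above gates the price barriers on the exchange's trading rules: stop loss, trailing stop and take profit are only evaluated once the open order has filled at least the minimum order size and minimum notional, while the time limit is always checked. A minimal standalone sketch of that gate; the `TradingRules` dataclass and function names are illustrative stand-ins, not the connector's actual types.

```python
# Standalone sketch of the barrier-gating condition added in control_barriers() above.
from dataclasses import dataclass
from decimal import Decimal


@dataclass
class TradingRules:
    min_order_size: Decimal
    min_notional_size: Decimal


def should_check_price_barriers(open_order_filled: bool,
                                filled_amount_base: Decimal,
                                filled_amount_quote: Decimal,
                                rules: TradingRules) -> bool:
    return (open_order_filled
            and filled_amount_base >= rules.min_order_size
            and filled_amount_quote >= rules.min_notional_size)


if __name__ == "__main__":
    rules = TradingRules(min_order_size=Decimal("0.001"), min_notional_size=Decimal("10"))
    # Only 5 USDT filled so far: too small to close safely, so skip the price barriers.
    print(should_check_price_barriers(True, Decimal("0.0001"), Decimal("5"), rules))  # False
    print(should_check_price_barriers(True, Decimal("0.01"), Decimal("500"), rules))  # True
```

The gate avoids trying to place a closing order for a fill that is below the exchange minimums and therefore could not be closed anyway.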
""" - self._total_executed_amount_backup += event.amount self.update_tracked_orders_with_order_id(event.order_id) def process_order_canceled_event(self, _, market: ConnectorBase, event: OrderCancelledEvent): diff --git a/hummingbot/strategy_v2/models/executors.py b/hummingbot/strategy_v2/models/executors.py index 4c413a3872..0b8f0a7699 100644 --- a/hummingbot/strategy_v2/models/executors.py +++ b/hummingbot/strategy_v2/models/executors.py @@ -66,3 +66,17 @@ def is_done(self): return self.order.is_done else: return False + + @property + def is_open(self): + if self.order: + return self.order.is_open + else: + return False + + @property + def is_filled(self): + if self.order: + return self.order.is_filled + else: + return False diff --git a/hummingbot/templates/conf_fee_overrides_TEMPLATE.yml b/hummingbot/templates/conf_fee_overrides_TEMPLATE.yml index 0c28f8805b..b9793a2587 100644 --- a/hummingbot/templates/conf_fee_overrides_TEMPLATE.yml +++ b/hummingbot/templates/conf_fee_overrides_TEMPLATE.yml @@ -62,6 +62,12 @@ bitmart_maker_percent_fee: bitmart_percent_fee_token: bitmart_taker_fixed_fees: bitmart_taker_percent_fee: +bitstamp_buy_percent_fee_deducted_from_returns: +bitstamp_maker_fixed_fees: +bitstamp_maker_percent_fee: +bitstamp_percent_fee_token: +bitstamp_taker_fixed_fees: +bitstamp_taker_percent_fee: btc_markets_percent_fee_token: btc_markets_maker_percent_fee: btc_markets_taker_percent_fee: @@ -84,12 +90,6 @@ coinbase_advanced_trade_maker_percent_fee: coinbase_advanced_trade_percent_fee_token: coinbase_advanced_trade_taker_fixed_fees: coinbase_advanced_trade_taker_percent_fee: -coinbase_pro_buy_percent_fee_deducted_from_returns: -coinbase_pro_maker_fixed_fees: -coinbase_pro_maker_percent_fee: -coinbase_pro_percent_fee_token: -coinbase_pro_taker_fixed_fees: -coinbase_pro_taker_percent_fee: dydx_perpetual_buy_percent_fee_deducted_from_returns: dydx_perpetual_maker_fixed_fees: dydx_perpetual_maker_percent_fee: diff --git a/install b/install index 713bdd9967..3cbc9e85b8 100755 --- a/install +++ b/install @@ -12,12 +12,38 @@ fi if [ "${CONDA_EXE}_" == "_" ]; then echo "Please install Anaconda w/ Python 3.7+ first" - echo "See: https://www.anaconda.com/distribution/" + echo "See: https://www.anaconda.com/download" exit 1 fi CONDA_BIN=$(dirname ${CONDA_EXE}) -ENV_FILE=setup/environment.yml +ENV_FILE_1="setup/environment.yml" +ENV_FILE_2="setup/environment_dydx.yml" + +# The default is ENV_FILE_1 +ENV_FILE=$ENV_FILE_1 + +# Parse command line arguments +USE_DYDX=false +while [[ $# -gt 0 ]]; do + key="$1" + case $key in + --dydx) + USE_DYDX=true + shift + ;; + *) + # Handle other unidentified parameters + shift + ;; + esac +done + +# If you use the --dydx flag, switch to ENV_FILE_2 +if [ "$USE_DYDX" = true ]; then + ENV_FILE=$ENV_FILE_2 + echo "install dydx version." +fi if ${CONDA_EXE} env list | egrep -qe "^hummingbot"; then ${CONDA_EXE} env update -f $ENV_FILE diff --git a/installation/README.md b/installation/README.md deleted file mode 100644 index 97a2bfa175..0000000000 --- a/installation/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Installation - -Formerly, this folder contains automated scripts for installing Hummingbot via Docker and from source. It has now been deprecated in favor of newer, updated instructions. - -### Via Docker - -Visit the [Deploy Examples](https://github.com/hummingbot/deploy-examples) repo for instructions for deploying Hummingbot in various configurations with Docker. 
- -### From Source - -For advanced users and developers who would like to access and modify the Hummingbot program files, installation from source is preferred. - -Visit the [Installation](https://docs.hummingbot.io/installation/) section in the Hummingbot docs for platform-specific guides. \ No newline at end of file diff --git a/models/random_forest.joblib b/models/random_forest.joblib deleted file mode 100644 index a9d984b924..0000000000 Binary files a/models/random_forest.joblib and /dev/null differ diff --git a/models/random_forest_pre_processor.joblib b/models/random_forest_pre_processor.joblib deleted file mode 100644 index c40c6490c0..0000000000 Binary files a/models/random_forest_pre_processor.joblib and /dev/null differ diff --git a/pmm_scripts/dynamic_price_band_script.py b/pmm_scripts/dynamic_price_band_script.py deleted file mode 100644 index 6c26f01d0a..0000000000 --- a/pmm_scripts/dynamic_price_band_script.py +++ /dev/null @@ -1,45 +0,0 @@ -from decimal import Decimal - -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - -s_decimal_1 = Decimal("1") - - -class DynamicPriceBandPMMScript(PMMScriptBase): - """ - Demonstrates how to set a band around a mid price moving average, the strategy is to stop buying when the mid price - reaches the upper bound of the band and to stop selling when the mid price breaches the lower bound. - """ - - # Let's set the upper bound of the band to 5% away from the mid price moving average - band_upper_bound_pct = Decimal("0.05") - # Let's set the lower bound of the band to 3% away from the mid price moving average - band_lower_bound_pct = Decimal("0.03") - # Let's sample mid prices once every 10 seconds - avg_interval = 10 - # Let's average the last 5 samples - avg_length = 5 - - def __init__(self): - super().__init__() - - def on_tick(self): - avg_mid_price = self.avg_mid_price(self.avg_interval, self.avg_length) - # The avg can be None when the bot just started as there are not enough mid prices to sample values from. - if avg_mid_price is None: - return - upper_bound = avg_mid_price * (s_decimal_1 + self.band_upper_bound_pct) - lower_bound = avg_mid_price * (s_decimal_1 - self.band_lower_bound_pct) - # When mid_price reaches the upper bound, we expect the price to bounce back as such we don't want be a buyer - # (as we can probably buy back at a cheaper price later). - # If you anticipate the opposite, i.e. the price breaks out on a run away move, you can protect your inventory - # by stop selling (setting the sell_levels to 0). - if self.mid_price >= upper_bound: - self.pmm_parameters.buy_levels = 0 - else: - self.pmm_parameters.buy_levels = self.pmm_parameters.order_levels - # When mid_price reaches the lower bound, we don't want to be a seller. - if self.mid_price <= lower_bound: - self.pmm_parameters.sell_levels = 0 - else: - self.pmm_parameters.sell_levels = self.pmm_parameters.order_levels diff --git a/pmm_scripts/hello_world_script.py b/pmm_scripts/hello_world_script.py deleted file mode 100644 index 5013324e38..0000000000 --- a/pmm_scripts/hello_world_script.py +++ /dev/null @@ -1,21 +0,0 @@ -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - - -class HelloWorldPMMScript(PMMScriptBase): - """ - Demonstrates how to send messages using notify and log functions. It also shows how errors and commands are handled. 
- """ - - def on_tick(self): - if len(self.mid_prices) < 3: - self.notify("Hello Hummingbots World!") - self.log("Hello world logged.") - elif 3 <= len(self.mid_prices) < 5: - # This below statement will cause ZeroDivisionError, Hummingbot will later report this on the log screen. - _ = 1 / 0 - - def on_command(self, cmd, args): - if cmd == 'ping': - self.notify('pong!') - else: - self.notify(f'Unrecognised command: {cmd}') diff --git a/pmm_scripts/inv_skew_using_spread_script.py b/pmm_scripts/inv_skew_using_spread_script.py deleted file mode 100644 index c98fa78c31..0000000000 --- a/pmm_scripts/inv_skew_using_spread_script.py +++ /dev/null @@ -1,91 +0,0 @@ -from decimal import Decimal - -from hummingbot.core.event.events import BuyOrderCompletedEvent, SellOrderCompletedEvent -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - -# Enter the inventory % threshold. When the % of an asset goes below this value, the script will change the spread value -inv_pct_limit = 0.10 -# Enter the spread value to be used if the inventory % threshold is reached -new_spread = Decimal("0.005") - - -class InventorySkewUsingSpread(PMMScriptBase): - - def __init__(self): - super().__init__() - # Declaration of variables used by the script - self.base_asset = None - self.quote_asset = None - self.base_balance = Decimal("0.0000") - self.quote_balance = Decimal("0.0000") - self.original_bid_spread = None - self.original_ask_spread = None - self.base_inv_value = Decimal("0.0000") - self.quote_pct = Decimal("0.0000") - self.base_pct = Decimal("0.0000") - self.ask_skew_active = False - self.bid_skew_active = False - self.total_inv_value = None - - def on_tick(self): - - # Separate and store the assets of the market the bot is working on - if self.base_asset is None or self.quote_asset is None: - self.base_asset, self.quote_asset = self.pmm_market_info.trading_pair.split("-") - - # Check what is the current balance of each asset - self.base_balance = self.all_total_balances[f"{self.pmm_market_info.exchange}"].get(self.base_asset, self.base_balance) - self.quote_balance = self.all_total_balances[f"{self.pmm_market_info.exchange}"].get(self.quote_asset, self.quote_balance) - - # At the script start, the values of the original configuration bid and ask spread is stored for later use - if self.original_bid_spread is None or self.original_ask_spread is None: - self.original_bid_spread = self.pmm_parameters.bid_spread - self.original_ask_spread = self.pmm_parameters.ask_spread - - if self.ask_skew_active is False: - self.original_ask_spread = self.pmm_parameters.ask_spread - - if self.bid_skew_active is False: - self.original_bid_spread = self.pmm_parameters.bid_spread - - # calculate the total % value and it's proportion - self.base_inv_value = self.base_balance * self.mid_price - self.total_inv_value = self.base_inv_value + self.quote_balance - self.base_pct = self.base_inv_value / self.total_inv_value - self.quote_pct = self.quote_balance / self.total_inv_value - - # check if the inventory value % of an asset is below the chosen threshold to define what spread will be used - if self.quote_pct < inv_pct_limit: - self.ask_skew_active = True - self.pmm_parameters.ask_spread = new_spread - # self.log(f"{self.base_asset} inventory % below {inv_pct_limit:.2%}. 
Changing ask_spread to {new_spread:.2%}") - else: - self.ask_skew_active = False - self.pmm_parameters.ask_spread = self.original_ask_spread - if self.base_pct < inv_pct_limit: - self.bid_skew_active = True - self.pmm_parameters.bid_spread = new_spread - # self.log(f"{self.quote_asset} inventory % below {inv_pct_limit:.2%}. Changing bid_spread to {new_spread:.2%}") - else: - self.bid_skew_active = False - self.pmm_parameters.bid_spread = self.original_bid_spread - - return - - def on_buy_order_completed(self, event: BuyOrderCompletedEvent): - return - - def on_sell_order_completed(self, event: SellOrderCompletedEvent): - return - - def on_status(self) -> str: - # Show the current values when using the `status` command - return f"\n"\ - f"original bid spread = {self.original_bid_spread:.2%} \n" \ - f"original ask spread = {self.original_ask_spread:.2%} \n" \ - f"ask skew active? = {self.ask_skew_active} \n" \ - f"current ask spread = {self.pmm_parameters.ask_spread:.2%} \n" \ - f"bid skew active? = {self.bid_skew_active} \n" \ - f"current bid spread = {self.pmm_parameters.bid_spread:.2%}" - - # --------------- diff --git a/pmm_scripts/ping_pong_script.py b/pmm_scripts/ping_pong_script.py deleted file mode 100644 index 79ff9d0b09..0000000000 --- a/pmm_scripts/ping_pong_script.py +++ /dev/null @@ -1,41 +0,0 @@ -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - SellOrderCompletedEvent -) -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - - -class PingPongPMMScript(PMMScriptBase): - """ - Demonstrates how to set up a ping pong trading strategy which alternates buy and sell orders. - If a buy order is filled, there will be one less buy order submitted at the next refresh cycle. - If a sell order is filled, there will be one less sell order submitted at the next refresh cycle. - The balance is positive if there are more completed buy orders than sell orders. - """ - - def __init__(self): - super().__init__() - self.ping_pong_balance = 0 - - def on_tick(self): - strategy = self.pmm_parameters - buys = strategy.order_levels - sells = strategy.order_levels - if self.ping_pong_balance > 0: - buys -= self.ping_pong_balance - buys = max(0, buys) - elif self.ping_pong_balance < 0: - sells -= abs(self.ping_pong_balance) - sells = max(0, sells) - strategy.buy_levels = buys - strategy.sell_levels = sells - - def on_buy_order_completed(self, event: BuyOrderCompletedEvent): - self.ping_pong_balance += 1 - - def on_sell_order_completed(self, event: SellOrderCompletedEvent): - self.ping_pong_balance -= 1 - - def on_status(self): - # return the current balance here to be displayed when status command is executed. - return f"ping_pong_balance: {self.ping_pong_balance}" diff --git a/pmm_scripts/price_band_script.py b/pmm_scripts/price_band_script.py deleted file mode 100644 index 4257c0a8de..0000000000 --- a/pmm_scripts/price_band_script.py +++ /dev/null @@ -1,29 +0,0 @@ -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - - -class PriceBandPMMScript(PMMScriptBase): - """ - Demonstrates how to set a fixed band, the strategy is to stop buying when the mid price reaches the upper bound - of the band and to stop selling when the mid price breaches the lower bound. - """ - - band_upper_bound = 105 - band_lower_bound = 95 - - def __init__(self): - super().__init__() - - def on_tick(self): - # When mid_price reaches the upper bound, we expect the price to bounce back as such we don't want be a buyer - # (as we can probably buy back at a cheaper price later). 
- # If you anticipate the opposite, i.e. the price breaks out on a run away move, you can protect your inventory - # by stop selling (setting the sell_levels to 0). - if self.mid_price >= self.band_upper_bound: - self.pmm_parameters.buy_levels = 0 - else: - self.pmm_parameters.buy_levels = self.pmm_parameters.order_levels - # When mid_price breaches the lower bound, we don't want to be a seller. - if self.mid_price <= self.band_lower_bound: - self.pmm_parameters.sell_levels = 0 - else: - self.pmm_parameters.sell_levels = self.pmm_parameters.order_levels diff --git a/pmm_scripts/spreads_adjusted_on_volatility_script.py b/pmm_scripts/spreads_adjusted_on_volatility_script.py deleted file mode 100644 index 43c847d14a..0000000000 --- a/pmm_scripts/spreads_adjusted_on_volatility_script.py +++ /dev/null @@ -1,108 +0,0 @@ -import time -from datetime import datetime -from decimal import Decimal -from os.path import join, realpath - -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - -s_decimal_1 = Decimal("1") -LOGS_PATH = realpath(join(__file__, "../../logs/")) -SCRIPT_LOG_FILE = f"{LOGS_PATH}/logs_script.log" - - -def log_to_file(file_name, message): - with open(file_name, "a+") as f: - f.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " - " + message + "\n") - - -class SpreadsAdjustedOnVolatility(PMMScriptBase): - """ - Demonstrates how to adjust bid and ask spreads based on price volatility. - The volatility, in this example, is simply a price change compared to the previous cycle regardless of its - direction, e.g. if price changes -3% (or 3%), the volatility is 3%. - To update our pure market making spreads, we're gonna smooth out the volatility by averaging it over a short period - (short_period), and we need a benchmark to compare its value against. In this example the benchmark is a median - long period price volatility (you can also use a fixed number, e.g. 3% - if you expect this to be the norm for your - market). - For example, if our bid_spread and ask_spread are at 0.8%, and the median long term volatility is 1.5%. - Recently the volatility jumps to 2.6% (on short term average), we're gonna adjust both our bid and ask spreads to - 1.9% (the original spread - 0.8% plus the volatility delta - 1.1%). Then after a short while the volatility drops - back to 1.5%, our spreads are now adjusted back to 0.8%. - """ - - # Let's set interval and sample sizes as below. - # These numbers are for testing purposes only (in reality, they should be larger numbers) - # interval is a interim which to pick historical mid price samples from, if you set it to 5, the first sample is - # the last (current) mid price, the second sample is a past mid price 5 seconds before the last, and so on. 
- interval = 5 - # short_period is how many interval to pick the samples for the average short term volatility calculation, - # for short_period of 3, this is 3 samples (5 seconds interval), of the last 15 seconds - short_period = 3 - # long_period is how many interval to pick the samples for the median long term volatility calculation, - # for long_period of 10, this is 10 samples (5 seconds interval), of the last 50 seconds - long_period = 10 - last_stats_logged = 0 - - def __init__(self): - super().__init__() - self.original_bid_spread = None - self.original_ask_spread = None - self.avg_short_volatility = None - self.median_long_volatility = None - - def volatility_msg(self, include_mid_price=False): - if self.avg_short_volatility is None or self.median_long_volatility is None: - return "short_volatility: N/A long_volatility: N/A" - mid_price_msg = f" mid_price: {self.mid_price:<15}" if include_mid_price else "" - return f"short_volatility: {self.avg_short_volatility:.2%} " \ - f"long_volatility: {self.median_long_volatility:.2%}{mid_price_msg}" - - def on_tick(self): - # First, let's keep the original spreads. - if self.original_bid_spread is None: - self.original_bid_spread = self.pmm_parameters.bid_spread - self.original_ask_spread = self.pmm_parameters.ask_spread - - # Average volatility (price change) over a short period of time, this is to detect recent sudden changes. - self.avg_short_volatility = self.avg_price_volatility(self.interval, self.short_period) - # Median volatility over a long period of time, this is to find the market norm volatility. - # We use median (instead of average) to find the middle volatility value - this is to avoid recent - # spike affecting the average value. - self.median_long_volatility = self.median_price_volatility(self.interval, self.long_period) - - # If the bot just got started, we'll not have these numbers yet as there is not enough mid_price sample size. - # We'll start to have these numbers after interval * long_term_period. - if self.avg_short_volatility is None or self.median_long_volatility is None: - return - - # Let's log some stats once every 5 minutes - if time.time() - self.last_stats_logged > 60 * 5: - log_to_file(SCRIPT_LOG_FILE, self.volatility_msg(True)) - self.last_stats_logged = time.time() - - # This volatility delta will be used to adjust spreads. - delta = self.avg_short_volatility - self.median_long_volatility - # Let's round the delta into 0.25% increment to ignore noise and to avoid adjusting the spreads too often. - spread_adjustment = self.round_by_step(delta, Decimal("0.0025")) - # Show the user on what's going, you can remove this statement to stop the notification. - # self.notify(f"avg_short_volatility: {avg_short_volatility} median_long_volatility: {median_long_volatility} " - # f"spread_adjustment: {spread_adjustment}") - new_bid_spread = self.original_bid_spread + spread_adjustment - # Let's not set the spreads below the originals, this is to avoid having spreads to be too close - # to the mid price. 
- new_bid_spread = max(self.original_bid_spread, new_bid_spread) - old_bid_spread = self.pmm_parameters.bid_spread - if new_bid_spread != self.pmm_parameters.bid_spread: - self.pmm_parameters.bid_spread = new_bid_spread - - new_ask_spread = self.original_ask_spread + spread_adjustment - new_ask_spread = max(self.original_ask_spread, new_ask_spread) - if new_ask_spread != self.pmm_parameters.ask_spread: - self.pmm_parameters.ask_spread = new_ask_spread - if old_bid_spread != new_bid_spread: - log_to_file(SCRIPT_LOG_FILE, self.volatility_msg(True)) - log_to_file(SCRIPT_LOG_FILE, f"spreads adjustment: Old Value: {old_bid_spread:.2%} " - f"New Value: {new_bid_spread:.2%}") - - def on_status(self) -> str: - return self.volatility_msg() diff --git a/pmm_scripts/update_parameters_test_script.py b/pmm_scripts/update_parameters_test_script.py deleted file mode 100644 index 1b0957be5c..0000000000 --- a/pmm_scripts/update_parameters_test_script.py +++ /dev/null @@ -1,34 +0,0 @@ -from decimal import Decimal -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - - -class UpdateParametersTestPMMScript(PMMScriptBase): - """ - This PMM script is intended for unit testing purpose only. - """ - - def __init__(self): - super().__init__() - self._has_updated = False - - def on_tick(self): - if len(self.mid_prices) >= 5 and not self._has_updated: - self.pmm_parameters.buy_levels = 1 - self.pmm_parameters.sell_levels = 2 - self.pmm_parameters.order_levels = 3 - self.pmm_parameters.bid_spread = Decimal("0.1") - self.pmm_parameters.ask_spread = Decimal("0.2") - self.pmm_parameters.hanging_orders_cancel_pct = Decimal("0.3") - self.pmm_parameters.hanging_orders_enabled = True - self.pmm_parameters.filled_order_delay = 50.0 - self.pmm_parameters.order_refresh_tolerance_pct = Decimal("0.01") - self.pmm_parameters.order_refresh_time = 10.0 - self.pmm_parameters.order_level_amount = Decimal("4") - self.pmm_parameters.order_level_spread = Decimal("0.05") - self.pmm_parameters.order_amount = Decimal("20") - self.pmm_parameters.inventory_skew_enabled = True - self.pmm_parameters.inventory_range_multiplier = Decimal("2") - self.pmm_parameters.inventory_target_base_pct = Decimal("0.6") - self.pmm_parameters.order_override = {"order_1": ["buy", Decimal("0.5"), Decimal("100")], - "order_2": ["sell", Decimal("0.55"), Decimal("101")], } - self._has_updated = True diff --git a/redist/VC_redist.x64.exe b/redist/VC_redist.x64.exe deleted file mode 100644 index 7476d75aaa..0000000000 Binary files a/redist/VC_redist.x64.exe and /dev/null differ diff --git a/scripts/community/adjusted_mid_price.py b/scripts/community/adjusted_mid_price.py index 82e9830a1c..fe73308457 100644 --- a/scripts/community/adjusted_mid_price.py +++ b/scripts/community/adjusted_mid_price.py @@ -112,7 +112,7 @@ def create_order(self, is_bid: bool) -> OrderCandidate: def adjusted_mid_price(self): """ - Returns the price of a hypothetical buy and sell or the base asset where the amout is {strategy.test_volume} + Returns the price of a hypothetical buy and sell or the base asset where the amount is {strategy.test_volume} """ ask_result = self.connector.get_quote_volume_for_base_amount(self.strategy["pair"], True, self.strategy["test_volume"]) bid_result = self.connector.get_quote_volume_for_base_amount(self.strategy["pair"], False, self.strategy["test_volume"]) diff --git a/scripts/community/arbitrage_with_smart_component.py b/scripts/community/arbitrage_with_smart_component.py index 63f4bbda46..3af760a50e 100644 --- 
a/scripts/community/arbitrage_with_smart_component.py +++ b/scripts/community/arbitrage_with_smart_component.py @@ -37,7 +37,7 @@ def on_tick(self): if sell_arbitrage_executor: self.active_sell_arbitrages.append(sell_arbitrage_executor) - def on_stop(self): + async def on_stop(self): for arbitrage in self.active_buy_arbitrages: arbitrage.stop() for arbitrage in self.active_sell_arbitrages: @@ -45,10 +45,10 @@ def on_stop(self): def create_arbitrage_executor(self, buying_exchange_pair: ConnectorPair, selling_exchange_pair: ConnectorPair): try: - base_asset_for_selling_exchange = self.connectors[selling_exchange_pair.exchange].get_available_balance( + base_asset_for_selling_exchange = self.connectors[selling_exchange_pair.connector_name].get_available_balance( selling_exchange_pair.trading_pair.split("-")[0]) if self.order_amount > base_asset_for_selling_exchange: - self.logger().info(f"Insufficient balance in exchange {selling_exchange_pair.exchange} " + self.logger().info(f"Insufficient balance in exchange {selling_exchange_pair.connector_name} " f"to sell {selling_exchange_pair.trading_pair.split('-')[0]} " f"Actual: {base_asset_for_selling_exchange} --> Needed: {self.order_amount}") return @@ -56,10 +56,10 @@ def create_arbitrage_executor(self, buying_exchange_pair: ConnectorPair, selling # Harcoded for now since we don't have a price oracle for WMATIC (CoinMarketCap rate source is requested and coming) pair_conversion = selling_exchange_pair.trading_pair.replace("W", "") price = RateOracle.get_instance().get_pair_rate(pair_conversion) - quote_asset_for_buying_exchange = self.connectors[buying_exchange_pair.exchange].get_available_balance( + quote_asset_for_buying_exchange = self.connectors[buying_exchange_pair.connector_name].get_available_balance( buying_exchange_pair.trading_pair.split("-")[1]) if self.order_amount * price > quote_asset_for_buying_exchange: - self.logger().info(f"Insufficient balance in exchange {buying_exchange_pair.exchange} " + self.logger().info(f"Insufficient balance in exchange {buying_exchange_pair.connector_name} " f"to buy {buying_exchange_pair.trading_pair.split('-')[1]} " f"Actual: {quote_asset_for_buying_exchange} --> Needed: {self.order_amount * price}") return @@ -72,9 +72,10 @@ def create_arbitrage_executor(self, buying_exchange_pair: ConnectorPair, selling ) arbitrage_executor = ArbitrageExecutor(strategy=self, config=arbitrage_config) + arbitrage_executor.start() return arbitrage_executor except Exception: - self.logger().error(f"Error creating executor to buy on {buying_exchange_pair.exchange} and sell on {selling_exchange_pair.exchange}") + self.logger().error(f"Error creating executor to buy on {buying_exchange_pair.connector_name} and sell on {selling_exchange_pair.connector_name}") def format_status(self) -> str: status = [] diff --git a/scripts/community/macd_bb_directional_strategy.py b/scripts/community/macd_bb_directional_strategy.py index 78ff53369c..34762a6ba5 100644 --- a/scripts/community/macd_bb_directional_strategy.py +++ b/scripts/community/macd_bb_directional_strategy.py @@ -112,7 +112,7 @@ def get_signal_tp_and_sl(self): indicators = [bbp, macdh, macd] return signal_value, take_profit, stop_loss, indicators - def on_stop(self): + async def on_stop(self): """ Without this functionality, the network iterator will continue running forever after stopping the strategy That's why is necessary to introduce this new feature to make a custom stop with the strategy. 
diff --git a/scripts/community/pmm_with_shifted_mid_dynamic_spreads.py b/scripts/community/pmm_with_shifted_mid_dynamic_spreads.py index a6ece2ec9a..5d132686fe 100644 --- a/scripts/community/pmm_with_shifted_mid_dynamic_spreads.py +++ b/scripts/community/pmm_with_shifted_mid_dynamic_spreads.py @@ -61,7 +61,7 @@ def __init__(self, connectors: Dict[str, ConnectorBase]): super().__init__(connectors) self.candles.start() - def on_stop(self): + async def on_stop(self): """ Without this functionality, the network iterator will continue running forever after stopping the strategy That's why is necessary to introduce this new feature to make a custom stop with the strategy. diff --git a/scripts/community/spot_perp_arb.py b/scripts/community/spot_perp_arb.py index 65b47626c9..ccd28b573e 100644 --- a/scripts/community/spot_perp_arb.py +++ b/scripts/community/spot_perp_arb.py @@ -395,7 +395,7 @@ def trade_state_log(self) -> str: return "close position" else: raise ValueError( - f"Strategy state: {self.strategy_state} shouldnt happen during trade." + f"Strategy state: {self.strategy_state} shouldn't happen during trade." ) def perp_trade_position_action(self) -> PositionAction: @@ -405,7 +405,7 @@ def perp_trade_position_action(self) -> PositionAction: return PositionAction.CLOSE else: raise ValueError( - f"Strategy state: {self.strategy_state} shouldnt happen during trade." + f"Strategy state: {self.strategy_state} shouldn't happen during trade." ) def format_status(self) -> str: diff --git a/scripts/gateway/amm_data_feed_example.py b/scripts/gateway/amm_data_feed_example.py index ee35cecf48..2e5c1b8ab3 100644 --- a/scripts/gateway/amm_data_feed_example.py +++ b/scripts/gateway/amm_data_feed_example.py @@ -27,7 +27,7 @@ def __init__(self, connectors: Dict[str, ConnectorBase]): self.amm_data_feed_uniswap.start() self.amm_data_feed_quickswap.start() - def on_stop(self): + async def on_stop(self): self.amm_data_feed_uniswap.stop() self.amm_data_feed_quickswap.stop() diff --git a/scripts/utility/backtest_mm_example.py b/scripts/utility/backtest_mm_example.py index 7f66b9e0c2..08a8b879f9 100644 --- a/scripts/utility/backtest_mm_example.py +++ b/scripts/utility/backtest_mm_example.py @@ -65,7 +65,7 @@ def on_tick(self): self.log_with_clock(logging.INFO, msg) self.notify_hb_app_with_timestamp(msg) - def on_stop(self): + async def on_stop(self): self.candle.stop() def get_trades_df(self, df): diff --git a/scripts/utility/candles_example.py b/scripts/utility/candles_example.py index a07ab4208c..37750d1c47 100644 --- a/scripts/utility/candles_example.py +++ b/scripts/utility/candles_example.py @@ -51,7 +51,7 @@ def all_candles_ready(self): def on_tick(self): pass - def on_stop(self): + async def on_stop(self): """ Without this functionality, the network iterator will continue running forever after stopping the strategy That's why is necessary to introduce this new feature to make a custom stop with the strategy. 
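A recurring change in the script files above (and in `ScriptStrategyBase` / `StrategyV2Base`) is that `on_stop` is now a coroutine, so shutdown hooks can await slow cleanup, for example waiting for executors to finish before storing them. Below is a small standalone asyncio sketch of the pattern, assuming nothing beyond the standard library; the classes are illustrative stand-ins, not the hummingbot base classes.

```python
# Standalone sketch of the async on_stop() pattern applied throughout this diff.
import asyncio


class FeedSketch:
    """Stand-in for a candles/data feed that can be stopped synchronously."""
    def __init__(self) -> None:
        self.running = True

    def stop(self) -> None:
        self.running = False


class StrategySketch:
    def __init__(self) -> None:
        self.candles = FeedSketch()

    async def on_stop(self) -> None:
        # Synchronous teardown still works as before...
        self.candles.stop()
        # ...but awaiting is now possible, e.g. giving in-flight work time to settle.
        await asyncio.sleep(0.1)


if __name__ == "__main__":
    asyncio.run(StrategySketch().on_stop())
```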
diff --git a/scripts/utility/download_candles.py b/scripts/utility/download_candles.py index da6cfc74a8..77be6c0a28 100644 --- a/scripts/utility/download_candles.py +++ b/scripts/utility/download_candles.py @@ -57,6 +57,6 @@ def on_tick(self): if all(candles_info["candles"].ready for candles_info in self.candles.values()): HummingbotApplication.main_application().stop() - def on_stop(self): + async def on_stop(self): for candles_info in self.candles.values(): candles_info["candles"].stop() diff --git a/scripts/utility/external_events_example.py b/scripts/utility/external_events_example.py index 7695497eef..ce67736fb4 100644 --- a/scripts/utility/external_events_example.py +++ b/scripts/utility/external_events_example.py @@ -30,7 +30,7 @@ def on_event(self, msg, name): def on_message(self, msg, topic): self.logger().info(f'Topic Message Callback fired: {topic} -> {msg}') - def on_stop(self): + async def on_stop(self): ExternalEventFactory.remove_listener('*', self.on_event) ExternalTopicFactory.remove_listener(self.listener) # ---------------------------------- diff --git a/scripts/utility/liquidations_example.py b/scripts/utility/liquidations_example.py index 04e564f996..c4f7727840 100644 --- a/scripts/utility/liquidations_example.py +++ b/scripts/utility/liquidations_example.py @@ -21,7 +21,7 @@ def __init__(self, connectors: Dict[str, ConnectorBase]): super().__init__(connectors) self.binance_liquidations_feed.start() - def on_stop(self): + async def on_stop(self): self.binance_liquidations_feed.stop() def on_tick(self): diff --git a/scripts/v2_with_controllers.py b/scripts/v2_with_controllers.py index 9d87df88fb..33850fcdf4 100644 --- a/scripts/v2_with_controllers.py +++ b/scripts/v2_with_controllers.py @@ -1,5 +1,6 @@ import os import time +from decimal import Decimal from typing import Dict, List, Optional, Set from pydantic import Field @@ -19,6 +20,8 @@ class GenericV2StrategyWithCashOutConfig(StrategyV2ConfigBase): candles_config: List[CandlesConfig] = [] markets: Dict[str, Set[str]] = {} time_to_cash_out: Optional[int] = None + max_global_drawdown: Optional[float] = None + max_controller_drawdown: Optional[float] = None class GenericV2StrategyWithCashOut(StrategyV2Base): @@ -36,6 +39,10 @@ def __init__(self, connectors: Dict[str, ConnectorBase], config: GenericV2Strate super().__init__(connectors, config) self.config = config self.cashing_out = False + self.max_pnl_by_controller = {} + self.performance_reports = {} + self.max_global_pnl = Decimal("0") + self.drawdown_exited_controllers = [] self.closed_executors_buffer: int = 30 self.performance_report_interval: int = 1 self._last_performance_report_timestamp = 0 @@ -58,20 +65,58 @@ def start(self, clock: Clock, timestamp: float) -> None: if self.mqtt_enabled: self._pub = ETopicPublisher("performance", use_bot_prefix=True) - def on_stop(self): + async def on_stop(self): + await super().on_stop() if self.mqtt_enabled: self._pub({controller_id: {} for controller_id in self.controllers.keys()}) self._pub = None def on_tick(self): super().on_tick() + self.performance_reports = {controller_id: self.executor_orchestrator.generate_performance_report(controller_id=controller_id).dict() for controller_id in self.controllers.keys()} self.control_cash_out() + self.control_max_drawdown() self.send_performance_report() + def control_max_drawdown(self): + if self.config.max_controller_drawdown: + self.check_max_controller_drawdown() + if self.config.max_global_drawdown: + self.check_max_global_drawdown() + + def 
check_max_controller_drawdown(self): + for controller_id, controller in self.controllers.items(): + controller_pnl = self.performance_reports[controller_id]["global_pnl_quote"] + last_max_pnl = self.max_pnl_by_controller[controller_id] + if controller_pnl > last_max_pnl: + self.max_pnl_by_controller[controller_id] = controller_pnl + else: + current_drawdown = last_max_pnl - controller_pnl + if current_drawdown > self.config.max_controller_drawdown: + self.logger().info(f"Controller {controller_id} reached max drawdown. Stopping the controller.") + controller.stop() + executors_order_placed = self.filter_executors( + executors=self.executors_info[controller_id], + filter_func=lambda x: x.is_active and not x.is_trading, + ) + self.executor_orchestrator.execute_actions( + actions=[StopExecutorAction(controller_id=controller_id, executor_id=executor.id) for executor in executors_order_placed] + ) + self.drawdown_exited_controllers.append(controller_id) + + def check_max_global_drawdown(self): + current_global_pnl = sum([report["global_pnl_quote"] for report in self.performance_reports.values()]) + if current_global_pnl > self.max_global_pnl: + self.max_global_pnl = current_global_pnl + else: + current_global_drawdown = self.max_global_pnl - current_global_pnl + if current_global_drawdown > self.config.max_global_drawdown: + self.logger().info("Global drawdown reached. Stopping the strategy.") + HummingbotApplication.main_application().stop() + def send_performance_report(self): if self.current_timestamp - self._last_performance_report_timestamp >= self.performance_report_interval and self.mqtt_enabled: - performance_reports = {controller_id: self.executor_orchestrator.generate_performance_report(controller_id=controller_id).dict() for controller_id in self.controllers.keys()} - self._pub(performance_reports) + self._pub(self.performance_reports) self._last_performance_report_timestamp = self.current_timestamp def control_cash_out(self): @@ -100,6 +145,8 @@ def check_manual_cash_out(self): [StopExecutorAction(executor_id=executor.id, controller_id=executor.controller_id) for executor in executors_to_stop]) if not controller.config.manual_kill_switch and controller.status == RunnableStatus.TERMINATED: + if controller_id in self.drawdown_exited_controllers: + continue self.logger().info(f"Restarting controller {controller_id}.") controller.start() @@ -129,6 +176,7 @@ def stop_actions_proposal(self) -> List[StopExecutorAction]: def apply_initial_setting(self): connectors_position_mode = {} for controller_id, controller in self.controllers.items(): + self.max_pnl_by_controller[controller_id] = Decimal("0") config_dict = controller.config.dict() if "connector_name" in config_dict: if self.is_perpetual(config_dict["connector_name"]): diff --git a/setup.py b/setup.py index 5ac9594703..7a84785ff9 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ def build_extensions(self): def main(): cpu_count = os.cpu_count() or 8 - version = "20240703" + version = "20240828" all_packages = find_packages(include=["hummingbot", "hummingbot.*"], ) excluded_paths = [ "hummingbot.connector.gateway.clob_spot.data_sources.injective", @@ -66,6 +66,7 @@ def main(): "commlib-py", "docker", "diff-cover", + "ecdsa", "eip712-structs", "eth-abi", "eth-account", @@ -74,6 +75,7 @@ def main(): "eth-typing", "eth-utils", "flake8", + "grpc", "hexbytes", "importlib-metadata", "injective-py", @@ -109,6 +111,7 @@ def main(): "websockets", "yarl", "pandas_ta==0.3.14b", + "xrpl-py==3.0.0", ] cython_kwargs = { diff --git 
a/setup/environment.yml b/setup/environment.yml index 09c1dac577..3b9d8cf175 100644 --- a/setup/environment.yml +++ b/setup/environment.yml @@ -4,7 +4,7 @@ channels: - defaults dependencies: - bidict - - coinbase-advanced-py + - coinbase-advanced-py=1.4.3 - coverage - cython=3.0 - nomkl @@ -48,7 +48,7 @@ dependencies: - gql - grpcio-tools - importlib-metadata==0.23 - - injective-py==1.5.* + - injective-py==1.6.* - jsonpickle==3.0.1 - mypy-extensions==0.4.3 - msgpack @@ -71,4 +71,4 @@ dependencies: - yarl==1.* - git+https://github.com/CoinAlpha/python-signalr-client.git - git+https://github.com/konichuvak/dydx-v3-python.git@web3 - - xrpl-py + - xrpl-py==3.0.0 diff --git a/setup/environment_dydx.yml b/setup/environment_dydx.yml new file mode 100644 index 0000000000..02a5fed99b --- /dev/null +++ b/setup/environment_dydx.yml @@ -0,0 +1,74 @@ +name: hummingbot +channels: + - conda-forge + - defaults +dependencies: + - bidict + - coinbase-advanced-py + - coverage + - cython=3.0 + - nomkl + - nose=1.3.7 + - nose-exclude + - numpy=1.23.5 + - numpy-base=1.23.5 + - pandas=1.5.3 + - pip + - prompt_toolkit=3.0.20 + - pydantic=1.10 + - pytest + - python=3.10 + - scipy=1.10.1 + - sqlalchemy=1.4 + - tabulate==0.8.9 + - ujson + - zlib + - pip: + - aiohttp==3.* + - aioprocessing==2.0 + - aioresponses + - aiounittest + - appdirs==1.4.3 + - async-timeout + - asyncssh==2.13.1 + - pyOpenSSL==21.0.0 + - appnope==0.1.3 + - base58==2.1.1 + - cachetools==4.0.0 + - commlib-py==0.10.6 + - cryptography==3.4.7 + - diff-cover + - docker==5.0.3 + - eth_abi==4.0.0 + - eth-account==0.8.0 + - eth-utils==2.2.0 + - eip712-structs==1.1.0 + - dotmap==1.3.30 + - flake8==3.7.9 + - gql + - grpcio-tools + - importlib-metadata==0.23 + - jsonpickle==3.0.1 + - mypy-extensions==0.4.3 + - msgpack + - pandas_ta==0.3.14b + - pre-commit==2.18.1 + - psutil==5.7.2 + - ptpython==3.0.20 + - pyjwt==2.8.0 + - pyperclip==1.7.0 + - python-telegram-bot==12.8 + - requests==2.* + - rsa==4.7 + - ruamel-yaml==0.16.10 + - signalr-client-aio==0.0.1.6.2 + - substrate-interface==1.6.2 + - solders==0.1.4 + - vega-python-sdk==0.1.3 + - v4_proto + - web3 + - websockets + - yarl==1.* + - git+https://github.com/CoinAlpha/python-signalr-client.git + - git+https://github.com/konichuvak/dydx-v3-python.git@web3 + - xrpl-py diff --git a/test/connector/.gitignore b/test/connector/.gitignore deleted file mode 100644 index dd8db5a3fa..0000000000 --- a/test/connector/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/*.sqlite diff --git a/test/connector/README.md b/test/connector/README.md deleted file mode 100644 index 104ea81a8c..0000000000 --- a/test/connector/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# Unit Testing - -Create a new environment called `MOCK_API_ENABLED` that switches between normal unit tests and mock tests. - -```bash -export MOCK_API_ENABLED=true -``` - -Before running the tests, make sure `conda` environment is enabled. - -```bash -conda activate hummingbot -``` - -Run nosetests from `hummingbot/test/integration` directory and add `-v` for logging to see what the tests are doing and what errors come up. 
- -```bash -nosetests -v test_binance_market.py -``` - -Markets that currently can run unit mock testing: - -- Binance -- Coinbase Pro -- Htx -- KuCoin \ No newline at end of file diff --git a/test/connector/derivative/binance_perpetual/test_binance_perpetual_market.py b/test/connector/derivative/binance_perpetual/test_binance_perpetual_market.py deleted file mode 100644 index 409f003a20..0000000000 --- a/test/connector/derivative/binance_perpetual/test_binance_perpetual_market.py +++ /dev/null @@ -1,287 +0,0 @@ -import asyncio -import contextlib -import logging -import time -import unittest -from decimal import Decimal -from typing import List - -import conf -from hummingbot.connector.derivative.binance_perpetual.binance_perpetual_derivative import BinancePerpetualDerivative -from hummingbot.core.clock import Clock -from hummingbot.core.clock_mode import ClockMode -from hummingbot.core.data_type.common import OrderType -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderCancelledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.network_iterator import NetworkStatus -from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class BinancePerpetualMarketUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.ReceivedAsset, - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - - market: BinancePerpetualDerivative - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls) -> None: - cls._ev_loop = asyncio.get_event_loop() - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: BinancePerpetualDerivative = BinancePerpetualDerivative( - api_key=conf.binance_perpetual_api_key, - api_secret=conf.binance_perpetual_api_secret, - trading_pairs=["ETH-USDT"] - ) - print("Initializing Binance Perpetual market... 
this will take about a minute.") - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.clock.add_iterator(cls.market) - cls.stack: contextlib.ExitStack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_till_ready()) - print("Market Ready.") - - @classmethod - async def wait_till_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self) -> None: - self.market_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_logger = None - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - @unittest.skip("Too Simple, Unnecessary") - def test_network_status(self): - network_status: NetworkStatus = self.ev_loop.run_until_complete(self.market.check_network()) - self.assertEqual(NetworkStatus.CONNECTED, network_status) - - @unittest.skip("") - def test_buy_and_sell_order_then_cancel_individually(self): - trading_pair = "ETH-USDT" - # Create Buy Order - buy_order_id = self.market.buy( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(300) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(buy_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(1, len(self.market.in_flight_orders)) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Create Sell Order - sell_order_id = self.market.sell( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(500) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: SellOrderCreatedEvent = order_created_event - self.assertEqual(sell_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(2, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id in self.market.in_flight_orders) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Cancel Buy Order - self.market.cancel(trading_pair, buy_order_id) - [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = order_cancelled_event - self.assertEqual(buy_order_id, order_cancelled_event.order_id) - self.assertEqual(1, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id in self.market.in_flight_orders) - self.assertTrue(buy_order_id not in self.market.in_flight_orders) - - # Cancel Sell Order - self.market.cancel(trading_pair, sell_order_id) - [order_cancelled_event] = 
self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = order_cancelled_event - self.assertEqual(sell_order_id, order_cancelled_event.order_id) - self.assertEqual(0, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id not in self.market.in_flight_orders) - self.assertTrue(buy_order_id not in self.market.in_flight_orders) - - @unittest.skip("") - def test_buy_and_sell_order_then_cancel_all(self): - trading_pair = "ETH-USDT" - # Create Buy Order - buy_order_id = self.market.buy( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(300) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(buy_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(1, len(self.market.in_flight_orders)) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Create Sell Order - sell_order_id = self.market.sell( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(500) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: SellOrderCreatedEvent = order_created_event - self.assertEqual(sell_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(2, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id in self.market.in_flight_orders) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Cancel All Orders - [cancellation_results] = self.run_parallel(self.market.cancel_all(5)) - for cancel_result in cancellation_results: - self.assertEqual(cancel_result.success, True) - - self.assertEqual(0, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id not in self.market.in_flight_orders) - self.assertTrue(buy_order_id not in self.market.in_flight_orders) - - @unittest.skip("") - def test_buy_and_sell_order_then_cancel_account_orders(self): - trading_pair = "ETH-USDT" - # Create Buy Order - buy_order_id = self.market.buy( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(300) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(buy_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(1, len(self.market.in_flight_orders)) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Create Sell Order - sell_order_id = self.market.sell( - trading_pair=trading_pair, - amount=Decimal(0.01), - order_type=OrderType.LIMIT, - price=Decimal(500) - ) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: SellOrderCreatedEvent = order_created_event - self.assertEqual(sell_order_id, order_created_event.order_id) - self.assertEqual(trading_pair, order_created_event.trading_pair) - self.assertEqual(2, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id in self.market.in_flight_orders) - self.assertTrue(buy_order_id in self.market.in_flight_orders) - - # Cancel All Open Orders on Account (specified by trading pair) - 
self.ev_loop.run_until_complete(safe_ensure_future(self.market.cancel_all_account_orders(trading_pair))) - self.assertEqual(0, len(self.market.in_flight_orders)) - self.assertTrue(sell_order_id not in self.market.in_flight_orders) - self.assertTrue(buy_order_id not in self.market.in_flight_orders) - - @unittest.skip("") - def test_order_fill_event(self): - trading_pair = "ETH-USDT" - - amount: Decimal = Decimal(0.01) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - # Initialize Pricing (Buy) - price: Decimal = self.market.get_price(trading_pair, True) * Decimal("1.01") - quantized_price: Decimal = self.market.quantize_order_price(trading_pair, price) - - # Create Buy Order - buy_order_id = self.market.buy( - trading_pair=trading_pair, - amount=quantized_amount, - order_type=OrderType.LIMIT, - price=quantized_price - ) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - self.assertEqual(buy_order_id, order_completed_event.order_id) - self.assertEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual("ETH", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == buy_order_id - for event in self.market_logger.event_log])) - - # Initialize Pricing (Sell) - price = self.market.get_price(trading_pair, False) * Decimal("0.99") - quantized_price = self.market.quantize_order_price(trading_pair, price) - - # Create Sell Order - sell_order_id = self.market.sell( - trading_pair=trading_pair, - amount=quantized_amount, - order_type=OrderType.LIMIT, - price=quantized_price - ) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - self.assertEqual(sell_order_id, order_completed_event.order_id) - self.assertEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual("ETH", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == sell_order_id - for event in self.market_logger.event_log])) - - -def main(): - logging.getLogger("hummingbot.core.event.event_reporter").setLevel(logging.WARNING) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/derivative/binance_perpetual/test_binance_perpetual_order_book_tracker.py b/test/connector/derivative/binance_perpetual/test_binance_perpetual_order_book_tracker.py deleted file mode 100644 index 9c73aeeb92..0000000000 --- a/test/connector/derivative/binance_perpetual/test_binance_perpetual_order_book_tracker.py +++ /dev/null @@ -1,97 +0,0 @@ -import asyncio -import logging -import unittest -from typing import Optional, List, Dict - -from hummingbot.connector.derivative.binance_perpetual.binance_perpetual_order_book_tracker import \ - BinancePerpetualOrderBookTracker -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import OrderBookEvent, OrderBookTradeEvent -from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather - - -class BinancePerpetualOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[BinancePerpetualOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - 
trading_pairs: List[str] = [ - "BTC-USDT", - "ETH-USDT" - ] - - @classmethod - def setUpClass(cls) -> None: - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_tracker: BinancePerpetualOrderBookTracker = BinancePerpetualOrderBookTracker( - trading_pairs=cls.trading_pairs, base_url="https://testnet.binancefuture.com", stream_url="wss://stream.binancefuture.com" - ) - cls.order_book_tracker_task: asyncio.Task = safe_ensure_future(cls.order_book_tracker.start()) - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - while True: - if len(cls.order_book_tracker.order_books) > 0: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - def setUp(self) -> None: - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - order_book.add_listener(event_tag, self.event_logger) - - @staticmethod - async def run_parallel_async(*tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - await asyncio.sleep(1) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_order_book_trade_occurs(self): - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertEqual(type(ob_trade_event), OrderBookTradeEvent) - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertEqual(type(ob_trade_event.timestamp), float) - self.assertEqual(type(ob_trade_event.amount), float) - self.assertEqual(type(ob_trade_event.price), float) - self.assertEqual(type(ob_trade_event.type), TradeType) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_adv(self): - self.ev_loop.run_until_complete(asyncio.sleep(10)) - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - btcusdt_book: OrderBook = order_books[self.trading_pairs[0]] - ethusdt_book: OrderBook = order_books[self.trading_pairs[1]] - - print("BTC-USDT SNAPSHOT: ") - print(btcusdt_book.snapshot) - print("ETH-USDT SNAPSHOT: ") - print(ethusdt_book.snapshot) - - self.assertGreaterEqual(btcusdt_book.get_price_for_volume(True, 10).result_price, - btcusdt_book.get_price(True)) - self.assertLessEqual(btcusdt_book.get_price_for_volume(False, 10).result_price, - btcusdt_book.get_price(False)) - self.assertGreaterEqual(ethusdt_book.get_price_for_volume(True, 10).result_price, - ethusdt_book.get_price(True)) - self.assertLessEqual(ethusdt_book.get_price_for_volume(False, 10).result_price, - ethusdt_book.get_price(False)) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/derivative/binance_perpetual/test_binance_perpetual_utils.py b/test/connector/derivative/binance_perpetual/test_binance_perpetual_utils.py deleted file mode 100644 index 0db1eecf37..0000000000 --- a/test/connector/derivative/binance_perpetual/test_binance_perpetual_utils.py +++ /dev/null @@ -1,32 +0,0 @@ -import unittest -import hummingbot.connector.derivative.binance_perpetual.binance_perpetual_utils as utils - - -class TradingPairUtilsTest(unittest.TestCase): - def test_parse_three_letters_base_and_three_letters_quote(self): - parsed_pair = utils.convert_from_exchange_trading_pair("BTCUSD") - 
self.assertEqual(parsed_pair, "BTC-USD") - - def test_parse_three_letters_base_and_four_letters_quote(self): - parsed_pair = utils.convert_from_exchange_trading_pair("BTCUSDT") - self.assertEqual(parsed_pair, "BTC-USDT") - - def test_parse_three_letters_base_and_three_letters_quote_matching_with_a_four_letters_quote_candidate(self): - parsed_pair = utils.convert_from_exchange_trading_pair("VETUSD") - self.assertEqual(parsed_pair, "VET-USD") - - def test_convert_to_exchange_format_three_letters_base_and_three_letters_quote(self): - converted_pair = utils.convert_to_exchange_trading_pair("BTC-USD") - self.assertEqual(converted_pair, "BTCUSD") - - def test_convert_to_exchange_format_three_letters_base_and_four_letters_quote(self): - converted_pair = utils.convert_to_exchange_trading_pair("BTC-USDT") - self.assertEqual(converted_pair, "BTCUSDT") - - def test_convert_to_exchange_format_three_letters_base_and_three_letters_quote_matching_with_a_four_letters_quote_candidate(self): - converted_pair = utils.convert_to_exchange_trading_pair("VET-USD") - self.assertEqual(converted_pair, "VETUSD") - - -if __name__ == '__main__': - unittest.main() diff --git a/test/connector/exchange/ascend_ex/.gitignore b/test/connector/exchange/ascend_ex/.gitignore deleted file mode 100644 index 23d9952b8c..0000000000 --- a/test/connector/exchange/ascend_ex/.gitignore +++ /dev/null @@ -1 +0,0 @@ -backups \ No newline at end of file diff --git a/test/connector/exchange/ascend_ex/test_ascend_ex_auth.py b/test/connector/exchange/ascend_ex/test_ascend_ex_auth.py deleted file mode 100644 index effa24743e..0000000000 --- a/test/connector/exchange/ascend_ex/test_ascend_ex_auth.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -import sys -import asyncio -import unittest -import aiohttp -import ujson -import websockets -import conf -import logging -from os.path import join, realpath -from typing import Dict, Any -from hummingbot.connector.exchange.ascend_ex.ascend_ex_auth import AscendExAuth -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.connector.exchange.ascend_ex.ascend_ex_constants import REST_URL -from hummingbot.connector.exchange.ascend_ex.ascend_ex_utils import get_ws_url_private - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class TestAscendExAuth(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - api_key = conf.ascend_ex_api_key - secret_key = conf.ascend_ex_secret_key - cls.auth = AscendExAuth(api_key, secret_key) - - async def rest_auth(self) -> Dict[Any, Any]: - headers = { - **self.auth.get_headers(), - **self.auth.get_auth_headers("info"), - } - response = await aiohttp.ClientSession().get(f"{REST_URL}/info", headers=headers) - return await response.json() - - async def ws_auth(self) -> Dict[Any, Any]: - info = await self.rest_auth() - accountGroup = info.get("data").get("accountGroup") - headers = self.auth.get_auth_headers("stream") - ws = await websockets.connect(f"{get_ws_url_private(accountGroup)}/stream", extra_headers=headers) - - raw_msg = await asyncio.wait_for(ws.recv(), 5000) - msg = ujson.loads(raw_msg) - - return msg - - def test_rest_auth(self): - result = self.ev_loop.run_until_complete(self.rest_auth()) - assert result["code"] == 0 - - def test_ws_auth(self): - result = self.ev_loop.run_until_complete(self.ws_auth()) - assert result["m"] == "connected" - assert result["type"] == "auth" diff --git 
a/test/connector/exchange/ascend_ex/test_ascend_ex_exchange.py b/test/connector/exchange/ascend_ex/test_ascend_ex_exchange.py deleted file mode 100644 index 4ee9aac0f5..0000000000 --- a/test/connector/exchange/ascend_ex/test_ascend_ex_exchange.py +++ /dev/null @@ -1,432 +0,0 @@ -import asyncio -import contextlib -import logging -import math -import os -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import List - -import conf -from hummingbot.connector.exchange.ascend_ex.ascend_ex_exchange import AscendExExchange -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import Clock, ClockMode -from hummingbot.core.data_type.common import OrderType -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.utils.async_utils import safe_gather, safe_ensure_future -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill - -logging.basicConfig(level=METRICS_LOG_LEVEL) - -API_KEY = conf.ascend_ex_api_key -API_SECRET = conf.ascend_ex_secret_key - - -class AscendExExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - connector: AscendExExchange - event_logger: EventLogger - trading_pair = "ZIL-USDT" - base_token, quote_token = trading_pair.split("-") - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - - cls.ev_loop = asyncio.get_event_loop() - - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.connector: AscendExExchange = AscendExExchange( - ascend_ex_api_key=API_KEY, - ascend_ex_secret_key=API_SECRET, - trading_pairs=[cls.trading_pair], - trading_required=True - ) - print("Initializing AscendEx exchange... 
this will take about a minute.") - cls.clock.add_iterator(cls.connector) - cls.stack: contextlib.ExitStack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.connector.start(cls._clock, time.time()) - cls.ev_loop.create_task(cls.connector.start_network()) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls, connector = None): - if connector is None: - connector = cls.connector - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if connector.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../connector_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.event_logger = EventLogger() - for event_tag in self.events: - self.connector.add_listener(event_tag, self.event_logger) - - def tearDown(self): - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - self.event_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def _place_order(self, is_buy, amount, order_type, price, ex_order_id) -> str: - if is_buy: - cl_order_id = self.connector.buy(self.trading_pair, amount, order_type, price) - else: - cl_order_id = self.connector.sell(self.trading_pair, amount, order_type, price) - return cl_order_id - - def _cancel_order(self, cl_order_id): - self.connector.cancel(self.trading_pair, cl_order_id) - - def test_estimate_fee(self): - maker_fee = self.connector.estimate_fee_pct(True) - self.assertAlmostEqual(maker_fee, Decimal("0.001")) - taker_fee = self.connector.estimate_fee_pct(False) - self.assertAlmostEqual(taker_fee, Decimal("0.001")) - - def test_create_order_failure(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.10") - base_bal = self.connector.get_available_balance(self.base_token) - amount = base_bal - order_id_1 = self._place_order(False, amount, OrderType.LIMIT, price, 1) - order_id_2 = self._place_order(False, amount, OrderType.LIMIT, price, 1) - order_failed_event: MarketOrderFailureEvent = self.ev_loop.run_until_complete(asyncio.wait_for( - self.event_logger.wait_for(MarketOrderFailureEvent), 3.0)) - self.assertIn(order_failed_event.order_id, (order_id_1, order_id_2)) - - def test_buy_and_sell(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.05") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - quote_bal = self.connector.get_available_balance(self.quote_token) - base_bal = self.connector.get_available_balance(self.base_token) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(2)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount 
for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual(self.base_token, order_completed_event.base_asset) - self.assertEqual(self.quote_token, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.event_logger.event_log])) - - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - expected_quote_bal = quote_bal - quote_amount_traded - # self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 1) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.95") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual(self.base_token, order_completed_event.base_asset) - self.assertEqual(self.quote_token, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertGreater(order_completed_event.fee_amount, Decimal(0)) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.event_logger.event_log])) - - # check available base balance gets updated, we need to wait a bit for the balance message to arrive - expected_base_bal = base_bal - # self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.assertAlmostEqual(expected_base_bal, self.connector.get_available_balance(self.base_token), 5) - - def test_limit_makers_unfilled(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - quote_bal = self.connector.get_available_balance(self.quote_token) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - 
self.assertEqual(cl_order_id, order_created_event.order_id) - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - expected_quote_bal = quote_bal - (price * amount) - self.ev_loop.run_until_complete(self.connector._update_balances()) - # self.ev_loop.run_until_complete(asyncio.sleep(2)) - - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 1) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - - cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - # # @TODO: find a way to create "rejected" - # def test_limit_maker_rejections(self): - # price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - # price = self.connector.get_price(self.trading_pair, False) * Decimal("0.8") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - def test_cancel_all(self): - bid_price = self.connector.get_price(self.trading_pair, True) - ask_price = self.connector.get_price(self.trading_pair, False) - bid_price = self.connector.quantize_order_price(self.trading_pair, bid_price * Decimal("0.9")) - ask_price = self.connector.quantize_order_price(self.trading_pair, ask_price * Decimal("1.1")) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - - buy_id = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - sell_id = self._place_order(False, amount, OrderType.LIMIT, ask_price, 2) - - self.ev_loop.run_until_complete(asyncio.sleep(1)) - asyncio.ensure_future(self.connector.cancel_all(3)) - self.ev_loop.run_until_complete(asyncio.sleep(3)) - cancel_events = [t for t in self.event_logger.event_log if isinstance(t, OrderCancelledEvent)] - self.assertEqual({buy_id, sell_id}, {o.order_id for o in cancel_events}) - - def test_order_quantized_values(self): - bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - ask_price: Decimal = self.connector.get_price(self.trading_pair, False) - mid_price: Decimal = (bid_price + ask_price) / 2 - zil_balance = self.connector.get_balance("ZIL") - usdt_balance = 
self.connector.get_balance("USDT") - - # Make sure there's enough balance to make the limit orders. - self.assertGreater(zil_balance, Decimal("90")) - self.assertGreater(usdt_balance, Decimal("10")) - - # Intentionally set some prices with too many decimal places s.t. they - # need to be quantized. Also, place them far away from the mid-price s.t. they won't - # get filled during the test. - bid_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("0.9333192292111341")) - ask_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("1.1492431474884933")) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - - # Test bid order - cl_order_id_1 = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - # Wait for the order created event and examine the order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - - # Test ask order - cl_order_id_2 = self._place_order(False, amount, OrderType.LIMIT, ask_price, 1) - # Wait for the order created event and examine and order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - - self._cancel_order(cl_order_id_1) - self._cancel_order(cl_order_id_2) - - def test_orders_saving_and_restoration(self): - config_path = "test_config" - strategy_name = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - self.connector._in_flight_orders.clear() - self.assertEqual(0, len(self.connector.tracking_states)) - - # Try to put limit buy order for 0.02 ETH worth of ZRX, and watch for order creation event. - current_bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - price: Decimal = current_bid_price * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.connector.tracking_states)) - self.assertEqual(cl_order_id, list(self.connector.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.connector) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(cl_order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.connector) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. 
- self.connector.stop(self._clock) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - self.clock.remove_iterator(self.connector) - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - new_connector = AscendExExchange(API_KEY, API_SECRET, [self.trading_pair], True) - for event_tag in self.events: - new_connector.add_listener(event_tag, self.event_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [new_connector], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.clock.add_iterator(new_connector) - self.ev_loop.run_until_complete(self.wait_til_ready(new_connector)) - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - new_connector.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(new_connector.limit_orders)) - self.assertEqual(1, len(new_connector.tracking_states)) - - # Cancel the order and verify that the change is saved. - self._cancel_order(cl_order_id) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - order_id = None - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, cl_order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. - for order_book in self.connector.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - def test_filled_orders_recorded(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - # Try to buy some token from the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.05") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent, timeout_seconds=10)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. 
- price = self.connector.get_price(self.trading_pair, True) * Decimal("0.95") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("90")) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertGreaterEqual(len(trade_fills), 2) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertGreaterEqual(len(buy_fills), 1) - self.assertGreaterEqual(len(sell_fills), 1) - - order_id = None - - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) diff --git a/test/connector/exchange/ascend_ex/test_ascend_ex_order_book_tracker.py b/test/connector/exchange/ascend_ex/test_ascend_ex_order_book_tracker.py deleted file mode 100644 index 2ecfb8b488..0000000000 --- a/test/connector/exchange/ascend_ex/test_ascend_ex_order_book_tracker.py +++ /dev/null @@ -1,104 +0,0 @@ -import asyncio -import logging -import math -import time -import unittest -from typing import Dict, List, Optional - -from hummingbot.connector.exchange.ascend_ex import ascend_ex_constants as CONSTANTS -from hummingbot.connector.exchange.ascend_ex.ascend_ex_api_order_book_data_source import AscendExAPIOrderBookDataSource -from hummingbot.connector.exchange.ascend_ex.ascend_ex_order_book_tracker import AscendExOrderBookTracker -from hummingbot.core.api_throttler.async_throttler import AsyncThrottler -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import OrderBookEvent, OrderBookTradeEvent -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class AscendExOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[AscendExOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - trading_pairs: List[str] = [ - "BTC-USDT", - "ETH-USDT", - ] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS) - cls.order_book_tracker: AscendExOrderBookTracker = AscendExOrderBookTracker(cls.throttler, cls.trading_pairs) - cls.order_book_tracker.start() - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - while True: - if len(cls.order_book_tracker.order_books) > 0: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - async def run_parallel_async(self, *tasks, timeout=None): - future: asyncio.Future = asyncio.ensure_future(asyncio.gather(*tasks)) - timer = 0 - while not future.done(): - if timeout and timer > timeout: - raise Exception("Timeout running parallel async tasks in tests") - timer += 1 - now = time.time() - _next_iteration = now // 1.0 + 1 # noqa: F841 - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return 
self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def setUp(self): - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - order_book.add_listener(event_tag, self.event_logger) - - def test_order_book_trade_event_emission(self): - """ - Tests if the order book tracker is able to retrieve order book trade message from exchange and emit order book - trade events after correctly parsing the trade messages - """ - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertTrue(type(ob_trade_event) == OrderBookTradeEvent) - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertTrue(type(ob_trade_event.timestamp) in [float, int]) - self.assertTrue(type(ob_trade_event.amount) == float) - self.assertTrue(type(ob_trade_event.price) == float) - self.assertTrue(type(ob_trade_event.type) == TradeType) - # datetime is in milliseconds - self.assertTrue(math.ceil(math.log10(ob_trade_event.timestamp)) == 13) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_integrity(self): - # Wait 5 seconds to process some diffs. - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - eth_usdt: OrderBook = order_books["ETH-USDT"] - self.assertIsNot(eth_usdt.last_diff_uid, 0) - self.assertGreaterEqual(eth_usdt.get_price_for_volume(True, 10).result_price, - eth_usdt.get_price(True)) - self.assertLessEqual(eth_usdt.get_price_for_volume(False, 10).result_price, - eth_usdt.get_price(False)) - - def test_api_get_last_traded_prices(self): - prices = self.ev_loop.run_until_complete( - AscendExAPIOrderBookDataSource.get_last_traded_prices(["BTC-USDT", "LTC-BTC"])) - for key, value in prices.items(): - print(f"{key} last_trade_price: {value}") - self.assertGreater(prices["BTC-USDT"], 1000) - self.assertLess(prices["LTC-BTC"], 1) diff --git a/test/connector/exchange/ascend_ex/test_ascend_ex_user_stream_tracker.py b/test/connector/exchange/ascend_ex/test_ascend_ex_user_stream_tracker.py deleted file mode 100644 index 04130bc2e2..0000000000 --- a/test/connector/exchange/ascend_ex/test_ascend_ex_user_stream_tracker.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python -import sys -import asyncio -import logging -import unittest -import conf - -from os.path import join, realpath -from hummingbot.connector.exchange.ascend_ex.ascend_ex_user_stream_tracker import AscendExUserStreamTracker -from hummingbot.connector.exchange.ascend_ex.ascend_ex_auth import AscendExAuth -from hummingbot.core.api_throttler.async_throttler import AsyncThrottler -from hummingbot.core.utils.async_utils import safe_ensure_future -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.connector.exchange.ascend_ex import ascend_ex_constants as CONSTANTS - - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class AscendExUserStreamTrackerUnitTest(unittest.TestCase): - api_key = conf.ascend_ex_api_key - api_secret = conf.ascend_ex_secret_key - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS) - cls.ascend_ex_auth = AscendExAuth(cls.api_key, cls.api_secret) - cls.trading_pairs = ["BTC-USDT"] - cls.user_stream_tracker: 
AscendExUserStreamTracker = AscendExUserStreamTracker( - cls.throttler, ascend_ex_auth=cls.ascend_ex_auth, trading_pairs=cls.trading_pairs - ) - cls.user_stream_tracker_task: asyncio.Task = safe_ensure_future(cls.user_stream_tracker.start()) - - def test_user_stream(self): - # Wait process some msgs. - self.ev_loop.run_until_complete(asyncio.sleep(120.0)) - print(self.user_stream_tracker.user_stream) diff --git a/test/connector/exchange/bitfinex/test_bitfinex_api_order_book_data_source.py b/test/connector/exchange/bitfinex/test_bitfinex_api_order_book_data_source.py deleted file mode 100644 index afa29feded..0000000000 --- a/test/connector/exchange/bitfinex/test_bitfinex_api_order_book_data_source.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python -import aiohttp -from os.path import ( - join, - realpath -) -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -import asyncio -import logging -import unittest -from typing import List - -from hummingbot.connector.exchange.bitfinex.bitfinex_api_order_book_data_source import ( - BitfinexAPIOrderBookDataSource, -) - - -class BitfinexAPIOrderBookDataSourceUnitTest(unittest.TestCase): - trading_pairs: List[str] = [ - "BTC-USD", - ] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.data_source: BitfinexAPIOrderBookDataSource = BitfinexAPIOrderBookDataSource( - trading_pairs=cls.trading_pairs - ) - - def test_get_trading_pairs(self): - result: List[str] = self.ev_loop.run_until_complete( - self.data_source.get_trading_pairs()) - - self.assertIsInstance(result, list) - self.assertGreater(len(result), 0) - self.assertIsInstance(result[0], str) - self.assertEqual(result[0], "BTC-USD") - - def test_size_snapshot(self): - async def run_session_for_fetch_snaphot(): - async with aiohttp.ClientSession() as client: - result = await self.data_source.get_snapshot(client, "BTC-USD") - assert len(result["bids"]) == self.data_source.SNAPSHOT_LIMIT_SIZE - assert len(result["asks"]) == self.data_source.SNAPSHOT_LIMIT_SIZE - - # 25 is default fetch value, that is very small for use in production - assert len(result["bids"]) > 25 - assert len(result["asks"]) > 25 - - self.ev_loop.run_until_complete(run_session_for_fetch_snaphot()) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/bitfinex/test_bitfinex_auth.py b/test/connector/exchange/bitfinex/test_bitfinex_auth.py deleted file mode 100644 index 5618c1bbcc..0000000000 --- a/test/connector/exchange/bitfinex/test_bitfinex_auth.py +++ /dev/null @@ -1,32 +0,0 @@ -import asyncio -import json -import unittest -from typing import List - -import websockets - -import conf -from hummingbot.connector.exchange.bitfinex import BITFINEX_WS_AUTH_URI -from hummingbot.connector.exchange.bitfinex.bitfinex_auth import BitfinexAuth - - -class TestAuth(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - - api_key = conf.bitfinex_api_key - secret_key = conf.bitfinex_secret_key - cls.auth = BitfinexAuth(api_key, secret_key) - - async def con_auth(self): - async with websockets.connect(BITFINEX_WS_AUTH_URI) as ws: - ws: websockets.WebSocketClientProtocol = ws - payload = self.auth.generate_auth_payload('AUTH{nonce}'.format(nonce=self.auth.get_nonce())) - await ws.send(json.dumps(payload)) - msg = await asyncio.wait_for(ws.recv(), timeout=30) # response - return msg - - def 
test_auth(self): - result: List[str] = self.ev_loop.run_until_complete(self.con_auth()) - assert "serverId" in result diff --git a/test/connector/exchange/bitfinex/test_bitfinex_market.py b/test/connector/exchange/bitfinex/test_bitfinex_market.py deleted file mode 100644 index 0a285a6546..0000000000 --- a/test/connector/exchange/bitfinex/test_bitfinex_market.py +++ /dev/null @@ -1,373 +0,0 @@ -import asyncio -import contextlib -import logging -import os -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import ( - List, - Optional, -) - -import conf -from hummingbot.connector.exchange.bitfinex.bitfinex_exchange import BitfinexExchange -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import ( - Clock, - ClockMode -) -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import SQLConnectionManager, SQLConnectionType - -logging.basicConfig(level=METRICS_LOG_LEVEL) -API_KEY = conf.bitfinex_api_key -API_SECRET = conf.bitfinex_secret_key -trading_pair = "ETH-USD" -base_asset = trading_pair.split("-")[0] -quote_asset = trading_pair.split("-")[1] - - -class BitfinexExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.ReceivedAsset, - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.WithdrawAsset, - MarketEvent.OrderFilled, - MarketEvent.OrderCancelled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - ] - - market: BitfinexExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: BitfinexExchange = BitfinexExchange( - API_KEY, - API_SECRET, - trading_pairs=[trading_pair] - ) - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.clock.add_iterator(cls.market) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../bitfinex_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = 
safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self.clock.run_til(next_iteration) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_get_fee(self): - limit_fee: AddedToCostTradeFee = self.market.get_fee(base_asset, quote_asset, OrderType.LIMIT, - TradeType.BUY, 1, 1) - self.assertGreater(limit_fee.percent, 0) - self.assertEqual(len(limit_fee.flat_fees), 0) - market_fee: AddedToCostTradeFee = self.market.get_fee(base_asset, quote_asset, OrderType.MARKET, - TradeType.BUY, 1) - self.assertGreater(market_fee.percent, 0) - self.assertEqual(len(market_fee.flat_fees), 0) - - def test_minimum_order_size(self): - amount = Decimal("0.001") - quantized_amount = self.market.quantize_order_amount(trading_pair, amount) - self.assertEqual(quantized_amount, 0) - - def test_get_balance(self): - balance = self.market.get_balance(quote_asset) - self.assertGreater(balance, 10) - - def test_limit_buy(self): - amount: Decimal = Decimal("0.04") - current_ask_price: Decimal = self.market.get_price(trading_pair, False) - # no fill - bid_price: Decimal = Decimal("0.9") * current_ask_price - quantize_ask_price: Decimal = self.market.quantize_order_price( - trading_pair, - bid_price - ) - - order_id = self.market.buy( - trading_pair, - amount, - OrderType.LIMIT, - quantize_ask_price - ) - - # Wait for order creation event - self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - - # Cancel order. Automatically asserts that order is tracked - self.market.cancel(trading_pair, order_id) - - [order_cancelled_event] = self.run_parallel( - self.market_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(order_cancelled_event.order_id, order_id) - # # Reset the logs - self.market_logger.clear() - - def test_limit_sell(self): - amount: Decimal = Decimal("0.02") - current_ask_price: Decimal = self.market.get_price(trading_pair, False) - # for no fill - ask_price: Decimal = Decimal("1.1") * current_ask_price - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, - ask_price) - - order_id = self.market.sell(trading_pair, amount, OrderType.LIMIT, - quantize_ask_price) - # Wait for order creation event - self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - - # Cancel order. 
Automatically asserts that order is tracked - self.market.cancel(trading_pair, order_id) - - [order_cancelled_event] = self.run_parallel( - self.market_logger.wait_for(OrderCancelledEvent)) - - self.assertEqual(order_cancelled_event.order_id, order_id) - - # Reset the logs - self.market_logger.clear() - - def test_execute_limit_buy(self): - amount: Decimal = Decimal("0.04") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, - amount) - - bid_entries = self.market.order_books[trading_pair].bid_entries() - - most_top_bid = next(bid_entries) - bid_price: Decimal = Decimal(most_top_bid.price) - quantize_bid_price: Decimal = \ - self.market.quantize_order_price(trading_pair, bid_price) - quantize_bid_price = quantize_bid_price * Decimal("1.1") - - order_id = self.market.buy(trading_pair, - quantized_amount, - OrderType.LIMIT, - quantize_bid_price, - ) - - [order_completed_event] = self.run_parallel( - self.market_logger.wait_for(BuyOrderCompletedEvent)) - order_completed_event: BuyOrderCompletedEvent = order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, - order_completed_event.base_asset_amount) - self.assertEqual(base_asset, order_completed_event.base_asset) - self.assertEqual(quote_asset, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, - order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, - order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_execute_limit_sell(self): - amount: Decimal = Decimal(0.02) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, - amount) - ask_entries = self.market.order_books[trading_pair].ask_entries() - most_top_ask = next(ask_entries) - ask_price: Decimal = Decimal(most_top_ask.price) - quantize_ask_price: Decimal = \ - self.market.quantize_order_price(trading_pair, ask_price) - quantize_ask_price = quantize_ask_price * Decimal("0.9") - - order_id = self.market.sell(trading_pair, - quantized_amount, - OrderType.LIMIT, - quantize_ask_price, - ) - [order_completed_event] = self.run_parallel( - self.market_logger.wait_for(SellOrderCompletedEvent)) - - order_completed_event: SellOrderCompletedEvent = order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, - order_completed_event.base_asset_amount) - self.assertEqual(base_asset, order_completed_event.base_asset) - self.assertEqual(quote_asset, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, - order_completed_event.base_asset_amount) - 
self.assertAlmostEqual(quote_amount_traded, - order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_orders_saving_and_restoration(self): - self.tearDownClass() - self.setUpClass() - self.setUp() - - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - self.assertEqual(0, len(self.market.tracking_states)) - - amount: Decimal = Decimal("0.04") - current_ask_price: Decimal = self.market.get_price(trading_pair, False) - bid_price: Decimal = Decimal("0.9") * current_ask_price - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - order_id = self.market.buy(trading_pair, amount, OrderType.LIMIT, quantize_ask_price) - - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.market.tracking_states)) - self.assertEqual(order_id, list(self.market.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.market) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.market) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. - self.clock.remove_iterator(self.market) - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market: BitfinexExchange = BitfinexExchange( - API_KEY, - API_SECRET, - trading_pairs=[trading_pair] - ) - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, self.market) - self.clock.add_iterator(self.market) - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - self.market.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(self.market.limit_orders)) - self.assertEqual(1, len(self.market.tracking_states)) - - # Cancel the order and verify that the change is saved. 
- self.run_parallel(asyncio.sleep(5.0)) - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_id = None - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, self.market) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - self.setUpClass() - - def test_cancel_all(self): - bid_price: Decimal = self.market.get_price(trading_pair, True) - ask_price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal("0.04") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - # Intentionally setting invalid price to prevent getting filled - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price * Decimal("0.9")) - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, ask_price * Decimal("1.1")) - - self.market.buy(trading_pair, quantized_amount, OrderType.LIMIT, quantize_bid_price) - self.market.sell(trading_pair, quantized_amount, OrderType.LIMIT, quantize_ask_price) - self.run_parallel(asyncio.sleep(5)) - [cancellation_results] = self.run_parallel(self.market.cancel_all(45)) - for cr in cancellation_results: - self.assertEqual(cr.success, True) diff --git a/test/connector/exchange/bitfinex/test_bitfinex_order_book_tracker.py b/test/connector/exchange/bitfinex/test_bitfinex_order_book_tracker.py deleted file mode 100644 index 5ea356a61f..0000000000 --- a/test/connector/exchange/bitfinex/test_bitfinex_order_book_tracker.py +++ /dev/null @@ -1,158 +0,0 @@ -import asyncio -import logging -import math -import sys -import time -import unittest -from typing import Dict, List, Optional - -from hummingbot.connector.exchange.bitfinex.bitfinex_order_book_tracker import BitfinexOrderBookTracker -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import OrderBookEvent, OrderBookTradeEvent -from hummingbot.core.utils.async_utils import safe_ensure_future - - -class BitfinexOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[BitfinexOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - trading_pairs: List[str] = [ - "BTC-USD", - ] - integrity_test_max_volume = 5 # Max volume in asks and bids for the book to be ready for tests - daily_volume = 2500 # Approximate total daily volume in BTC for this exchange for sanity test - book_enties = 5 # Number of asks and bids (each) for the book to be ready for tests - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_tracker: BitfinexOrderBookTracker = BitfinexOrderBookTracker(trading_pairs=cls.trading_pairs) - cls.order_book_tracker_task: asyncio.Task = safe_ensure_future(cls.order_book_tracker.start()) - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - ''' - Wait until the order book under test fills as needed - ''' - print("Waiting for order book to fill...") - while True: - book_present = cls.trading_pairs[0] in 
cls.order_book_tracker.order_books - enough_asks = False - enough_bids = False - enough_ask_rows = False - enough_bid_rows = False - if book_present: - ask_volume = sum(i.amount for i in cls.order_book_tracker.order_books[cls.trading_pairs[0]].ask_entries()) - ask_count = sum(1 for i in cls.order_book_tracker.order_books[cls.trading_pairs[0]].ask_entries()) - - bid_volume = sum(i.amount for i in cls.order_book_tracker.order_books[cls.trading_pairs[0]].bid_entries()) - bid_count = sum(1 for i in cls.order_book_tracker.order_books[cls.trading_pairs[0]].bid_entries()) - - enough_asks = ask_volume >= cls.integrity_test_max_volume - enough_bids = bid_volume >= cls.integrity_test_max_volume - - enough_ask_rows = ask_count >= cls.book_enties - enough_bid_rows = bid_count >= cls.book_enties - - print("Bid volume in book: %f (in %d bids), ask volume in book: %f (in %d asks)" % (bid_volume, bid_count, ask_volume, ask_count)) - - if book_present and enough_asks and enough_bids and enough_ask_rows and enough_bid_rows: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - async def run_parallel_async(self, *tasks, timeout=None): - future: asyncio.Future = asyncio.ensure_future(asyncio.gather(*tasks)) - timer = 0 - while not future.done(): - if timeout and timer > timeout: - raise Exception("Timeout running parallel async tasks in tests") - timer += 1 - now = time.time() - _next_iteration = now // 1.0 + 1 # noqa: F841 - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def setUp(self): - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - order_book.add_listener(event_tag, self.event_logger) - - def test_order_book_trade_event_emission(self): - """2 - Tests if the order book tracker is able to retrieve order book trade message from exchange - and emit order book trade events after correctly parsing the trade messages - """ - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertTrue(isinstance(ob_trade_event, OrderBookTradeEvent)) - self.assertTrue(isinstance(ob_trade_event.timestamp, (int, float))) - self.assertTrue(isinstance(ob_trade_event.amount, float)) - self.assertTrue(isinstance(ob_trade_event.price, float)) - self.assertTrue(isinstance(ob_trade_event.type, TradeType)) - - self.assertTrue(math.ceil(math.log10(ob_trade_event.timestamp)) == 10) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_integrity(self): - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - sut_book: OrderBook = order_books[self.trading_pairs[0]] - - # # 1 - test that best bid is less than best ask - # self.assertGreater(sut_book.get_price(False), sut_book.get_price(True)) - - # 2 - test that price to buy integrity_test_max_volume BTC is is greater than or equal to best ask - self.assertGreaterEqual(sut_book.get_price_for_volume(True, self.integrity_test_max_volume).result_price, - sut_book.get_price(True)) - - # 3 - test that price to sell integrity_test_max_volume BTC is is less than or equal to best bid - self.assertLessEqual(sut_book.get_price_for_volume(False, self.integrity_test_max_volume).result_price, - sut_book.get_price(False)) - - # 4 - test that all 
bids in order book are sorted by price in descending order - previous_price = sys.float_info.max - for bid_row in sut_book.bid_entries(): - self.assertTrue(previous_price >= bid_row.price) - previous_price = bid_row.price - - # 5 - test that all asks in order book are sorted by price in ascending order - previous_price = 0 - for ask_row in sut_book.ask_entries(): - self.assertTrue(previous_price <= ask_row.price) - previous_price = ask_row.price - - # 6 - test that total volume in first orders in book is less than 10 times - # daily traded volumes for this exchange - total_volume = 0 - count = 0 - for bid_row in sut_book.bid_entries(): - total_volume += bid_row.amount - count += 1 - if count > self.book_enties: - break - count = 0 - for ask_row in sut_book.ask_entries(): - total_volume += ask_row.amount - count += 1 - if count > self.book_enties: - break - self.assertLessEqual(total_volume, 10 * self.daily_volume) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/bitfinex/test_bitfinex_user_steam_tracker.py b/test/connector/exchange/bitfinex/test_bitfinex_user_steam_tracker.py deleted file mode 100644 index 47917caac6..0000000000 --- a/test/connector/exchange/bitfinex/test_bitfinex_user_steam_tracker.py +++ /dev/null @@ -1,25 +0,0 @@ -import asyncio -import unittest -import conf - -from hummingbot.connector.exchange.bitfinex.bitfinex_auth import BitfinexAuth -from hummingbot.connector.exchange.bitfinex.bitfinex_user_stream_tracker import \ - BitfinexUserStreamTracker - - -class BitfinexUserStreamTrackerUnitTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.bitfinex_auth = BitfinexAuth(conf.bitfinex_api_key, - conf.bitfinex_secret_key) - cls.trading_pair = ["ETHUSD"] # Using V3 convention since OrderBook is built using V3 - cls.user_stream_tracker: BitfinexUserStreamTracker = BitfinexUserStreamTracker( - bitfinex_auth=cls.bitfinex_auth, trading_pairs=cls.trading_pair) - cls.user_stream_tracker_task: asyncio.Task = asyncio.ensure_future( - cls.user_stream_tracker.start()) - - def test_user_stream(self): - # Wait process some msgs. 
- self.ev_loop.run_until_complete(asyncio.sleep(120.0)) - assert self.user_stream_tracker.user_stream.qsize() > 0 diff --git a/test/connector/exchange/coinbase_pro/fixture_coinbase_pro.py b/test/connector/exchange/coinbase_pro/fixture_coinbase_pro.py deleted file mode 100644 index 40b84ed7a7..0000000000 --- a/test/connector/exchange/coinbase_pro/fixture_coinbase_pro.py +++ /dev/null @@ -1,81 +0,0 @@ -class FixtureCoinbasePro: - BALANCES = [ - { - "id": "2d36cb78-5145-41fe-90b0-3f204f2e357d", "currency": "USDC", "balance": "90.1480261500000000", - "available": "90.14802615", "hold": "0.0000000000000000", - "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", "trading_enabled": True - }, - { - "id": "d3356a99-ad27-4b1b-92f8-26233be0d62a", "currency": "ETH", "balance": "0.4424257124965000", - "available": "0.4424257124965", "hold": "0.0000000000000000", - "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", "trading_enabled": True - } - ] - - TRADE_FEES = {"maker_fee_rate": "0.0050", "taker_fee_rate": "0.0050", "usd_volume": "462.93"} - - ORDERS_STATUS = [] - - OPEN_BUY_LIMIT_ORDER = { - "id": "4aa4773e-ca4e-4146-8ac1-a0ec8c39f835", "price": "278.05000000", "size": "0.02000000", - "product_id": "ETH-USDC", "side": "buy", "stp": "dc", "type": "limit", "time_in_force": "GTC", - "post_only": False, "created_at": "2020-02-14T06:52:32.167853Z", "fill_fees": "0", - "filled_size": "0", "executed_value": "0", "status": "pending", "settled": False} - - OPEN_SELL_LIMIT_ORDER = { - "id": "9087815a-3d3d-4c2c-b627-78fd83d8644e", "price": "787.44000000", "size": "0.07000000", - "product_id": "ETH-USDC", "side": "sell", "stp": "dc", "type": "limit", "time_in_force": "GTC", - "post_only": False, "created_at": "2020-02-14T07:57:31.842502Z", "fill_fees": "0", - "filled_size": "0", "executed_value": "0", "status": "pending", "settled": False} - - WS_AFTER_BUY_2 = { - "type": "done", "side": "buy", "product_id": "ETH-USDC", "time": "2020-02-14T06:52:32.172333Z", - "sequence": 544313348, "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", - "user_id": "5dc62091b2d9e604842cad56", "order_id": "4aa4773e-ca4e-4146-8ac1-a0ec8c39f835", - "reason": "filled", "price": "278.05", "remaining_size": "0"} - - BUY_MARKET_ORDER = { - "id": "dedfcd66-2324-4805-bd31-b8920c3a25b4", "size": "0.02000000", "product_id": "ETH-USDC", - "side": "buy", "stp": "dc", "funds": "84.33950263", "type": "market", "post_only": False, - "created_at": "2020-02-14T07:21:17.166831Z", "fill_fees": "0", "filled_size": "0", - "executed_value": "0", "status": "pending", "settled": False} - - SELL_MARKET_ORDER = { - "id": "CBS_MARKET_SELL", "size": "0.02000000", "product_id": "ETH-USDC", - "side": "sell", "stp": "dc", "funds": "84.33950263", "type": "market", "post_only": False, - "created_at": "2020-02-14T07:21:17.166831Z", "fill_fees": "0", "filled_size": "0", - "executed_value": "0", "status": "pending", "settled": False} - - WS_AFTER_MARKET_BUY_2 = { - "type": "done", "side": "buy", "product_id": "ETH-USDC", "time": "2020-02-14T07:21:17.171949Z", - "sequence": 544350029, "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", - "user_id": "5dc62091b2d9e604842cad56", "order_id": "dedfcd66-2324-4805-bd31-b8920c3a25b4", - "reason": "filled", "remaining_size": "0"} - - WS_ORDER_OPEN = { - "type": "open", "side": "buy", "product_id": "ETH-USDC", "time": "2020-02-14T07:41:45.174224Z", - "sequence": 544392466, "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", - "user_id": "5dc62091b2d9e604842cad56", "price": "235.67", - "order_id": 
"9d8c39b0-094f-4832-9a73-9b2e43b03780", "remaining_size": "0.02"} - - WS_ORDER_CANCELED = { - "type": "done", "side": "buy", "product_id": "ETH-USDC", - "time": "2020-02-14T07:41:45.450940Z", "sequence": 544392470, - "profile_id": "bc2f3a64-0c0b-49ce-bb3e-5efc978b5b5c", "user_id": "5dc62091b2d9e604842cad56", - "order_id": "9d8c39b0-094f-4832-9a73-9b2e43b03780", "reason": "canceled", "price": "235.67", - "remaining_size": "0.02"} - - COINBASE_ACCOUNTS_GET = [ - { - "id": "8543f030-4a21-58d6-ba63-3446e74a01fe", "name": "ETH Wallet", "balance": "0.00000000", - "currency": "ETH", - "type": "wallet", "primary": False, "active": True, "available_on_consumer": True, "hold_balance": "0.00", - "hold_currency": "PHP" - }, - { - "id": "414f0c91-8490-5790-bb6e-6007c7686267", "name": "USDC Wallet", "balance": "0.000000", - "currency": "USDC", - "type": "wallet", "primary": False, "active": True, "available_on_consumer": True, "hold_balance": "0.00", - "hold_currency": "PHP" - }, - ] diff --git a/test/connector/exchange/coinbase_pro/test_coinbase_pro_active_order_tracker.py b/test/connector/exchange/coinbase_pro/test_coinbase_pro_active_order_tracker.py deleted file mode 100644 index 854a3b4808..0000000000 --- a/test/connector/exchange/coinbase_pro/test_coinbase_pro_active_order_tracker.py +++ /dev/null @@ -1,246 +0,0 @@ -#!/usr/bin/env python - -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -import logging -import unittest -from typing import ( - Any, - Dict, - Optional -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_tracker import CoinbaseProOrderBookTracker -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.data_type.order_book_tracker import OrderBookTrackerDataSourceType -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_message import CoinbaseProOrderBookMessage -from hummingbot.core.data_type.order_book_row import OrderBookRow - -test_trading_pair = "BTC-USD" - - -class CoinbaseProOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[CoinbaseProOrderBookTracker] = None - - @classmethod - def setUpClass(cls): - cls.order_book_tracker: CoinbaseProOrderBookTracker = CoinbaseProOrderBookTracker( - OrderBookTrackerDataSourceType.EXCHANGE_API, - trading_pairs=[test_trading_pair]) - - def test_diff_message_not_found(self): - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - test_order_book: OrderBook = order_books[test_trading_pair] - test_active_order_tracker = self.order_book_tracker._active_order_trackers[test_trading_pair] - - # receive match message that is not in active orders (should be ignored) - match_msg_to_ignore: Dict[str, Any] = { - "type": "match", - "trade_id": 10, - "sequence": 50, - "maker_order_id": "ac928c66-ca53-498f-9c13-a110027a60e8", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "size": "5.23512", - "price": "400.23", - "side": "sell" - } - ignore_msg: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(match_msg_to_ignore) - open_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(ignore_msg) - self.assertEqual(open_ob_row, ([], [])) - - def test_buy_diff_message(self): - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - test_order_book: OrderBook = order_books[test_trading_pair] - test_active_order_tracker = 
self.order_book_tracker._active_order_trackers[test_trading_pair] - - # receive open buy message to be added to active orders - order_id = "abc" - side = "buy" - price = 1337.0 - open_size = 100.0 - open_sequence = 1 - open_message_dict: Dict[str, Any] = { - "type": "open", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "sequence": open_sequence, - "order_id": order_id, - "price": str(price), - "remaining_size": str(open_size), - "side": side - } - open_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(open_message_dict) - open_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(open_message) - self.assertEqual(open_ob_row[0], [OrderBookRow(price, open_size, open_sequence)]) - - # receive change message - change_size = 50.0 - change_sequence = 2 - change_message_dict: Dict[str, Any] = { - "type": "change", - "time": "2014-11-07T08:19:27.028459Z", - "sequence": change_sequence, - "order_id": order_id, - "product_id": test_trading_pair, - "new_size": str(change_size), - "old_size": "100.0", - "price": str(price), - "side": side - } - change_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(change_message_dict) - - change_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(change_message) - self.assertEqual(change_ob_row[0], [OrderBookRow(price, change_size, change_sequence)]) - - # receive match message - match_size = 30.0 - match_sequence = 3 - match_message_dict: Dict[str, Any] = { - "type": "match", - "trade_id": 10, - "sequence": match_sequence, - "maker_order_id": order_id, - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "size": str(match_size), - "price": str(price), - "side": side - } - match_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(match_message_dict) - - match_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(match_message) - self.assertEqual(match_ob_row[0], [OrderBookRow(price, change_size - match_size, match_sequence)]) - - # receive done message - done_size = 0.0 - done_sequence = 4 - done_message_dict: Dict[str, Any] = { - "type": "done", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "sequence": done_sequence, - "price": str(price), - "order_id": order_id, - "reason": "filled", - "side": side, - "remaining_size": "0" - } - done_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(done_message_dict) - - done_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(done_message) - self.assertEqual(done_ob_row[0], [OrderBookRow(price, done_size, done_sequence)]) - - def test_sell_diff_message(self): - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - test_order_book: OrderBook = order_books[test_trading_pair] - test_active_order_tracker = self.order_book_tracker._active_order_trackers[test_trading_pair] - - # receive open sell message to be added to active orders - order_id = "abc" - side = "sell" - price = 1337.0 - open_size = 100.0 - open_sequence = 1 - open_message_dict: Dict[str, Any] = { - "type": "open", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "sequence": open_sequence, - "order_id": order_id, - "price": str(price), - "remaining_size": str(open_size), - "side": side - } - open_message: 
CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(open_message_dict) - open_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(open_message) - self.assertEqual(open_ob_row[1], [OrderBookRow(price, open_size, open_sequence)]) - - # receive open sell message to be added to active orders - order_id_2 = "def" - side = "sell" - price = 1337.0 - open_size_2 = 100.0 - open_sequence_2 = 2 - open_message_dict_2: Dict[str, Any] = { - "type": "open", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "sequence": open_sequence_2, - "order_id": order_id_2, - "price": str(price), - "remaining_size": str(open_size), - "side": side - } - open_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(open_message_dict_2) - open_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(open_message) - self.assertEqual(open_ob_row[1], [OrderBookRow(price, open_size + open_size_2, open_sequence_2)]) - - # receive change message - change_size = 50.0 - change_sequence = 3 - change_message_dict: Dict[str, Any] = { - "type": "change", - "time": "2014-11-07T08:19:27.028459Z", - "sequence": change_sequence, - "order_id": order_id, - "product_id": test_trading_pair, - "new_size": str(change_size), - "old_size": "100.0", - "price": str(price), - "side": side - } - change_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(change_message_dict) - - change_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(change_message) - self.assertEqual(change_ob_row[1], [OrderBookRow(price, change_size + open_size_2, change_sequence)]) - - # receive match message - match_size = 30.0 - match_sequence = 4 - match_message_dict: Dict[str, Any] = { - "type": "match", - "trade_id": 10, - "sequence": match_sequence, - "maker_order_id": order_id, - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "size": str(match_size), - "price": str(price), - "side": side - } - match_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(match_message_dict) - - match_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(match_message) - self.assertEqual(match_ob_row[1], [OrderBookRow(price, change_size - match_size + open_size_2, match_sequence)]) - - # receive done message - done_size = 0.0 - done_sequence = 5 - done_message_dict: Dict[str, Any] = { - "type": "done", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": test_trading_pair, - "sequence": done_sequence, - "price": str(price), - "order_id": order_id, - "reason": "filled", - "side": side, - "remaining_size": "0" - } - done_message: CoinbaseProOrderBookMessage = test_order_book.diff_message_from_exchange(done_message_dict) - - done_ob_row: OrderBookRow = test_active_order_tracker.convert_diff_message_to_order_book_row(done_message) - self.assertEqual(done_ob_row[1], [OrderBookRow(price, done_size + open_size_2, done_sequence)]) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/coinbase_pro/test_coinbase_pro_market.py b/test/connector/exchange/coinbase_pro/test_coinbase_pro_market.py deleted file mode 100644 index f49eff109f..0000000000 --- a/test/connector/exchange/coinbase_pro/test_coinbase_pro_market.py +++ /dev/null @@ -1,563 +0,0 @@ 
-import asyncio -import contextlib -import logging -import math -import os -import sys -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import ( - List, - Optional -) -from unittest import mock - -import conf -from hummingbot.client.config.fee_overrides_config_map import fee_overrides_config_map -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange import CoinbaseProExchange -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import ( - Clock, - ClockMode -) -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.mock_api.mock_web_server import MockWebServer -from hummingbot.core.mock_api.mock_web_socket_server import MockWebSocketServerFactory -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill -from test.connector.exchange.coinbase_pro.fixture_coinbase_pro import FixtureCoinbasePro - -# API_SECRET length must be multiple of 4 otherwise base64.b64decode will fail -API_MOCK_ENABLED = conf.mock_api_enabled is not None and conf.mock_api_enabled.lower() in ['true', 'yes', '1'] -API_KEY = "XXXX" if API_MOCK_ENABLED else conf.coinbase_pro_api_key -API_SECRET = "YYYY" if API_MOCK_ENABLED else conf.coinbase_pro_secret_key -API_PASSPHRASE = "ZZZZ" if API_MOCK_ENABLED else conf.coinbase_pro_passphrase -API_BASE_URL = "api.pro.coinbase.com" -WS_BASE_URL = "wss://ws-feed.pro.coinbase.com" - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class CoinbaseProExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.ReceivedAsset, - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.OrderCancelled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - - market: CoinbaseProExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - cls.ev_loop = asyncio.get_event_loop() - trading_pair = "ETH-USDC" - if API_MOCK_ENABLED: - cls.web_app = MockWebServer.get_instance() - cls.web_app.add_host_to_mock(API_BASE_URL, ["/time", "/products", f"/products/{trading_pair}/book"]) - cls.web_app.start() - cls.ev_loop.run_until_complete(cls.web_app.wait_til_started()) - cls._patcher = mock.patch("aiohttp.client.URL") - cls._url_mock = cls._patcher.start() - cls._url_mock.side_effect = cls.web_app.reroute_local - cls.web_app.update_response("get", API_BASE_URL, "/accounts", FixtureCoinbasePro.BALANCES) - cls.web_app.update_response("get", API_BASE_URL, "/fees", FixtureCoinbasePro.TRADE_FEES) - cls.web_app.update_response("get", API_BASE_URL, "/orders", FixtureCoinbasePro.ORDERS_STATUS) - - 
MockWebSocketServerFactory.start_new_server(WS_BASE_URL) - cls._ws_patcher = unittest.mock.patch("websockets.connect", autospec=True) - cls._ws_mock = cls._ws_patcher.start() - cls._ws_mock.side_effect = MockWebSocketServerFactory.reroute_ws_connect - - cls._t_nonce_patcher = unittest.mock.patch( - "hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange.get_tracking_nonce") - cls._t_nonce_mock = cls._t_nonce_patcher.start() - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: CoinbaseProExchange = CoinbaseProExchange( - API_KEY, - API_SECRET, - API_PASSPHRASE, - trading_pairs=[trading_pair] - ) - print("Initializing Coinbase Pro market... this will take about a minute.") - cls.clock.add_iterator(cls.market) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - if API_MOCK_ENABLED: - cls.web_app.stop() - cls._patcher.stop() - cls._t_nonce_patcher.stop() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../coinbase_pro_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self.clock.run_til(next_iteration) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_get_fee(self): - limit_fee: AddedToCostTradeFee = self.market.get_fee("ETH", "USDC", OrderType.LIMIT_MAKER, TradeType.BUY, 1, 1) - self.assertGreater(limit_fee.percent, 0) - self.assertEqual(len(limit_fee.flat_fees), 0) - market_fee: AddedToCostTradeFee = self.market.get_fee("ETH", "USDC", OrderType.LIMIT, TradeType.BUY, 1) - self.assertGreater(market_fee.percent, 0) - self.assertEqual(len(market_fee.flat_fees), 0) - - def test_fee_overrides_config(self): - fee_overrides_config_map["coinbase_pro_taker_fee"].value = None - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.005"), taker_fee.percent) - fee_overrides_config_map["coinbase_pro_taker_fee"].value = Decimal('0.2') - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.002"), taker_fee.percent) - fee_overrides_config_map["coinbase_pro_maker_fee"].value = None - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.005"), maker_fee.percent) - fee_overrides_config_map["coinbase_pro_maker_fee"].value = Decimal('0.75') - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, 
- TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.0075"), maker_fee.percent) - - def place_order(self, is_buy, trading_pair, amount, order_type, price, nonce, fixture_resp, fixture_ws): - order_id, exch_order_id = None, None - if API_MOCK_ENABLED: - self._t_nonce_mock.return_value = nonce - side = 'buy' if is_buy else 'sell' - resp = fixture_resp.copy() - exch_order_id = resp["id"] - resp["side"] = side - self.web_app.update_response("post", API_BASE_URL, "/orders", resp) - if is_buy: - order_id = self.market.buy(trading_pair, amount, order_type, price) - else: - order_id = self.market.sell(trading_pair, amount, order_type, price) - if API_MOCK_ENABLED: - resp = fixture_ws.copy() - resp["order_id"] = exch_order_id - resp["side"] = side - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.1) - return order_id, exch_order_id - - def cancel_order(self, trading_pair, order_id, exchange_order_id, fixture_ws): - if API_MOCK_ENABLED: - self.web_app.update_response("delete", API_BASE_URL, f"/orders/{exchange_order_id}", exchange_order_id) - self.market.cancel(trading_pair, order_id) - if API_MOCK_ENABLED: - resp = fixture_ws.copy() - resp["order_id"] = exchange_order_id - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.1) - - def test_limit_maker_rejections(self): - if API_MOCK_ENABLED: - return - trading_pair = "ETH-USDC" - - # Try to put a buy limit maker order that is going to match, this should triggers order failure event. - price: Decimal = self.market.get_price(trading_pair, True) * Decimal('1.02') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal("0.02")) - order_id = self.market.buy(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - self.market_logger.clear() - - # Try to put a sell limit maker order that is going to match, this should triggers order failure event. 
- price: Decimal = self.market.get_price(trading_pair, True) * Decimal('0.98') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal("0.02")) - - order_id = self.market.sell(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - def test_limit_makers_unfilled(self): - if API_MOCK_ENABLED: - return - trading_pair = "ETH-USDC" - bid_price: Decimal = self.market.get_price(trading_pair, True) * Decimal("0.5") - ask_price: Decimal = self.market.get_price(trading_pair, False) * 2 - amount: Decimal = 10 / bid_price - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - # Intentionally setting invalid price to prevent getting filled - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price * Decimal("0.7")) - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, ask_price * Decimal("1.5")) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, - 10001, FixtureCoinbasePro.OPEN_BUY_LIMIT_ORDER, - FixtureCoinbasePro.WS_ORDER_OPEN) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - order_id_2, exch_order_id_2 = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_ask_price, 10002, - FixtureCoinbasePro.OPEN_SELL_LIMIT_ORDER, - FixtureCoinbasePro.WS_ORDER_OPEN) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id_2, order_created_event.order_id) - - self.run_parallel(asyncio.sleep(1)) - - if API_MOCK_ENABLED: - self.web_app.update_response("delete", API_BASE_URL, f"/orders/{exch_order_id}", exch_order_id) - self.web_app.update_response("delete", API_BASE_URL, f"/orders/{exch_order_id_2}", exch_order_id_2) - [cancellation_results] = self.run_parallel(self.market.cancel_all(5)) - if API_MOCK_ENABLED: - resp = FixtureCoinbasePro.WS_ORDER_CANCELED.copy() - resp["order_id"] = exch_order_id - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.1) - resp = FixtureCoinbasePro.WS_ORDER_CANCELED.copy() - resp["order_id"] = exch_order_id_2 - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.11) - for cr in cancellation_results: - self.assertEqual(cr.success, True) - - # NOTE that orders of non-USD pairs (including USDC pairs) are LIMIT only - def test_limit_taker_buy(self): - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.1")) - trading_pair = "ETH-USDC" - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, _ = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT, price, 10001, - FixtureCoinbasePro.BUY_MARKET_ORDER, FixtureCoinbasePro.WS_AFTER_MARKET_BUY_2) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - order_completed_event: BuyOrderCompletedEvent = order_completed_event - trade_events: 
List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT_MAKER for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual("ETH", order_completed_event.base_asset) - self.assertEqual("USDC", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - # NOTE that orders of non-USD pairs (including USDC pairs) are LIMIT only - def test_limit_taker_sell(self): - trading_pair = "ETH-USDC" - price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, _ = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT, price, 10001, - FixtureCoinbasePro.BUY_MARKET_ORDER, FixtureCoinbasePro.WS_AFTER_MARKET_BUY_2) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - order_completed_event: SellOrderCompletedEvent = order_completed_event - trade_events = [t for t in self.market_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT_MAKER for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual("ETH", order_completed_event.base_asset) - self.assertEqual("USDC", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_cancel_order(self): - trading_pair = "ETH-USDC" - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.2") - self.assertGreater(self.market.get_balance("ETH"), amount) - - bid_price: Decimal = current_bid_price - Decimal("0.1") * current_bid_price - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, 10001, FixtureCoinbasePro.OPEN_BUY_LIMIT_ORDER, - FixtureCoinbasePro.WS_ORDER_OPEN) - - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureCoinbasePro.WS_ORDER_CANCELED) - [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = 
order_cancelled_event - self.assertEqual(order_cancelled_event.order_id, order_id) - - def test_cancel_all(self): - trading_pair = "ETH-USDC" - bid_price: Decimal = self.market.get_price(trading_pair, True) * Decimal("0.5") - ask_price: Decimal = self.market.get_price(trading_pair, False) * 2 - amount: Decimal = 10 / bid_price - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - # Intentionally setting invalid price to prevent getting filled - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price * Decimal("0.7")) - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, ask_price * Decimal("1.5")) - - _, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, quantize_bid_price, - 10001, FixtureCoinbasePro.OPEN_BUY_LIMIT_ORDER, FixtureCoinbasePro.WS_ORDER_OPEN) - _, exch_order_id_2 = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_ask_price, 10002, FixtureCoinbasePro.OPEN_SELL_LIMIT_ORDER, - FixtureCoinbasePro.WS_ORDER_OPEN) - self.run_parallel(asyncio.sleep(1)) - - if API_MOCK_ENABLED: - self.web_app.update_response("delete", API_BASE_URL, f"/orders/{exch_order_id}", exch_order_id) - self.web_app.update_response("delete", API_BASE_URL, f"/orders/{exch_order_id_2}", exch_order_id_2) - [cancellation_results] = self.run_parallel(self.market.cancel_all(5)) - if API_MOCK_ENABLED: - resp = FixtureCoinbasePro.WS_ORDER_CANCELED.copy() - resp["order_id"] = exch_order_id - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.1) - resp = FixtureCoinbasePro.WS_ORDER_CANCELED.copy() - resp["order_id"] = exch_order_id_2 - MockWebSocketServerFactory.send_json_threadsafe(WS_BASE_URL, resp, delay=0.11) - for cr in cancellation_results: - self.assertEqual(cr.success, True) - - @unittest.skipUnless(any("test_list_orders" in arg for arg in sys.argv), "List order test requires manual action.") - def test_list_orders(self): - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.1")) - trading_pair = "ETH-USDC" - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price + Decimal("0.05") * current_bid_price - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - self.market.buy(trading_pair, quantized_amount, OrderType.LIMIT_MAKER, quantize_bid_price) - self.run_parallel(asyncio.sleep(1)) - [order_details] = self.run_parallel(self.market.list_orders()) - self.assertGreaterEqual(len(order_details), 1) - - self.market_logger.clear() - - def test_orders_saving_and_restoration(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDC" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - self.assertEqual(0, len(self.market.tracking_states)) - - # Try to put limit buy order for 0.04 ETH, and watch for order creation event. 
- current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price * Decimal("0.8") - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, 10001, FixtureCoinbasePro.OPEN_BUY_LIMIT_ORDER, - FixtureCoinbasePro.WS_ORDER_OPEN) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.market.tracking_states)) - self.assertEqual(order_id, list(self.market.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.market) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.market) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. - self.clock.remove_iterator(self.market) - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market: CoinbaseProExchange = CoinbaseProExchange( - coinbase_pro_api_key=API_KEY, - coinbase_pro_secret_key=API_SECRET, - coinbase_pro_passphrase=API_PASSPHRASE, - trading_pairs=["ETH-USDC"] - ) - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, self.market) - self.clock.add_iterator(self.market) - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - self.market.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(self.market.limit_orders)) - self.assertEqual(1, len(self.market.tracking_states)) - - # Cancel the order and verify that the change is saved. - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureCoinbasePro.WS_ORDER_CANCELED) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_id = None - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, self.market) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureCoinbasePro.WS_ORDER_CANCELED) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. 
- for order_book in self.market.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - print(order_book.last_trade_price) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - def test_order_fill_record(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDC" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - # Try to buy 0.04 ETH from the exchange, and watch for completion event. - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.02") - order_id, exch_order_id = self.place_order(True, trading_pair, amount, OrderType.LIMIT, price, - 10001, FixtureCoinbasePro.BUY_MARKET_ORDER, - FixtureCoinbasePro.WS_AFTER_MARKET_BUY_2) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - # Reset the logs - self.market_logger.clear() - - # Try to sell back the same amount of ETH to the exchange, and watch for completion event. - price: Decimal = self.market.get_price(trading_pair, False) - amount = buy_order_completed_event.base_asset_amount - order_id, exch_order_id = self.place_order(False, trading_pair, amount, OrderType.LIMIT, price, - 10002, FixtureCoinbasePro.SELL_MARKET_ORDER, - FixtureCoinbasePro.WS_AFTER_MARKET_BUY_2) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertEqual(2, len(trade_fills)) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertEqual(1, len(buy_fills)) - self.assertEqual(1, len(sell_fills)) - - order_id = None - - finally: - if order_id is not None: - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureCoinbasePro.WS_ORDER_CANCELED) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/connector/exchange/coinbase_pro/test_coinbase_pro_order_book_tracker.py b/test/connector/exchange/coinbase_pro/test_coinbase_pro_order_book_tracker.py deleted file mode 100644 index ea98aa04fe..0000000000 --- a/test/connector/exchange/coinbase_pro/test_coinbase_pro_order_book_tracker.py +++ /dev/null @@ -1,217 +0,0 @@ -import asyncio -import logging -import math -import unittest -from datetime import datetime -from decimal import Decimal -from typing import ( - Dict, - Optional, - List, -) - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_order_book_data_source import \ - CoinbaseProAPIOrderBookDataSource -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book import CoinbaseProOrderBook -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_tracker import CoinbaseProOrderBookTracker -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.data_type.order_book_tracker import OrderBookTrackerDataSource -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - OrderBookEvent, - 
OrderBookTradeEvent, -) -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) - - -class CoinbaseProOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[CoinbaseProOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - trading_pairs: List[str] = [ - "BTC-USD" - ] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_tracker: CoinbaseProOrderBookTracker = CoinbaseProOrderBookTracker( - trading_pairs=cls.trading_pairs) - cls.order_book_tracker_task: asyncio.Task = safe_ensure_future(cls.order_book_tracker.start()) - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - await cls.order_book_tracker._order_books_initialized.wait() - # while True: - # if len(cls.order_book_tracker.order_books) > 0: - # print("Initialized real-time order books.") - # return - # await asyncio.sleep(1) - - async def run_parallel_async(self, *tasks, timeout=None): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - timer = 0 - while not future.done(): - if timeout and timer > timeout: - raise Exception("Time out running parallel async task in tests.") - timer += 1 - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def setUp(self): - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - order_book.add_listener(event_tag, self.event_logger) - - def test_order_book_trade_event_emission(self): - """ - Test if order book tracker is able to retrieve order book trade message from exchange and - emit order book trade events after correctly parsing the trade messages - """ - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertTrue(type(ob_trade_event) == OrderBookTradeEvent) - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertTrue(type(ob_trade_event.timestamp) == float) - self.assertTrue(type(ob_trade_event.amount) == float) - self.assertTrue(type(ob_trade_event.price) == float) - self.assertTrue(type(ob_trade_event.type) == TradeType) - self.assertTrue(math.ceil(math.log10(ob_trade_event.timestamp)) == 10) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_integrity(self): - # Wait 5 seconds to process some diffs. 
- self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - test_order_book: OrderBook = order_books["BTC-USD"] - # print("test_order_book") - # print(test_order_book.snapshot) - self.assertGreaterEqual(test_order_book.get_price_for_volume(True, 10).result_price, - test_order_book.get_price(True)) - self.assertLessEqual(test_order_book.get_price_for_volume(False, 10).result_price, - test_order_book.get_price(False)) - - test_active_order_tracker = self.order_book_tracker._active_order_trackers["BTC-USD"] - self.assertTrue(len(test_active_order_tracker.active_asks) > 0) - self.assertTrue(len(test_active_order_tracker.active_bids) > 0) - for order_book in self.order_book_tracker.order_books.values(): - # print(order_book.last_trade_price) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - def test_order_book_data_source(self): - self.assertTrue(isinstance(self.order_book_tracker.data_source, OrderBookTrackerDataSource)) - - def test_diff_msg_get_added_to_order_book(self): - test_active_order_tracker = self.order_book_tracker._active_order_trackers["BTC-USD"] - - price = "200" - order_id = "test_order_id" - product_id = "BTC-USD" - remaining_size = "1.00" - - # Test open message diff - raw_open_message = { - "type": "open", - "time": datetime.now().isoformat(), - "product_id": product_id, - "sequence": 20000000000, - "order_id": order_id, - "price": price, - "remaining_size": remaining_size, - "side": "buy" - } - open_message = CoinbaseProOrderBook.diff_message_from_exchange(raw_open_message) - self.order_book_tracker._order_book_diff_stream.put_nowait(open_message) - self.run_parallel(asyncio.sleep(5)) - - test_order_book_row = test_active_order_tracker.active_bids[Decimal(price)] - self.assertEqual(test_order_book_row[order_id]["remaining_size"], remaining_size) - - # Test change message diff - new_size = "2.00" - raw_change_message = { - "type": "change", - "time": datetime.now().isoformat(), - "product_id": product_id, - "sequence": 20000000001, - "order_id": order_id, - "price": price, - "new_size": new_size, - "old_size": remaining_size, - "side": "buy", - } - change_message = CoinbaseProOrderBook.diff_message_from_exchange(raw_change_message) - self.order_book_tracker._order_book_diff_stream.put_nowait(change_message) - self.run_parallel(asyncio.sleep(5)) - - test_order_book_row = test_active_order_tracker.active_bids[Decimal(price)] - self.assertEqual(test_order_book_row[order_id]["remaining_size"], new_size) - - # Test match message diff - match_size = "0.50" - raw_match_message = { - "type": "match", - "trade_id": 10, - "sequence": 20000000002, - "maker_order_id": order_id, - "taker_order_id": "test_order_id_2", - "time": datetime.now().isoformat(), - "product_id": "BTC-USD", - "size": match_size, - "price": price, - "side": "buy" - } - match_message = CoinbaseProOrderBook.diff_message_from_exchange(raw_match_message) - self.order_book_tracker._order_book_diff_stream.put_nowait(match_message) - self.run_parallel(asyncio.sleep(5)) - - test_order_book_row = test_active_order_tracker.active_bids[Decimal(price)] - self.assertEqual(Decimal(test_order_book_row[order_id]["remaining_size"]), - Decimal(new_size) - Decimal(match_size)) - - # Test done message diff - raw_done_message = { - "type": "done", - "time": datetime.now().isoformat(), - "product_id": "BTC-USD", - "sequence": 20000000003, - "price": price, - "order_id": order_id, - "reason": "filled", - "remaining_size": 0, - "side": "buy", - } - 
done_message = CoinbaseProOrderBook.diff_message_from_exchange(raw_done_message) - self.order_book_tracker._order_book_diff_stream.put_nowait(done_message) - self.run_parallel(asyncio.sleep(5)) - - test_order_book_row = test_active_order_tracker.active_bids[Decimal(price)] - self.assertTrue(order_id not in test_order_book_row) - - def test_api_get_last_traded_prices(self): - prices = self.ev_loop.run_until_complete( - CoinbaseProAPIOrderBookDataSource.get_last_traded_prices(["BTC-USD", "LTC-USD"])) - for key, value in prices.items(): - print(f"{key} last_trade_price: {value}") - self.assertGreater(prices["BTC-USD"], 1000) - self.assertLess(prices["LTC-USD"], 1000) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/coinbase_pro/test_coinbase_pro_user_stream_tracker.py b/test/connector/exchange/coinbase_pro/test_coinbase_pro_user_stream_tracker.py deleted file mode 100644 index 38a0ad4808..0000000000 --- a/test/connector/exchange/coinbase_pro/test_coinbase_pro_user_stream_tracker.py +++ /dev/null @@ -1,181 +0,0 @@ -import asyncio -import contextlib -import logging -import time -import unittest -from decimal import Decimal -from typing import Optional - -import conf -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange import CoinbaseProAuth, CoinbaseProExchange -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_message import CoinbaseProOrderBookMessage -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_user_stream_tracker import CoinbaseProUserStreamTracker -from hummingbot.core.clock import ( - Clock, - ClockMode -) -from hummingbot.core.data_type.common import OrderType -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) - - -class CoinbaseProUserStreamTrackerUnitTest(unittest.TestCase): - user_stream_tracker: Optional[CoinbaseProUserStreamTracker] = None - - market: CoinbaseProExchange - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.coinbase_pro_auth = CoinbaseProAuth(conf.coinbase_pro_api_key, - conf.coinbase_pro_secret_key, - conf.coinbase_pro_passphrase) - cls.trading_pairs = ["ETH-USDC"] - cls.user_stream_tracker: CoinbaseProUserStreamTracker = CoinbaseProUserStreamTracker( - coinbase_pro_auth=cls.coinbase_pro_auth, trading_pairs=cls.trading_pairs) - cls.user_stream_tracker_task: asyncio.Task = safe_ensure_future(cls.user_stream_tracker.start()) - - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: CoinbaseProExchange = CoinbaseProExchange( - conf.coinbase_pro_api_key, - conf.coinbase_pro_secret_key, - conf.coinbase_pro_passphrase, - trading_pairs=cls.trading_pairs - ) - print("Initializing Coinbase Pro market... 
this will take about a minute.") - cls.clock.add_iterator(cls.market) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self.clock.run_til(next_iteration) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_limit_order_cancelled(self): - """ - This test should be run after the developer has implemented the limit buy and cancel - in the corresponding market class - """ - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.1")) - trading_pair = self.trading_pairs[0] - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price * Decimal("0.8") - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - client_order_id = self.market.buy(trading_pair, quantized_amount, OrderType.LIMIT, quantize_bid_price) - - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - [open_message] = self.run_parallel(self.user_stream_tracker.user_stream.get()) - - # print(open_message) - self.assertTrue(isinstance(open_message, CoinbaseProOrderBookMessage)) - self.assertEqual(open_message.trading_pair, trading_pair) - self.assertEqual(open_message.content["type"], "open") - self.assertEqual(open_message.content["side"], "buy") - self.assertEqual(open_message.content["product_id"], trading_pair) - self.assertEqual(Decimal(open_message.content["price"]), quantize_bid_price) - self.assertEqual(Decimal(open_message.content["remaining_size"]), quantized_amount) - - self.run_parallel(asyncio.sleep(5.0)) - self.market.cancel(trading_pair, client_order_id) - - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - [done_message] = self.run_parallel(self.user_stream_tracker.user_stream.get()) - - # print(done_message) - self.assertEqual(done_message.trading_pair, trading_pair) - self.assertEqual(done_message.content["type"], "done") - self.assertEqual(done_message.content["side"], "buy") - self.assertEqual(done_message.content["product_id"], trading_pair) - self.assertEqual(Decimal(done_message.content["price"]), quantize_bid_price) - self.assertEqual(Decimal(done_message.content["remaining_size"]), quantized_amount) - self.assertEqual(done_message.content["reason"], "canceled") - - @unittest.skip - def test_limit_order_filled(self): - """ - This test should be run after the developer has implemented the limit buy in the corresponding market class - """ - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.1")) - trading_pair = self.trading_pairs[0] - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price * Decimal("1.05") - quantize_bid_price: Decimal = 
self.market.quantize_order_price(trading_pair, bid_price) - - self.market.buy(trading_pair, quantized_amount, OrderType.LIMIT, quantize_bid_price) - - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - [message_1, message_2] = self.run_parallel(self.user_stream_tracker.user_stream.get(), - self.user_stream_tracker.user_stream.get()) - self.assertTrue(isinstance(message_1, CoinbaseProOrderBookMessage)) - self.assertTrue(isinstance(message_2, CoinbaseProOrderBookMessage)) - if message_1.content["type"] == "done": - done_message = message_1 - match_message = message_2 - else: - done_message = message_2 - match_message = message_1 - - # print(done_message) - self.assertEqual(done_message.trading_pair, trading_pair) - self.assertEqual(done_message.content["type"], "done") - self.assertEqual(done_message.content["side"], "buy") - self.assertEqual(done_message.content["product_id"], trading_pair) - self.assertEqual(Decimal(done_message.content["price"]), quantize_bid_price) - self.assertEqual(Decimal(done_message.content["remaining_size"]), Decimal(0.0)) - self.assertEqual(done_message.content["reason"], "filled") - - # print(match_message) - self.assertEqual(match_message.trading_pair, trading_pair) - self.assertEqual(match_message.content["type"], "match") - self.assertEqual(match_message.content["side"], "sell") - self.assertEqual(match_message.content["product_id"], trading_pair) - self.assertLessEqual(Decimal(match_message.content["price"]), quantize_bid_price) - self.assertEqual(Decimal(match_message.content["size"]), quantized_amount) - - @unittest.skip - def test_user_stream_manually(self): - """ - This test should be run before market functions like buy and sell are implemented. - Developer needs to manually trigger those actions in order for the messages to show up in the user stream. 
- """ - self.ev_loop.run_until_complete(asyncio.sleep(30.0)) - print(self.user_stream_tracker.user_stream) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/gate_io/.gitignore b/test/connector/exchange/gate_io/.gitignore deleted file mode 100644 index 23d9952b8c..0000000000 --- a/test/connector/exchange/gate_io/.gitignore +++ /dev/null @@ -1 +0,0 @@ -backups \ No newline at end of file diff --git a/test/connector/exchange/gate_io/test_gate_io_auth.py b/test/connector/exchange/gate_io/test_gate_io_auth.py deleted file mode 100644 index b9acfce73f..0000000000 --- a/test/connector/exchange/gate_io/test_gate_io_auth.py +++ /dev/null @@ -1,84 +0,0 @@ -import asyncio -import logging -import sys -import unittest -from os.path import join, realpath -from typing import Any, Dict - -import aiohttp -import ujson - -import conf -from hummingbot.connector.exchange.gate_io import gate_io_constants as CONSTANTS -from hummingbot.connector.exchange.gate_io.gate_io_auth import GateIoAuth -from hummingbot.connector.exchange.gate_io.gate_io_utils import build_gate_io_api_factory, rest_response_with_errors -from hummingbot.connector.exchange.gate_io.gate_io_websocket import GateIoWebsocket -from hummingbot.core.api_throttler.async_throttler import AsyncThrottler -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class TestAuth(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - api_key = conf.gate_io_api_key - secret_key = conf.gate_io_secret_key - cls.auth = GateIoAuth(api_key, secret_key) - - async def rest_auth(self) -> Dict[Any, Any]: - endpoint = CONSTANTS.USER_BALANCES_PATH_URL - headers = self.auth.get_headers("GET", f"{CONSTANTS.REST_URL_AUTH}/{endpoint}", None) - http_client = aiohttp.ClientSession() - response = await http_client.get(f"{CONSTANTS.REST_URL}/{endpoint}", headers=headers) - await http_client.close() - return await response.json() - - async def rest_auth_post(self) -> Dict[Any, Any]: - endpoint = CONSTANTS.ORDER_CREATE_PATH_URL - http_client = aiohttp.ClientSession() - order_params = ujson.dumps({ - 'currency_pair': 'ETH_BTC', - 'type': 'limit', - 'side': 'buy', - 'amount': '0.00000001', - 'price': '0.0000001', - }) - headers = self.auth.get_headers("POST", f"{CONSTANTS.REST_URL_AUTH}/{endpoint}", order_params) - http_status, response, request_errors = await rest_response_with_errors( - http_client.request( - method='POST', url=f"{CONSTANTS.REST_URL}/{endpoint}", headers=headers, data=order_params - ) - ) - await http_client.close() - return response - - async def ws_auth(self) -> Dict[Any, Any]: - ws = GateIoWebsocket(api_factory=build_gate_io_api_factory( - throttler=AsyncThrottler(CONSTANTS.RATE_LIMITS)), - auth=self.auth) - await ws.connect() - await ws.subscribe(CONSTANTS.USER_BALANCE_ENDPOINT_NAME) - async for response in ws.on_message(): - if ws.is_subscribed: - return True - return False - - def test_rest_auth(self): - result = self.ev_loop.run_until_complete(self.rest_auth()) - if len(result) == 0 or "currency" not in result[0].keys(): - print(f"Unexpected response for API call: {result}") - assert "currency" in result[0].keys() - - def test_rest_auth_post(self): - result = self.ev_loop.run_until_complete(self.rest_auth_post()) - if "message" not in result.keys(): - 
print(f"Unexpected response for API call: {result}") - assert "message" in result.keys() - assert "Your order size 0.00000001 is too small" in result['message'] - - def test_ws_auth(self): - response = self.ev_loop.run_until_complete(self.ws_auth()) - assert response is True diff --git a/test/connector/exchange/gate_io/test_gate_io_exchange.py b/test/connector/exchange/gate_io/test_gate_io_exchange.py deleted file mode 100644 index c167886981..0000000000 --- a/test/connector/exchange/gate_io/test_gate_io_exchange.py +++ /dev/null @@ -1,436 +0,0 @@ -import asyncio -import contextlib -import logging -import math -import os -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import List - -import conf -from hummingbot.connector.exchange.gate_io.gate_io_exchange import GateIoExchange -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import Clock, ClockMode -from hummingbot.core.data_type.common import OrderType -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.utils.async_utils import safe_gather, safe_ensure_future -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill - -logging.basicConfig(level=METRICS_LOG_LEVEL) - -API_KEY = conf.gate_io_api_key -API_SECRET = conf.gate_io_secret_key - - -class GateIoExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - connector: GateIoExchange - event_logger: EventLogger - trading_pair = "BTC-USDT" - base_token, quote_token = trading_pair.split("-") - order_amount = Decimal("0.0001") - order_amount_quant = Decimal("0.000123456") - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - global MAINNET_RPC_URL - - cls.ev_loop = asyncio.get_event_loop() - - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.connector: GateIoExchange = GateIoExchange( - gate_io_api_key=API_KEY, - gate_io_secret_key=API_SECRET, - trading_pairs=[cls.trading_pair], - trading_required=True - ) - print("Initializing Gate.Io market... 
this will take about a minute.") - cls.clock.add_iterator(cls.connector) - cls.stack: contextlib.ExitStack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls, connector = None): - if connector is None: - connector = cls.connector - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if connector.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../connector_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.event_logger = EventLogger() - for event_tag in self.events: - self.connector.add_listener(event_tag, self.event_logger) - - def tearDown(self): - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - self.event_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def _place_order(self, is_buy, amount, order_type, price, ex_order_id) -> str: - if is_buy: - cl_order_id = self.connector.buy(self.trading_pair, amount, order_type, price) - else: - cl_order_id = self.connector.sell(self.trading_pair, amount, order_type, price) - return cl_order_id - - def _cancel_order(self, cl_order_id, connector=None): - if connector is None: - connector = self.connector - return connector.cancel(self.trading_pair, cl_order_id) - - def test_estimate_fee(self): - maker_fee = self.connector.estimate_fee_pct(True) - self.assertAlmostEqual(maker_fee, Decimal("0.001")) - taker_fee = self.connector.estimate_fee_pct(False) - self.assertAlmostEqual(taker_fee, Decimal("0.0025")) - - def test_buy_and_sell(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.02") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - quote_bal = self.connector.get_available_balance(self.quote_token) - base_bal = self.connector.get_available_balance(self.base_token) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual("BTC", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - 
self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and str(event.order_id) == str(order_id) - for event in self.event_logger.event_log])) - - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - expected_quote_bal = quote_bal - quote_amount_traded - # self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 1) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.98") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual("BTC", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertGreater(order_completed_event.fee_amount, Decimal(0)) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.event_logger.event_log])) - - # check available base balance gets updated, we need to wait a bit for the balance message to arrive - expected_base_bal = base_bal - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - self.assertAlmostEqual(expected_base_bal, self.connector.get_available_balance(self.base_token), 5) - - def test_limit_makers_unfilled(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.ev_loop.run_until_complete(asyncio.sleep(2)) - quote_bal = self.connector.get_available_balance(self.quote_token) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - quote_amount = (price * amount) - expected_quote_bal = quote_bal - quote_amount - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - 
self.ev_loop.run_until_complete(asyncio.sleep(2)) - - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 5) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - - cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - # # @TODO: find a way to create "rejected" - # def test_limit_maker_rejections(self): - # price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - # price = self.connector.get_price(self.trading_pair, False) * Decimal("0.8") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - def test_cancel_all(self): - bid_price = self.connector.get_price(self.trading_pair, True) - ask_price = self.connector.get_price(self.trading_pair, False) - bid_price = self.connector.quantize_order_price(self.trading_pair, bid_price * Decimal("0.9")) - ask_price = self.connector.quantize_order_price(self.trading_pair, ask_price * Decimal("1.1")) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - - buy_id = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - sell_id = self._place_order(False, amount, OrderType.LIMIT, ask_price, 2) - - self.ev_loop.run_until_complete(asyncio.sleep(1)) - asyncio.ensure_future(self.connector.cancel_all(5)) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - cancel_events = [t for t in self.event_logger.event_log if isinstance(t, OrderCancelledEvent)] - self.assertEqual({buy_id, sell_id}, {o.order_id for o in cancel_events}) - - def test_order_quantized_values(self): - bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - ask_price: Decimal = self.connector.get_price(self.trading_pair, False) - mid_price: Decimal = (bid_price + ask_price) / 2 - - # Make sure there's enough balance to make the limit orders. - self.assertGreater(self.connector.get_balance("BTC"), Decimal("0.0005")) - self.assertGreater(self.connector.get_balance("USDT"), Decimal("10")) - - # Intentionally set some prices with too many decimal places s.t. 
they - # need to be quantized. Also, place them far away from the mid-price s.t. they won't - # get filled during the test. - bid_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("0.9333192292111341")) - ask_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("1.1492431474884933")) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount_quant) - - # Test bid order - cl_order_id_1 = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - # Wait for the order created event and examine the order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - - # Test ask order - cl_order_id_2 = self._place_order(False, amount, OrderType.LIMIT, ask_price, 1) - # Wait for the order created event and examine and order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - - self._cancel_order(cl_order_id_1) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self._cancel_order(cl_order_id_2) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - - def test_orders_saving_and_restoration(self): - config_path = "test_config" - strategy_name = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - self.connector._in_flight_orders.clear() - self.assertEqual(0, len(self.connector.tracking_states)) - - # Try to put limit buy order for 0.02 ETH worth of ZRX, and watch for order creation event. - current_bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - price: Decimal = current_bid_price * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - - amount: Decimal = self.order_amount - amount = self.connector.quantize_order_amount(self.trading_pair, amount) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.connector.tracking_states)) - self.assertEqual(cl_order_id, list(self.connector.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.connector) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(cl_order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.connector) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. 
- self.connector.stop(self._clock) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - self.clock.remove_iterator(self.connector) - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - # Clear the event loop - self.event_logger.clear() - new_connector = GateIoExchange(API_KEY, API_SECRET, [self.trading_pair], True) - for event_tag in self.events: - new_connector.add_listener(event_tag, self.event_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [new_connector], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.clock.add_iterator(new_connector) - self.ev_loop.run_until_complete(self.wait_til_ready(new_connector)) - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - new_connector.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(new_connector.limit_orders)) - self.assertEqual(1, len(new_connector.tracking_states)) - - # Cancel the order and verify that the change is saved. - self._cancel_order(cl_order_id, new_connector) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - recorder.save_market_states(config_path, new_connector) - order_id = None - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, cl_order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. - for order_book in self.connector.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - def test_filled_orders_recorded(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - # Try to buy some token from the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.05") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. 
- price = self.connector.get_price(self.trading_pair, True) * Decimal("0.95") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, self.order_amount) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertGreaterEqual(len(trade_fills), 2) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertGreaterEqual(len(buy_fills), 1) - self.assertGreaterEqual(len(sell_fills), 1) - - order_id = None - - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) diff --git a/test/connector/exchange/gate_io/test_gate_io_order_book_tracker.py b/test/connector/exchange/gate_io/test_gate_io_order_book_tracker.py deleted file mode 100755 index 52d7b7761b..0000000000 --- a/test/connector/exchange/gate_io/test_gate_io_order_book_tracker.py +++ /dev/null @@ -1,103 +0,0 @@ -import asyncio -import logging -import math -import time -import unittest -from typing import Dict, Optional, List - -from hummingbot.connector.exchange.gate_io import gate_io_constants as CONSTANTS -from hummingbot.connector.exchange.gate_io.gate_io_api_order_book_data_source import GateIoAPIOrderBookDataSource -from hummingbot.connector.exchange.gate_io.gate_io_order_book_tracker import GateIoOrderBookTracker -from hummingbot.core.api_throttler.async_throttler import AsyncThrottler -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import OrderBookEvent, OrderBookTradeEvent -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class GateIoOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[GateIoOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - trading_pairs: List[str] = [ - "BTC-USDT", - "ETH-USDT", - ] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() - cls.throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS) - cls.order_book_tracker: GateIoOrderBookTracker = GateIoOrderBookTracker(cls.throttler, cls.trading_pairs) - cls.order_book_tracker.start() - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - while True: - if len(cls.order_book_tracker.order_books) > 0: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - async def run_parallel_async(self, *tasks, timeout=None): - future: asyncio.Future = asyncio.ensure_future(asyncio.gather(*tasks)) - timer = 0 - while not future.done(): - if timeout and timer > timeout: - raise Exception("Timeout running parallel async tasks in tests") - timer += 1 - now = time.time() - _next_iteration = now // 1.0 + 1 # noqa: F841 - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return 
self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def setUp(self): - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - order_book.add_listener(event_tag, self.event_logger) - - def test_order_book_trade_event_emission(self): - """ - Tests if the order book tracker is able to retrieve order book trade message from exchange and emit order book - trade events after correctly parsing the trade messages - """ - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertTrue(type(ob_trade_event) == OrderBookTradeEvent) - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertTrue(type(ob_trade_event.timestamp) in [float, int]) - self.assertTrue(type(ob_trade_event.amount) == float) - self.assertTrue(type(ob_trade_event.price) == float) - self.assertTrue(type(ob_trade_event.type) == TradeType) - # datetime is in seconds - self.assertTrue(math.ceil(math.log10(ob_trade_event.timestamp)) == 10) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_integrity(self): - # Wait 5 seconds to process some diffs. - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - eth_usd: OrderBook = order_books["ETH-USDT"] - self.assertIsNot(eth_usd.last_diff_uid, 0) - self.assertGreaterEqual(eth_usd.get_price_for_volume(True, 10).result_price, - eth_usd.get_price(True)) - self.assertLessEqual(eth_usd.get_price_for_volume(False, 10).result_price, - eth_usd.get_price(False)) - - def test_api_get_last_traded_prices(self): - prices = self.ev_loop.run_until_complete( - GateIoAPIOrderBookDataSource.get_last_traded_prices(["BTC-USDT", "LTC-BTC"])) - for key, value in prices.items(): - print(f"{key} last_trade_price: {value}") - self.assertGreater(prices["BTC-USDT"], 1000) - self.assertLess(prices["LTC-BTC"], 1) diff --git a/test/connector/exchange/gate_io/test_gate_io_order_status.py b/test/connector/exchange/gate_io/test_gate_io_order_status.py deleted file mode 100644 index f82cb2b118..0000000000 --- a/test/connector/exchange/gate_io/test_gate_io_order_status.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -import sys -import asyncio -import unittest -import aiohttp -import conf -import logging -import os -from os.path import join, realpath -from typing import Dict, Any -from hummingbot.connector.exchange.gate_io.gate_io_auth import GateIoAuth -from hummingbot.connector.exchange.gate_io.gate_io_utils import rest_response_with_errors -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.connector.exchange.gate_io import gate_io_constants as CONSTANTS - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class TestAuth(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - api_key = conf.gate_io_api_key - secret_key = conf.gate_io_secret_key - cls.auth = GateIoAuth(api_key, secret_key) - cls.exchange_order_id = os.getenv("TEST_ORDER_ID") - cls.trading_pair = os.getenv("TEST_TRADING_PAIR") - - async def fetch_order_status(self) -> Dict[Any, Any]: - endpoint = CONSTANTS.ORDER_STATUS_PATH_URL.format(id=self.exchange_order_id) - params = {'currency_pair': self.trading_pair} - http_client = aiohttp.ClientSession() - headers = 
self.auth.get_headers("GET", f"{CONSTANTS.REST_URL_AUTH}/{endpoint}", params) - http_status, response, request_errors = await rest_response_with_errors( - http_client.request(method='GET', url=f"{CONSTANTS.REST_URL}/{endpoint}", headers=headers, params=params) - ) - await http_client.close() - return response - - def test_order_status(self): - status_test_ready = all({ - 'id': self.exchange_order_id is not None and len(self.exchange_order_id), - 'pair': self.trading_pair is not None and len(self.trading_pair), - }.values()) - if status_test_ready: - result = self.ev_loop.run_until_complete(self.fetch_order_status()) - print(f"Response:\n{result}") diff --git a/test/connector/exchange/gate_io/test_gate_io_user_stream_tracker.py b/test/connector/exchange/gate_io/test_gate_io_user_stream_tracker.py deleted file mode 100644 index ef5dc568d3..0000000000 --- a/test/connector/exchange/gate_io/test_gate_io_user_stream_tracker.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -import sys -import asyncio -import logging -import unittest -import conf - -from os.path import join, realpath -from hummingbot.connector.exchange.gate_io.gate_io_user_stream_tracker import GateIoUserStreamTracker -from hummingbot.connector.exchange.gate_io.gate_io_auth import GateIoAuth -from hummingbot.core.utils.async_utils import safe_ensure_future -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class GateIoUserStreamTrackerUnitTest(unittest.TestCase): - api_key = conf.gate_io_api_key - api_secret = conf.gate_io_secret_key - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.trading_pairs = ["BTC-USDT"] - cls.user_stream_tracker: GateIoUserStreamTracker = GateIoUserStreamTracker( - gate_io_auth=GateIoAuth(cls.api_key, cls.api_secret), - trading_pairs=cls.trading_pairs) - cls.user_stream_tracker_task: asyncio.Task = safe_ensure_future(cls.user_stream_tracker.start()) - - def test_user_stream(self): - # Wait process some msgs. 
- print("Sleeping for 30s to gather some user stream messages.") - self.ev_loop.run_until_complete(asyncio.sleep(30.0)) - print(self.user_stream_tracker.user_stream) diff --git a/test/connector/exchange/hitbtc/.gitignore b/test/connector/exchange/hitbtc/.gitignore deleted file mode 100644 index 23d9952b8c..0000000000 --- a/test/connector/exchange/hitbtc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -backups \ No newline at end of file diff --git a/test/connector/exchange/hitbtc/test_hitbtc_auth.py b/test/connector/exchange/hitbtc/test_hitbtc_auth.py deleted file mode 100644 index 1943412ea3..0000000000 --- a/test/connector/exchange/hitbtc/test_hitbtc_auth.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python -import sys -import asyncio -import unittest -import aiohttp -import conf -import logging -from os.path import join, realpath -from typing import Dict, Any -from hummingbot.connector.exchange.hitbtc.hitbtc_auth import HitbtcAuth -from hummingbot.connector.exchange.hitbtc.hitbtc_websocket import HitbtcWebsocket -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.connector.exchange.hitbtc.hitbtc_constants import Constants - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class TestAuth(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - api_key = conf.hitbtc_api_key - secret_key = conf.hitbtc_secret_key - cls.auth = HitbtcAuth(api_key, secret_key) - - async def rest_auth(self) -> Dict[Any, Any]: - endpoint = Constants.ENDPOINT['USER_BALANCES'] - headers = self.auth.get_headers("GET", f"{Constants.REST_URL_AUTH}/{endpoint}", None) - http_client = aiohttp.ClientSession() - response = await http_client.get(f"{Constants.REST_URL}/{endpoint}", headers=headers) - await http_client.close() - return await response.json() - - async def ws_auth(self) -> Dict[Any, Any]: - ws = HitbtcWebsocket(self.auth) - await ws.connect() - await ws.subscribe(Constants.WS_SUB["USER_ORDERS_TRADES"], None, {}) - async for response in ws.on_message(): - return response - - def test_rest_auth(self): - result = self.ev_loop.run_until_complete(self.rest_auth()) - if len(result) == 0 or "currency" not in result[0].keys(): - print(f"Unexpected response for API call: {result}") - assert "currency" in result[0].keys() - - def test_ws_auth(self): - response = self.ev_loop.run_until_complete(self.ws_auth()) - if 'result' not in response: - print(f"Unexpected response for API call: {response}") - assert response['result'] is True diff --git a/test/connector/exchange/hitbtc/test_hitbtc_currencies.py b/test/connector/exchange/hitbtc/test_hitbtc_currencies.py deleted file mode 100644 index 3e73e75968..0000000000 --- a/test/connector/exchange/hitbtc/test_hitbtc_currencies.py +++ /dev/null @@ -1,47 +0,0 @@ -import sys -import asyncio -import unittest -import aiohttp -import logging -from os.path import join, realpath -from typing import Dict, Any - -from hummingbot.connector.exchange.hitbtc.hitbtc_api_order_book_data_source import HitbtcAPIOrderBookDataSource -from hummingbot.connector.exchange.hitbtc.hitbtc_constants import Constants -from hummingbot.connector.exchange.hitbtc.hitbtc_utils import aiohttp_response_with_errors -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class TestAuth(unittest.TestCase): - @classmethod - def 
setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - - async def fetch_symbols(self) -> Dict[Any, Any]: - endpoint = Constants.ENDPOINT['SYMBOL'] - http_client = aiohttp.ClientSession() - http_status, response, request_errors = await aiohttp_response_with_errors(http_client.request(method='GET', - url=f"{Constants.REST_URL}/{endpoint}")) - await http_client.close() - return response - - def test_all_trading_pairs_matched(self): - result = self.ev_loop.run_until_complete(self.fetch_symbols()) - print('') - pairs = [i['id'] for i in result] - unmatched_pairs = [] - for pair in pairs: - matched_pair = asyncio.get_event_loop().run_until_complete( - HitbtcAPIOrderBookDataSource.trading_pair_associated_to_exchange_symbol(pair)) - if matched_pair is None: - matched_pair_split = None - print(f"\nUnmatched pair: {pair}\n") - unmatched_pairs.append(pair) - else: - matched_pair_split = matched_pair.split('-') - if 'USDUSD' in pair or ('USD' in matched_pair_split[0] and 'USD' in matched_pair_split[1]): - print(f'Found double USD pair: `{pair}` matched to => `{matched_pair}`') - assert len(unmatched_pairs) == 0 diff --git a/test/connector/exchange/hitbtc/test_hitbtc_exchange.py b/test/connector/exchange/hitbtc/test_hitbtc_exchange.py deleted file mode 100644 index ff962a72a3..0000000000 --- a/test/connector/exchange/hitbtc/test_hitbtc_exchange.py +++ /dev/null @@ -1,437 +0,0 @@ -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -import asyncio -import logging -from decimal import Decimal -import unittest -import contextlib -import time -import os -from typing import List -import conf -import math - -from hummingbot.core.clock import Clock, ClockMode -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.core.utils.async_utils import safe_gather, safe_ensure_future -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.data_type.common import OrderType -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.trade_fill import TradeFill -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.connector.exchange.hitbtc.hitbtc_exchange import HitbtcExchange - -logging.basicConfig(level=METRICS_LOG_LEVEL) - -API_KEY = conf.hitbtc_api_key -API_SECRET = conf.hitbtc_secret_key - - -class HitbtcExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - connector: HitbtcExchange - event_logger: EventLogger - trading_pair = "BTC-USDT" - base_token, quote_token = trading_pair.split("-") - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - global MAINNET_RPC_URL - - cls.ev_loop = asyncio.get_event_loop() - - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.connector: HitbtcExchange = HitbtcExchange( - hitbtc_api_key=API_KEY, - hitbtc_secret_key=API_SECRET, - trading_pairs=[cls.trading_pair], - 
trading_required=True - ) - print("Initializing Hitbtc market... this will take about a minute.") - cls.clock.add_iterator(cls.connector) - cls.stack: contextlib.ExitStack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls, connector = None): - if connector is None: - connector = cls.connector - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if connector.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../connector_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.event_logger = EventLogger() - for event_tag in self.events: - self.connector.add_listener(event_tag, self.event_logger) - - def tearDown(self): - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - self.event_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def _place_order(self, is_buy, amount, order_type, price, ex_order_id) -> str: - if is_buy: - cl_order_id = self.connector.buy(self.trading_pair, amount, order_type, price) - else: - cl_order_id = self.connector.sell(self.trading_pair, amount, order_type, price) - return cl_order_id - - def _cancel_order(self, cl_order_id, connector=None): - if connector is None: - connector = self.connector - return connector.cancel(self.trading_pair, cl_order_id) - - def test_estimate_fee(self): - maker_fee = self.connector.estimate_fee_pct(True) - self.assertAlmostEqual(maker_fee, Decimal("0.001")) - taker_fee = self.connector.estimate_fee_pct(False) - self.assertAlmostEqual(taker_fee, Decimal("0.0025")) - - def test_buy_and_sell(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.02") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - quote_bal = self.connector.get_available_balance(self.quote_token) - base_bal = self.connector.get_available_balance(self.base_token) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual("BTC", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - 
self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and str(event.order_id) == str(order_id) - for event in self.event_logger.event_log])) - - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - expected_quote_bal = quote_bal - quote_amount_traded - # self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 1) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.98") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - order_completed_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - trade_events = [t for t in self.event_logger.event_log if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertEqual(amount, order_completed_event.base_asset_amount) - self.assertEqual("BTC", order_completed_event.base_asset) - self.assertEqual("USDT", order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertGreater(order_completed_event.fee_amount, Decimal(0)) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.event_logger.event_log])) - - # check available base balance gets updated, we need to wait a bit for the balance message to arrive - expected_base_bal = base_bal - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - self.assertAlmostEqual(expected_base_bal, self.connector.get_available_balance(self.base_token), 5) - - def test_limit_makers_unfilled(self): - price = self.connector.get_price(self.trading_pair, True) * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.ev_loop.run_until_complete(asyncio.sleep(2)) - quote_bal = self.connector.get_available_balance(self.quote_token) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - # check available quote balance gets updated, we need to wait a bit for the balance message to arrive - taker_fee = self.connector.estimate_fee_pct(False) - quote_amount = ((price * amount)) - quote_amount = ((price * amount) * (Decimal("1") + 
taker_fee)) - expected_quote_bal = quote_bal - quote_amount - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.ev_loop.run_until_complete(self.connector._update_balances()) - self.ev_loop.run_until_complete(asyncio.sleep(2)) - - self.assertAlmostEqual(expected_quote_bal, self.connector.get_available_balance(self.quote_token), 5) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - - cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - self._cancel_order(cl_order_id) - event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.assertEqual(cl_order_id, event.order_id) - - # # @TODO: find a way to create "rejected" - # def test_limit_maker_rejections(self): - # price = self.connector.get_price(self.trading_pair, True) * Decimal("1.2") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - # price = self.connector.get_price(self.trading_pair, False) * Decimal("0.8") - # price = self.connector.quantize_order_price(self.trading_pair, price) - # amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000001")) - # cl_order_id = self._place_order(False, amount, OrderType.LIMIT_MAKER, price, 2) - # event = self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - # self.assertEqual(cl_order_id, event.order_id) - - def test_cancel_all(self): - bid_price = self.connector.get_price(self.trading_pair, True) - ask_price = self.connector.get_price(self.trading_pair, False) - bid_price = self.connector.quantize_order_price(self.trading_pair, bid_price * Decimal("0.9")) - ask_price = self.connector.quantize_order_price(self.trading_pair, ask_price * Decimal("1.1")) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - - buy_id = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - sell_id = self._place_order(False, amount, OrderType.LIMIT, ask_price, 2) - - self.ev_loop.run_until_complete(asyncio.sleep(1)) - asyncio.ensure_future(self.connector.cancel_all(5)) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - cancel_events = [t for t in self.event_logger.event_log if isinstance(t, OrderCancelledEvent)] - self.assertEqual({buy_id, sell_id}, {o.order_id for o in cancel_events}) - - def test_order_quantized_values(self): - bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - ask_price: Decimal = self.connector.get_price(self.trading_pair, False) - mid_price: Decimal = (bid_price + ask_price) / 2 - - # Make sure there's enough balance to make the limit orders. 
- self.assertGreater(self.connector.get_balance("BTC"), Decimal("0.0005")) - self.assertGreater(self.connector.get_balance("USDT"), Decimal("10")) - - # Intentionally set some prices with too many decimal places s.t. they - # need to be quantized. Also, place them far away from the mid-price s.t. they won't - # get filled during the test. - bid_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("0.9333192292111341")) - ask_price = self.connector.quantize_order_price(self.trading_pair, mid_price * Decimal("1.1492431474884933")) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.000223456")) - - # Test bid order - cl_order_id_1 = self._place_order(True, amount, OrderType.LIMIT, bid_price, 1) - # Wait for the order created event and examine the order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - - # Test ask order - cl_order_id_2 = self._place_order(False, amount, OrderType.LIMIT, ask_price, 1) - # Wait for the order created event and examine and order made - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCreatedEvent)) - - self._cancel_order(cl_order_id_1) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - self._cancel_order(cl_order_id_2) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - - def test_orders_saving_and_restoration(self): - config_path = "test_config" - strategy_name = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - self.connector._in_flight_orders.clear() - self.assertEqual(0, len(self.connector.tracking_states)) - - # Try to put limit buy order for 0.02 ETH worth of ZRX, and watch for order creation event. - current_bid_price: Decimal = self.connector.get_price(self.trading_pair, True) - price: Decimal = current_bid_price * Decimal("0.8") - price = self.connector.quantize_order_price(self.trading_pair, price) - - amount: Decimal = Decimal("0.0002") - amount = self.connector.quantize_order_amount(self.trading_pair, amount) - - cl_order_id = self._place_order(True, amount, OrderType.LIMIT_MAKER, price, 1) - order_created_event = self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCreatedEvent)) - self.assertEqual(cl_order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.connector.tracking_states)) - self.assertEqual(cl_order_id, list(self.connector.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.connector) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(cl_order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.connector) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. 
- self.connector.stop(self._clock) - self.ev_loop.run_until_complete(asyncio.sleep(5)) - self.clock.remove_iterator(self.connector) - for event_tag in self.events: - self.connector.remove_listener(event_tag, self.event_logger) - # Clear the event loop - self.event_logger.clear() - new_connector = HitbtcExchange(API_KEY, API_SECRET, [self.trading_pair], True) - for event_tag in self.events: - new_connector.add_listener(event_tag, self.event_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [new_connector], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.clock.add_iterator(new_connector) - self.ev_loop.run_until_complete(self.wait_til_ready(new_connector)) - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - new_connector.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(new_connector.limit_orders)) - self.assertEqual(1, len(new_connector.tracking_states)) - - # Cancel the order and verify that the change is saved. - self._cancel_order(cl_order_id, new_connector) - self.ev_loop.run_until_complete(self.event_logger.wait_for(OrderCancelledEvent)) - recorder.save_market_states(config_path, new_connector) - order_id = None - self.assertEqual(0, len(new_connector.limit_orders)) - self.assertEqual(0, len(new_connector.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, new_connector) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, cl_order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. - for order_book in self.connector.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - def test_filled_orders_recorded(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id = None - recorder = MarketsRecorder(sql, [self.connector], config_path, strategy_name) - recorder.start() - - try: - # Try to buy some token from the exchange, and watch for completion event. - price = self.connector.get_price(self.trading_pair, True) * Decimal("1.05") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - - order_id = self._place_order(True, amount, OrderType.LIMIT, price, 1) - self.ev_loop.run_until_complete(self.event_logger.wait_for(BuyOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - - # Reset the logs - self.event_logger.clear() - - # Try to sell back the same amount to the exchange, and watch for completion event. 
- price = self.connector.get_price(self.trading_pair, True) * Decimal("0.95") - price = self.connector.quantize_order_price(self.trading_pair, price) - amount = self.connector.quantize_order_amount(self.trading_pair, Decimal("0.0002")) - order_id = self._place_order(False, amount, OrderType.LIMIT, price, 2) - self.ev_loop.run_until_complete(self.event_logger.wait_for(SellOrderCompletedEvent)) - self.ev_loop.run_until_complete(asyncio.sleep(1)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertGreaterEqual(len(trade_fills), 2) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertGreaterEqual(len(buy_fills), 1) - self.assertGreaterEqual(len(sell_fills), 1) - - order_id = None - - finally: - if order_id is not None: - self.connector.cancel(self.trading_pair, order_id) - self.run_parallel(self.event_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) diff --git a/test/connector/exchange/hitbtc/test_hitbtc_order_book_tracker.py b/test/connector/exchange/hitbtc/test_hitbtc_order_book_tracker.py deleted file mode 100755 index 5ae1f74e58..0000000000 --- a/test/connector/exchange/hitbtc/test_hitbtc_order_book_tracker.py +++ /dev/null @@ -1,101 +0,0 @@ -import math -import time -import asyncio -import logging -import unittest - -from typing import Dict, Optional, List -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import OrderBookEvent, OrderBookTradeEvent -from hummingbot.core.data_type.common import TradeType -from hummingbot.connector.exchange.hitbtc.hitbtc_order_book_tracker import HitbtcOrderBookTracker -from hummingbot.connector.exchange.hitbtc.hitbtc_api_order_book_data_source import HitbtcAPIOrderBookDataSource -from hummingbot.core.data_type.order_book import OrderBook -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class HitbtcOrderBookTrackerUnitTest(unittest.TestCase): - order_book_tracker: Optional[HitbtcOrderBookTracker] = None - events: List[OrderBookEvent] = [ - OrderBookEvent.TradeEvent - ] - trading_pairs: List[str] = [ - "BTC-USDT", - "ETH-USDT", - ] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_tracker: HitbtcOrderBookTracker = HitbtcOrderBookTracker(cls.trading_pairs) - cls.order_book_tracker.start() - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - while True: - if len(cls.order_book_tracker.order_books) > 0: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - async def run_parallel_async(self, *tasks, timeout=None): - future: asyncio.Future = asyncio.ensure_future(asyncio.gather(*tasks)) - timer = 0 - while not future.done(): - if timeout and timer > timeout: - raise Exception("Timeout running parallel async tasks in tests") - timer += 1 - now = time.time() - _next_iteration = now // 1.0 + 1 # noqa: F841 - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def setUp(self): - self.event_logger = EventLogger() - for event_tag in self.events: - for trading_pair, order_book in self.order_book_tracker.order_books.items(): - 
order_book.add_listener(event_tag, self.event_logger) - - def test_order_book_trade_event_emission(self): - """ - Tests if the order book tracker is able to retrieve order book trade message from exchange and emit order book - trade events after correctly parsing the trade messages - """ - self.run_parallel(self.event_logger.wait_for(OrderBookTradeEvent)) - for ob_trade_event in self.event_logger.event_log: - self.assertTrue(type(ob_trade_event) == OrderBookTradeEvent) - self.assertTrue(ob_trade_event.trading_pair in self.trading_pairs) - self.assertTrue(type(ob_trade_event.timestamp) in [float, int]) - self.assertTrue(type(ob_trade_event.amount) == float) - self.assertTrue(type(ob_trade_event.price) == float) - self.assertTrue(type(ob_trade_event.type) == TradeType) - # datetime is in seconds - self.assertTrue(math.ceil(math.log10(ob_trade_event.timestamp)) == 10) - self.assertTrue(ob_trade_event.amount > 0) - self.assertTrue(ob_trade_event.price > 0) - - def test_tracker_integrity(self): - # Wait 5 seconds to process some diffs. - self.ev_loop.run_until_complete(asyncio.sleep(5.0)) - order_books: Dict[str, OrderBook] = self.order_book_tracker.order_books - eth_usd: OrderBook = order_books["ETH-USDT"] - self.assertIsNot(eth_usd.last_diff_uid, 0) - self.assertGreaterEqual(eth_usd.get_price_for_volume(True, 10).result_price, - eth_usd.get_price(True)) - self.assertLessEqual(eth_usd.get_price_for_volume(False, 10).result_price, - eth_usd.get_price(False)) - - def test_api_get_last_traded_prices(self): - prices = self.ev_loop.run_until_complete( - HitbtcAPIOrderBookDataSource.get_last_traded_prices(["BTC-USDT", "LTC-BTC"])) - for key, value in prices.items(): - print(f"{key} last_trade_price: {value}") - self.assertGreater(prices["BTC-USDT"], 1000) - self.assertLess(prices["LTC-BTC"], 1) diff --git a/test/connector/exchange/hitbtc/test_hitbtc_user_stream_tracker.py b/test/connector/exchange/hitbtc/test_hitbtc_user_stream_tracker.py deleted file mode 100644 index c53dcff7bc..0000000000 --- a/test/connector/exchange/hitbtc/test_hitbtc_user_stream_tracker.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -import sys -import asyncio -import logging -import unittest -import conf - -from os.path import join, realpath -from hummingbot.connector.exchange.hitbtc.hitbtc_user_stream_tracker import HitbtcUserStreamTracker -from hummingbot.connector.exchange.hitbtc.hitbtc_auth import HitbtcAuth -from hummingbot.core.utils.async_utils import safe_ensure_future -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL - - -sys.path.insert(0, realpath(join(__file__, "../../../../../"))) -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class HitbtcUserStreamTrackerUnitTest(unittest.TestCase): - api_key = conf.hitbtc_api_key - api_secret = conf.hitbtc_secret_key - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.trading_pairs = ["BTC-USDT"] - cls.user_stream_tracker: HitbtcUserStreamTracker = HitbtcUserStreamTracker( - hitbtc_auth=HitbtcAuth(cls.api_key, cls.api_secret), - trading_pairs=cls.trading_pairs) - cls.user_stream_tracker_task: asyncio.Task = safe_ensure_future(cls.user_stream_tracker.start()) - - def test_user_stream(self): - # Wait process some msgs. 
- print("Sleeping for 30s to gather some user stream messages.") - self.ev_loop.run_until_complete(asyncio.sleep(30.0)) - print(self.user_stream_tracker.user_stream) diff --git a/test/connector/exchange/kraken/test_kraken_api_order_book_data_source.py b/test/connector/exchange/kraken/test_kraken_api_order_book_data_source.py deleted file mode 100644 index ff6b56c996..0000000000 --- a/test/connector/exchange/kraken/test_kraken_api_order_book_data_source.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -from hummingbot.connector.exchange.kraken.kraken_api_order_book_data_source import KrakenAPIOrderBookDataSource -from hummingbot.core.data_type.order_book_tracker_entry import OrderBookTrackerEntry -import asyncio -import aiohttp -import logging -from typing import ( - Dict, - Optional, - Any, - List, -) -import unittest - - -class KrakenAPIOrderBookDataSourceUnitTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_data_source: KrakenAPIOrderBookDataSource = KrakenAPIOrderBookDataSource(["ETHUSDC", "XBTUSDC", "ETHDAI"]) - - def run_async(self, task): - return self.ev_loop.run_until_complete(task) - - def test_get_trading_pairs(self): - trading_pairs: List[str] = self.run_async(self.order_book_data_source.get_trading_pairs()) - self.assertIn("ETHDAI", trading_pairs) - - async def get_snapshot(self): - async with aiohttp.ClientSession() as client: - trading_pairs: List[str] = await self.order_book_data_source.get_trading_pairs() - trading_pair: str = trading_pairs[0] - try: - snapshot: Dict[str, Any] = await self.order_book_data_source.get_snapshot(client, trading_pair, 1000) - return snapshot - except Exception: - return None - - def test_get_snapshot(self): - snapshot: Optional[Dict[str, Any]] = self.run_async(self.get_snapshot()) - self.assertIsNotNone(snapshot) - self.assertIn(snapshot["trading_pair"], self.run_async(self.order_book_data_source.get_trading_pairs())) - - def test_get_tracking_pairs(self): - tracking_pairs: Dict[str, OrderBookTrackerEntry] = self.run_async(self.order_book_data_source.get_tracking_pairs()) - self.assertIsInstance(tracking_pairs["ETHDAI"], OrderBookTrackerEntry) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/kraken/test_kraken_api_user_stream_data_source.py b/test/connector/exchange/kraken/test_kraken_api_user_stream_data_source.py deleted file mode 100644 index 1ada757568..0000000000 --- a/test/connector/exchange/kraken/test_kraken_api_user_stream_data_source.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -from hummingbot.connector.exchange.kraken.kraken_api_user_stream_data_source import KrakenAPIUserStreamDataSource -from hummingbot.connector.exchange.kraken.kraken_auth import KrakenAuth -import asyncio -import logging -import unittest -import conf - - -class KrakenAPIOrderBookDataSourceUnitTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.kraken_auth = KrakenAuth(conf.kraken_api_key.strip(), conf.kraken_secret_key.strip()) - cls.user_stream_data_source: KrakenAPIUserStreamDataSource = 
KrakenAPIUserStreamDataSource(kraken_auth=cls.kraken_auth) - - def run_async(self, task): - return self.ev_loop.run_until_complete(task) - - def test_get_auth_token(self): - self.token: str = self.run_async(self.user_stream_data_source.get_auth_token()) - self.assertIsInstance(self.token, str) - self.run_async(self.user_stream_data_source.stop()) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/kraken/test_kraken_market.py b/test/connector/exchange/kraken/test_kraken_market.py deleted file mode 100644 index 74bcc5305a..0000000000 --- a/test/connector/exchange/kraken/test_kraken_market.py +++ /dev/null @@ -1,429 +0,0 @@ -import asyncio -import contextlib -import logging -import time -import unittest -from decimal import Decimal -from os import unlink -from os.path import join, realpath -from typing import List, Optional - -import conf -from hummingbot.client.config.fee_overrides_config_map import fee_overrides_config_map -from hummingbot.connector.exchange.kraken.kraken_exchange import KrakenExchange -from hummingbot.connector.exchange.kraken.kraken_utils import convert_to_exchange_trading_pair -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import ( - Clock, - ClockMode, -) -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill - -PAIR = "ETH-USDC" -BASE = "ETH" -QUOTE = "USDC" - - -class KrakenExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.ReceivedAsset, - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.OrderCancelled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled - ] - - market: KrakenExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: KrakenExchange = KrakenExchange( - conf.kraken_api_key, - conf.kraken_secret_key, - trading_pairs=[PAIR] - ) - - cls.count = 0 - - print("Initializing Kraken market... this will take about a minute. 
") - cls.clock.add_iterator(cls.market) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - cls.count += 1 - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../kraken_test.sqlite")) - try: - unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = asyncio.ensure_future(asyncio.gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self.clock.run_til(next_iteration) - return future.result() - - def run_parallel(self, *tasks): - return self.run_async(self.run_parallel_async(*tasks)) - - def run_async(self, task): - return self.ev_loop.run_until_complete(task) - - def sleep(self, t=1.0): - self.run_parallel(asyncio.sleep(t)) - - def test_get_fee(self): - limit_fee: AddedToCostTradeFee = self.market.get_fee(BASE, QUOTE, OrderType.LIMIT_MAKER, TradeType.BUY, 1, 1) - self.assertGreater(limit_fee.percent, 0) - self.assertEqual(len(limit_fee.flat_fees), 0) - market_fee: AddedToCostTradeFee = self.market.get_fee(BASE, QUOTE, OrderType.LIMIT, TradeType.BUY, 1) - self.assertGreater(market_fee.percent, 0) - self.assertEqual(len(market_fee.flat_fees), 0) - - def test_fee_overrides_config(self): - fee_overrides_config_map["kraken_taker_fee"].value = None - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.0026"), taker_fee.percent) - fee_overrides_config_map["kraken_taker_fee"].value = Decimal('0.2') - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.002"), taker_fee.percent) - fee_overrides_config_map["kraken_maker_fee"].value = None - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.0016"), maker_fee.percent) - fee_overrides_config_map["kraken_maker_fee"].value = Decimal('0.5') - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.005"), maker_fee.percent) - - def place_order(self, is_buy, trading_pair, amount, order_type, price): - order_id = None - if is_buy: - order_id = self.market.buy(trading_pair, amount, order_type, price) - else: - order_id = self.market.sell(trading_pair, amount, order_type, price) - return order_id - - def cancel_order(self, trading_pair, order_id): - self.market.cancel(trading_pair, order_id) - - def test_limit_taker_buy(self): - self.assertGreater(self.market.get_balance(QUOTE), 6) - trading_pair = PAIR - - self.sleep(3) - price: Decimal = self.market.get_price(trading_pair, True) - 
amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id = self.place_order( - True, - trading_pair, - quantized_amount, - OrderType.LIMIT, - price - ) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - order_completed_event: BuyOrderCompletedEvent = order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent) and t.amount is not None] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual(BASE, order_completed_event.base_asset) - self.assertEqual(QUOTE, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_limit_sell(self): - self.assertGreater(self.market.get_balance(BASE), 0.02) - trading_pair = PAIR - - self.sleep(3) - price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id = self.place_order( - False, - trading_pair, - quantized_amount, - OrderType.LIMIT, - price - ) - [order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - order_completed_event: SellOrderCompletedEvent = order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent) and t.amount is not None] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, order_completed_event.base_asset_amount) - self.assertEqual(BASE, order_completed_event.base_asset) - self.assertEqual(QUOTE, order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, order_completed_event.quote_asset_amount) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def underpriced_limit_buy(self): - self.assertGreater(self.market.get_balance(QUOTE), 4) - trading_pair = PAIR - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price * Decimal('0.005') - quantized_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id = self.place_order( - True, - trading_pair, - quantized_amount, - 
OrderType.LIMIT_MAKER, - quantized_bid_price - ) - - return order_id - - def underpriced_limit_buy_multiple(self, num): - order_ids = [] - for _ in range(num): - order_ids.append(self.underpriced_limit_buy()) - self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - return order_ids - - def test_cancel_order(self): - order_id = self.underpriced_limit_buy() - self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - - self.cancel_order(PAIR, order_id) - - [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = order_cancelled_event - self.assertEqual(order_cancelled_event.order_id, order_id) - - def test_cancel_all(self): - order_ids = self.underpriced_limit_buy_multiple(2) - - cancelled_orders = self.run_async(self.market.cancel_all(10.)) - self.assertEqual([order.order_id for order in cancelled_orders], order_ids) - self.assertTrue([order.success for order in cancelled_orders]) - - def test_order_saving_and_restoration(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - self.assertEqual(0, len(self.market.tracking_states)) - - # Try to put limit buy order for 0.02 ETH at fraction of USDC market price, and watch for order creation event. - order_id = self.underpriced_limit_buy() - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.market.tracking_states)) - self.assertEqual(order_id, list(self.market.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.market) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.market) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. - self.clock.remove_iterator(self.market) - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market: KrakenExchange = KrakenExchange( - conf.kraken_api_key, - conf.kraken_secret_key, - trading_pairs=[PAIR] - ) - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, self.market) - self.clock.add_iterator(self.market) - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - self.market.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(self.market.limit_orders)) - self.assertEqual(1, len(self.market.tracking_states)) - - # Cancel the order and verify that the change is saved. 
- self.market.cancel(PAIR, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_id = None - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, self.market) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.market.cancel(PAIR, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - unlink(self.db_path) - - def test_order_fill_record(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - # Try to buy 0.02 ETH from the exchange, and watch for completion event. - price: Decimal = self.market.get_price(PAIR, True) - amount: Decimal = Decimal("0.02") - quantized_amount: Decimal = self.market.quantize_order_amount(PAIR, amount) - order_id = self.place_order( - True, - PAIR, - quantized_amount, - OrderType.LIMIT, - price - ) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - # Reset the logs - self.market_logger.clear() - - # Try to sell back the same amount of ETH to the exchange, and watch for completion event. - price: Decimal = self.market.get_price(PAIR, False) - amount = buy_order_completed_event.base_asset_amount - quantized_amount: Decimal = self.market.quantize_order_amount(PAIR, amount) - order_id = self.place_order( - False, - PAIR, - quantized_amount, - OrderType.LIMIT, - price - ) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertGreaterEqual(len(trade_fills), 2) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertGreaterEqual(len(buy_fills), 1) - self.assertGreaterEqual(len(sell_fills), 1) - - order_id = None - - finally: - if order_id is not None: - self.market.cancel(PAIR, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - unlink(self.db_path) - - def test_pair_convesion(self): - for pair in self.market.trading_rules: - exchange_pair = convert_to_exchange_trading_pair(pair) - self.assertTrue(exchange_pair in self.market.order_books) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/kraken/test_kraken_order_book_tracker.py b/test/connector/exchange/kraken/test_kraken_order_book_tracker.py deleted file mode 100644 index 9bd53c3fc0..0000000000 --- a/test/connector/exchange/kraken/test_kraken_order_book_tracker.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -from hummingbot.connector.exchange.kraken.kraken_order_book_tracker import KrakenOrderBookTracker -from hummingbot.connector.exchange.kraken.kraken_api_order_book_data_source import KrakenAPIOrderBookDataSource -import asyncio -import logging -import unittest - - 
-class KrakenOrderBookTrackerUnitTest(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.order_book_tracker: KrakenOrderBookTracker = KrakenOrderBookTracker(trading_pairs=["ETHUSDC", "XBTUSDC"]) - cls.order_book_tracker.start() - cls.ev_loop.run_until_complete(cls.wait_til_tracker_ready()) - - @classmethod - async def wait_til_tracker_ready(cls): - while True: - if len(cls.order_book_tracker.order_books) > 0: - print("Initialized real-time order books.") - return - await asyncio.sleep(1) - - def run_async(self, task): - return self.ev_loop.run_until_complete(task) - - def test_data_source(self): - self.assertIsInstance(self.order_book_tracker.data_source, KrakenAPIOrderBookDataSource) - - def test_name(self): - self.assertEqual(self.order_book_tracker.exchange_name, "kraken") - - def test_start_stop(self): - self.assertTrue(asyncio.isfuture(self.order_book_tracker._order_book_snapshot_router_task)) - self.order_book_tracker.stop() - self.assertIsNone(self.order_book_tracker._order_book_snapshot_router_task) - self.order_book_tracker.start() - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/kraken/test_kraken_user_stream_tracker.py b/test/connector/exchange/kraken/test_kraken_user_stream_tracker.py deleted file mode 100644 index 646235cea9..0000000000 --- a/test/connector/exchange/kraken/test_kraken_user_stream_tracker.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../../"))) - -from hummingbot.connector.exchange.kraken.kraken_user_stream_tracker import KrakenUserStreamTracker -from hummingbot.connector.exchange.kraken.kraken_auth import KrakenAuth -from hummingbot.core.utils.async_utils import safe_ensure_future -import asyncio -import logging -import unittest -import conf - - -class KrakenUserStreamTrackerUnitTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.kraken_auth = KrakenAuth(conf.kraken_api_key, conf.kraken_secret_key) - cls.user_stream_tracker: KrakenUserStreamTracker = KrakenUserStreamTracker(kraken_auth=cls.kraken_auth) - cls.user_stream_tracker_task: asyncio.Task = safe_ensure_future(cls.user_stream_tracker.start()) - - def run_async(self, task): - return self.ev_loop.run_until_complete(task) - - def test_user_stream(self): - self.ev_loop.run_until_complete(asyncio.sleep(20.0)) - print(self.user_stream_tracker.user_stream) - - -def main(): - logging.basicConfig(level=logging.INFO) - unittest.main() - - -if __name__ == "__main__": - main() diff --git a/test/connector/exchange/kucoin/fixture_kucoin.py b/test/connector/exchange/kucoin/fixture_kucoin.py deleted file mode 100644 index 10890daf2c..0000000000 --- a/test/connector/exchange/kucoin/fixture_kucoin.py +++ /dev/null @@ -1,106 +0,0 @@ -class FixtureKucoin: - BALANCES = {"code": "200000", "data": [ - {"balance": "0.1910973", "available": "0.1910973", "holds": "0", "currency": "ETH", - "id": "5e3291017e612d0009cb8fa6", "type": "trade"}, - {"balance": "1", "available": "1", "holds": "0", "currency": "GRIN", "id": "5e32910f6743620009c134b0", - "type": "trade"}, - {"balance": "0", "available": "0", "holds": "0", "currency": "ETH", "id": "5e3275507cb36900083d9f8e", - "type": "main"}]} - - ORDER_PLACE = {"code": "200000", "data": {"orderId": 
"5e3cd0540fb53d000961491a"}} - - FILLED_SELL_LIMIT_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0.0021957", - "channel": "API", "feeCurrency": "USDT", "type": "limit", "isActive": False, - "createdAt": 1581043796000, "visibleSize": "0", "price": "208.61", - "iceberg": False, "stopTriggered": False, "funds": "0", - "id": "5e3cd0540fb53d000961491a", "timeInForce": "GTC", "tradeType": "TRADE", - "side": "sell", "dealSize": "0.01", "cancelAfter": 0, "dealFunds": "2.1957", - "stp": "", "postOnly": False, "stopPrice": "0", "size": "0.01", "stop": "", - "cancelExist": False, "clientOid": "sell-ETH-USDT-1581043796007943"}} - - FILLED_BUY_LIMIT_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0.001969718114", - "channel": "API", "feeCurrency": "USDT", "type": "limit", "isActive": False, - "createdAt": 1581045461000, "visibleSize": "0", "price": "229.8", "iceberg": False, - "stopTriggered": False, "funds": "0", "id": "5e3cd6d56e350a00094d32b8", - "timeInForce": "GTC", "tradeType": "TRADE", "side": "buy", "dealSize": "0.01", - "cancelAfter": 0, "dealFunds": "1.969718114", "stp": "", "postOnly": False, - "stopPrice": "0", "size": "0.01", "stop": "", "cancelExist": False, - "clientOid": "buy-ETH-USDT-1581045461006371"}} - - SELL_MARKET_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0.002401058172", - "channel": "API", "feeCurrency": "USDT", "type": "market", "isActive": False, - "createdAt": 1581055817000, "visibleSize": "0", "price": "0", "iceberg": False, - "stopTriggered": False, "funds": "0", "id": "5e3cff496e350a0009aa51d6", - "timeInForce": "GTC", "tradeType": "TRADE", "side": "sell", - "dealSize": "0.0109999", "cancelAfter": 0, "dealFunds": "2.401058172", "stp": "", - "postOnly": False, "stopPrice": "0", "size": "0.0109999", "stop": "", - "cancelExist": False, "clientOid": "sell-ETH-USDT-1581055817012353"}} - - BUY_MARKET_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0.0021843", - "channel": "API", "feeCurrency": "USDT", "type": "market", "isActive": False, - "createdAt": 1581056207000, "visibleSize": "0", "price": "0", "iceberg": False, - "stopTriggered": False, "funds": "0", "id": "5e3d00cf1fbc8d0008d81a18", - "timeInForce": "GTC", "tradeType": "TRADE", "side": "buy", "dealSize": "0.01", - "cancelAfter": 0, "dealFunds": "2.1843", "stp": "", "postOnly": False, - "stopPrice": "0", "size": "0.01", "stop": "", "cancelExist": False, - "clientOid": "buy-ETH-USDT-1581056207008008"}} - - CANCEL_ORDER = {"code": "200000", "data": {"cancelledOrderIds": ["5e3d03c86e350a0009b380a7"]}} - - OPEN_SELL_LIMIT_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0", - "channel": "API", - "feeCurrency": "USDT", "type": "limit", "isActive": True, - "createdAt": 1581056968000, - "visibleSize": "0", "price": "240.11", "iceberg": False, - "stopTriggered": False, - "funds": "0", "id": "5e3d03c86e350a0009b380a7", "timeInForce": "GTC", - "tradeType": "TRADE", "side": "sell", "dealSize": "0", "cancelAfter": 0, - "dealFunds": "0", "stp": "", "postOnly": False, "stopPrice": "0", - "size": "0.01", - "stop": "", "cancelExist": False, - "clientOid": "sell-ETH-USDT-1581056966892386"}} - - GET_CANCELED_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0", - "channel": 
"API", "feeCurrency": "USDT", "type": "limit", "isActive": False, - "createdAt": 1581056968000, "visibleSize": "0", "price": "240.11", "iceberg": False, - "stopTriggered": False, "funds": "0", "id": "5e3d03c86e350a0009b380a7", - "timeInForce": "GTC", "tradeType": "TRADE", "side": "sell", "dealSize": "0", - "cancelAfter": 0, "dealFunds": "0", "stp": "", "postOnly": False, "stopPrice": "0", - "size": "0.01", "stop": "", "cancelExist": True, - "clientOid": "sell-ETH-USDT-1581056966892386"}} - - ORDER_PLACE_2 = {"code": "200000", "data": {"orderId": "5e3d08516e350a0009bcd272"}} - - OPEN_BUY_LIMIT_ORDER = { - "code": "200000", - "data": { - "symbol": "ETH-USDT", "hidden": False, "opType": "DEAL", "fee": "0", - "channel": "API", "feeCurrency": "USDT", "type": "limit", "isActive": True, - "createdAt": 1581058129000, "visibleSize": "0", "price": "174.61", - "iceberg": False, "stopTriggered": False, "funds": "0", - "id": "5e3d08516e350a0009bcd272", "timeInForce": "GTC", "tradeType": "TRADE", - "side": "buy", "dealSize": "0", "cancelAfter": 0, "dealFunds": "0", "stp": "", - "postOnly": False, "stopPrice": "0", "size": "0.01", "stop": "", - "cancelExist": False, "clientOid": "buy-ETH-USDT-1581058129011078"}} - - ORDERS_BATCH_CANCELED = { - "code": "200000", - "data": {"cancelledOrderIds": ["5e3d0851051a350008723a81", "5e3d08516e350a0009bcd272"]}} diff --git a/test/connector/exchange/kucoin/test_kucoin_market.py b/test/connector/exchange/kucoin/test_kucoin_market.py deleted file mode 100644 index 43983ccdd8..0000000000 --- a/test/connector/exchange/kucoin/test_kucoin_market.py +++ /dev/null @@ -1,547 +0,0 @@ -import asyncio -import contextlib -import logging -import math -import os -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import ( - List, - Optional -) -from unittest import mock - -import conf -from hummingbot.client.config.fee_overrides_config_map import fee_overrides_config_map -from hummingbot.connector.exchange.kucoin.kucoin_exchange import KucoinExchange -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import ( - Clock, - ClockMode -) -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.mock_api.mock_web_server import MockWebServer -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill -from test.connector.exchange.kucoin.fixture_kucoin import FixtureKucoin - -logging.basicConfig(level=METRICS_LOG_LEVEL) -API_MOCK_ENABLED = conf.mock_api_enabled is not None and conf.mock_api_enabled.lower() in ['true', 'yes', '1'] -API_KEY = "XXX" if API_MOCK_ENABLED else conf.kucoin_api_key -API_SECRET = "YYY" if API_MOCK_ENABLED else conf.kucoin_secret_key -API_PASSPHRASE = "ZZZ" if API_MOCK_ENABLED else conf.kucoin_passphrase -API_BASE_URL = "api.kucoin.com" 
-EXCHANGE_ORDER_ID = 20001 - - -class KucoinExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.OrderCancelled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - - market: KucoinExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - if API_MOCK_ENABLED: - cls.web_app = MockWebServer.get_instance() - cls.web_app.add_host_to_mock(API_BASE_URL, ["/api/v1/timestamp", "/api/v1/symbols", - "/api/v1/bullet-public", - "/api/v2/market/orderbook/level2"]) - cls.web_app.start() - cls.ev_loop.run_until_complete(cls.web_app.wait_til_started()) - cls._patcher = mock.patch("aiohttp.client.URL") - cls._url_mock = cls._patcher.start() - cls._url_mock.side_effect = cls.web_app.reroute_local - cls.web_app.update_response("get", API_BASE_URL, "/api/v1/accounts", FixtureKucoin.BALANCES) - - cls._t_nonce_patcher = unittest.mock.patch( - "hummingbot.connector.exchange.kucoin.kucoin_exchange.get_tracking_nonce") - cls._t_nonce_mock = cls._t_nonce_patcher.start() - cls._exch_order_id = 20001 - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: KucoinExchange = KucoinExchange( - kucoin_api_key=API_KEY, - kucoin_passphrase=API_PASSPHRASE, - kucoin_secret_key=API_SECRET, - trading_pairs=["ETH-USDT"] - ) - # Need 2nd instance of market to prevent events mixing up across tests - cls.market_2: KucoinExchange = KucoinExchange( - kucoin_api_key=API_KEY, - kucoin_passphrase=API_PASSPHRASE, - kucoin_secret_key=API_SECRET, - trading_pairs=["ETH-USDT"] - ) - cls.clock.add_iterator(cls.market) - cls.clock.add_iterator(cls.market_2) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - if API_MOCK_ENABLED: - cls.web_app.stop() - cls._patcher.stop() - cls._t_nonce_patcher.stop() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready and cls.market_2.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../kucoin_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - self.market_2_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - self.market_2.add_listener(event_tag, self.market_2_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_2.remove_listener(event_tag, self.market_2_logger) - self.market_logger = None - self.market_2_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(0.5) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_get_fee(self): - limit_fee: AddedToCostTradeFee = 
self.market.get_fee("ETH", "USDT", OrderType.LIMIT_MAKER, TradeType.BUY, 1, 10) - self.assertGreater(limit_fee.percent, 0) - self.assertEqual(len(limit_fee.flat_fees), 0) - market_fee: AddedToCostTradeFee = self.market.get_fee("ETH", "USDT", OrderType.LIMIT, TradeType.BUY, 1) - self.assertGreater(market_fee.percent, 0) - self.assertEqual(len(market_fee.flat_fees), 0) - sell_trade_fee: AddedToCostTradeFee = self.market.get_fee( - "ETH", "USDT", OrderType.LIMIT_MAKER, TradeType.SELL, 1, 10 - ) - self.assertGreater(sell_trade_fee.percent, 0) - self.assertEqual(len(sell_trade_fee.flat_fees), 0) - - def order_response(self, fixture_data, nonce): - self._t_nonce_mock.return_value = nonce - order_resp = fixture_data.copy() - return order_resp - - def place_order(self, is_buy, trading_pair, amount, order_type, price, nonce, post_resp, get_resp): - global EXCHANGE_ORDER_ID - order_id, exch_order_id = None, None - if API_MOCK_ENABLED: - exch_order_id = f"KUCOIN_{EXCHANGE_ORDER_ID}" - EXCHANGE_ORDER_ID += 1 - resp = self.order_response(post_resp, nonce) - resp["data"]["orderId"] = exch_order_id - self.web_app.update_response("post", API_BASE_URL, "/api/v1/orders", resp) - if is_buy: - order_id = self.market.buy(trading_pair, amount, order_type, price) - else: - order_id = self.market.sell(trading_pair, amount, order_type, price) - if API_MOCK_ENABLED: - resp = get_resp.copy() - resp["data"]["id"] = exch_order_id - resp["data"]["clientOid"] = order_id - self.web_app.update_response("get", API_BASE_URL, f"/api/v1/orders/{exch_order_id}", resp) - return order_id, exch_order_id - - def test_fee_overrides_config(self): - fee_overrides_config_map["kucoin_taker_fee"].value = None - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.001"), taker_fee.percent) - fee_overrides_config_map["kucoin_taker_fee"].value = Decimal('0.2') - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.002"), taker_fee.percent) - fee_overrides_config_map["kucoin_maker_fee"].value = None - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.001"), maker_fee.percent) - fee_overrides_config_map["kucoin_maker_fee"].value = Decimal('0.5') - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.005"), maker_fee.percent) - - def test_limit_maker_rejections(self): - if API_MOCK_ENABLED: - return - trading_pair = "ETH-USDT" - - # Try to put a buy limit maker order that is going to match, this should triggers order failure event. - price: Decimal = self.market.get_price(trading_pair, True) * Decimal('1.02') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal(0.01)) - - order_id = self.market.buy(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - self.market_logger.clear() - - # Try to put a sell limit maker order that is going to match, this should triggers order failure event. 
- price: Decimal = self.market.get_price(trading_pair, True) * Decimal('0.98') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal(0.01)) - - order_id = self.market.sell(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - def test_limit_makers_unfilled(self): - if API_MOCK_ENABLED: - return - trading_pair = "ETH-USDT" - bid_price = self.market.get_price(trading_pair, True) * Decimal("0.8") - quantized_bid_price = self.market.quantize_order_price(trading_pair, bid_price) - quantized_bid_amount = self.market.quantize_order_amount(trading_pair, Decimal(0.01)) - - order_id, _ = self.place_order(True, trading_pair, quantized_bid_amount, OrderType.LIMIT_MAKER, - quantized_bid_price, 10001, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.ORDER_GET_BUY_UNMATCHED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - ask_price = self.market.get_price(trading_pair, True) * Decimal("1.2") - quatized_ask_price = self.market.quantize_order_price(trading_pair, ask_price) - quatized_ask_amount = self.market.quantize_order_amount(trading_pair, Decimal(0.01)) - - order_id, _ = self.place_order(False, trading_pair, quatized_ask_amount, OrderType.LIMIT_MAKER, - quatized_ask_price, 10002, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.ORDER_GET_SELL_UNMATCHED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - [cancellation_results] = self.run_parallel(self.market.cancel_all(5)) - for cr in cancellation_results: - self.assertEqual(cr.success, True) - - def test_limit_taker_buy(self): - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.05")) - trading_pair = "ETH-USDT" - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal(0.01) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, _ = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT, price, 10001, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.BUY_MARKET_ORDER) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - buy_order_completed_event: BuyOrderCompletedEvent = buy_order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded: float = sum(t.amount for t in trade_events) - quote_amount_traded: float = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, buy_order_completed_event.order_id) - self.assertAlmostEqual(float(quantized_amount), buy_order_completed_event.base_asset_amount, places=4) - self.assertEqual("ETH", buy_order_completed_event.base_asset) - self.assertEqual("USDT", buy_order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, float(buy_order_completed_event.base_asset_amount), places=4) - self.assertAlmostEqual(quote_amount_traded, float(buy_order_completed_event.quote_asset_amount), 
places=4) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_limit_taker_sell(self): - self.assertGreater(self.market.get_balance("ETH"), Decimal("0.05")) - trading_pair = "ETH-USDT" - price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal(0.011) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - order_id, _ = self.place_order(False, trading_pair, amount, OrderType.LIMIT, price, 10001, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.SELL_MARKET_ORDER) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - sell_order_completed_event: SellOrderCompletedEvent = sell_order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, sell_order_completed_event.order_id) - self.assertAlmostEqual(float(quantized_amount), sell_order_completed_event.base_asset_amount) - self.assertEqual("ETH", sell_order_completed_event.base_asset) - self.assertEqual("USDT", sell_order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, float(sell_order_completed_event.base_asset_amount)) - self.assertAlmostEqual(quote_amount_traded, float(sell_order_completed_event.quote_asset_amount)) - self.assertGreater(sell_order_completed_event.fee_amount, Decimal(0)) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - # Reset the logs - self.market_logger.clear() - - def test_cancel(self): - trading_pair = "ETH-USDT" - - current_price: float = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal(0.01) - - price: Decimal = Decimal(current_price) * Decimal(1.1) - quantized_price: Decimal = self.market.quantize_order_price(trading_pair, price) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantized_price, 10001, - FixtureKucoin.ORDER_PLACE_2, FixtureKucoin.OPEN_SELL_LIMIT_ORDER) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - if API_MOCK_ENABLED: - resp = FixtureKucoin.CANCEL_ORDER.copy() - resp["data"]["cancelledOrderIds"] = [exch_order_id] - self.web_app.update_response("delete", API_BASE_URL, f"/api/v1/orders/{exch_order_id}", resp) - self.market.cancel(trading_pair, order_id) - if API_MOCK_ENABLED: - resp = FixtureKucoin.GET_CANCELED_ORDER.copy() - resp["data"]["id"] = exch_order_id - resp["data"]["clientOid"] = order_id - self.web_app.update_response("get", API_BASE_URL, f"/api/v1/orders/{exch_order_id}", resp) - [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = order_cancelled_event - self.assertEqual(order_cancelled_event.order_id, order_id) - self.market_logger.clear() - - def test_cancel_all(self): - trading_pair = "ETH-USDT" - - bid_price: Decimal = Decimal(self.market_2.get_price(trading_pair, True)) - ask_price: Decimal = 
Decimal(self.market_2.get_price(trading_pair, False)) - amount: Decimal = Decimal(0.01) - quantized_amount: Decimal = self.market_2.quantize_order_amount(trading_pair, amount) - - # Intentionally setting high price to prevent getting filled - quantize_bid_price: Decimal = self.market_2.quantize_order_price(trading_pair, bid_price * Decimal(0.8)) - quantize_ask_price: Decimal = self.market_2.quantize_order_price(trading_pair, ask_price * Decimal(1.2)) - - _, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, quantize_bid_price, - 10001, FixtureKucoin.ORDER_PLACE, FixtureKucoin.OPEN_BUY_LIMIT_ORDER) - - _, exch_order_id2 = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, quantize_ask_price, - 10002, FixtureKucoin.ORDER_PLACE, FixtureKucoin.OPEN_SELL_LIMIT_ORDER) - - self.run_parallel(asyncio.sleep(1)) - if API_MOCK_ENABLED: - resp = FixtureKucoin.ORDERS_BATCH_CANCELED.copy() - resp["data"]["cancelledOrderIds"] = [exch_order_id, exch_order_id2] - self.web_app.update_response("delete", API_BASE_URL, "/api/v1/orders", resp) - [cancellation_results] = self.run_parallel(self.market_2.cancel_all(5)) - for cr in cancellation_results: - self.assertEqual(cr.success, True) - self.market_2_logger.clear() - - def test_orders_saving_and_restoration(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDT" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - self.assertEqual(0, len(self.market.tracking_states)) - - # Try to put limit buy order for 0.04 ETH, and watch for order creation event. - current_bid_price: float = self.market.get_price(trading_pair, True) - bid_price: Decimal = Decimal(current_bid_price * Decimal(0.8)) - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - amount: Decimal = Decimal(0.04) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, - 10001, FixtureKucoin.ORDER_PLACE, - FixtureKucoin.OPEN_BUY_LIMIT_ORDER) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.market.tracking_states)) - self.assertEqual(order_id, list(self.market.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.market) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.market) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. 
- self.clock.remove_iterator(self.market) - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market: KucoinExchange = KucoinExchange( - kucoin_api_key=API_KEY, - kucoin_passphrase=API_PASSPHRASE, - kucoin_secret_key=API_SECRET, - trading_pairs=["ETH-USDT"] - ) - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, self.market) - self.clock.add_iterator(self.market) - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - self.market.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(self.market.limit_orders)) - self.assertEqual(1, len(self.market.tracking_states)) - - if API_MOCK_ENABLED: - resp = FixtureKucoin.CANCEL_ORDER.copy() - resp["data"]["cancelledOrderIds"] = exch_order_id - self.web_app.update_response("delete", API_BASE_URL, f"/api/v1/orders/{exch_order_id}", resp) - # Cancel the order and verify that the change is saved. - self.market.cancel(trading_pair, order_id) - if API_MOCK_ENABLED: - resp = FixtureKucoin.GET_CANCELED_ORDER.copy() - resp["data"]["id"] = exch_order_id - resp["data"]["clientOid"] = order_id - self.web_app.update_response("get", API_BASE_URL, f"/api/v1/orders/{exch_order_id}", resp) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_id = None - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, self.market) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - self.market_logger.clear() - - def test_order_fill_record(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDT" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - # Try to buy 0.01 ETH from the exchange, and watch for completion event. - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal(0.01) - order_id, _ = self.place_order(True, trading_pair, amount, OrderType.LIMIT, price, 10001, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.BUY_MARKET_ORDER) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - # Reset the logs - self.market_logger.clear() - - # Try to sell back the same amount of ETH to the exchange, and watch for completion event. 
- price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal(buy_order_completed_event.base_asset_amount) - order_id, _ = self.place_order(False, trading_pair, amount, OrderType.LIMIT, price, 10002, - FixtureKucoin.ORDER_PLACE, FixtureKucoin.SELL_MARKET_ORDER) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertEqual(2, len(trade_fills)) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertEqual(1, len(buy_fills)) - self.assertEqual(1, len(sell_fills)) - - order_id = None - - finally: - if order_id is not None: - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - self.market_logger.clear() - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. - for order_book in self.market.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/connector/exchange/mexc/fixture_mexc.py b/test/connector/exchange/mexc/fixture_mexc.py deleted file mode 100644 index c87c7996d0..0000000000 --- a/test/connector/exchange/mexc/fixture_mexc.py +++ /dev/null @@ -1,247 +0,0 @@ -class FixtureMEXC: - PING_DATA = {"code": 200} - - MEXC_TICKERS = { - "code": "0", - "msg": "", - "data": [ - { - "symbol": "ETH_USDT", - "volume": "14155.61237", - "high": "4394.18", - "low": "4166.27", - "bid": "4205.44", - "ask": "4206.28", - "open": "4311.7", - "last": "4205.89", - "time": 1635685800000, - "change_rate": "-0.0245402" - } - ] - } - - TICKER_DATA = { - "code": 200, - "data": [ - { - "symbol": "ETH_USDT", - "volume": "0", - "high": "182.4117576", - "low": "182.4117576", - "bid": "182.0017985", - "ask": "183.1983186", - "open": "182.4117576", - "last": "182.4117576", - "time": 1574668200000, - "change_rate": "0.00027307" - } - ] - } - - MEXC_MARKET_SYMBOL = { - "code": 200, - "data": [ - { - "symbol": "ETH_USDT", - "state": "ENABLED", - "vcoinName": "ETH", - "vcoinStatus": 1, - "price_scale": 2, - "quantity_scale": 5, - "min_amount": "5", - "max_amount": "5000000", - "maker_fee_rate": "0.002", - "taker_fee_rate": "0.002", - "limited": False, - "etf_mark": 0, - "symbol_partition": "MAIN" - } - ] - } - - MEXC_ORDER_BOOK = { - "code": 200, - "data": { - "asks": [ - { - "price": "183.1683154", - "quantity": "128.5" - }, - { - "price": "183.1983186", - "quantity": "101.6" - } - ], - "bids": [ - { - "price": "182.4417544", - "quantity": "115.5" - }, - { - "price": "182.4217568", - "quantity": "135.7" - } - ] - } - } - - MEXC_BALANCE_URL = { - "code": 200, - "data": { - "BTC": { - "frozen": "0", - "available": "140" - }, - "ETH": { - "frozen": "8471.296525048", - "available": "483280.9653659222035" - }, - "USDT": { - "frozen": "0", - "available": "27.3629" - }, - "MX": { - "frozen": "30.9863", - "available": "450.0137" - } - } - } - - ORDER_PLACE = { - "code": 200, - "data": "c8663a12a2fc457fbfdd55307b463495" - } - - ORDER_GET_LIMIT_BUY_UNFILLED = { - "code": 200, - "data": [ - { - "id": "2a0ad973f6a8452bae1533164ec3ef72", - "symbol": "ETH_USDT", - "price": "3500", 
- "quantity": "0.06", - "state": "NEW", - "type": "BID", - "deal_quantity": "0", - "deal_amount": "0", - "create_time": 1635824885000, - "order_type": "LIMIT_ORDER" - } - ] - } - - ORDER_GET_LIMIT_BUY_FILLED = { - "code": 200, - "data": [ - { - "id": "c8663a12a2fc457fbfdd55307b463495", - "symbol": "ETH_USDT", - "price": "4001", - "quantity": "0.06", - "state": "FILLED", - "type": "BID", - "deal_quantity": "0.06", - "deal_amount": "0.06", - "create_time": 1573117266000, - "client_order_id": "aaa" - } - ] - } - - ORDERS_BATCH_CANCELED = { - "code": "0", - "msg": "", - "data": [ - { - "clOrdId": "", - "ordId": "2482659399697407", - "sCode": "0", - "sMsg": "" - }, - { - "clOrdId": "", - "ordId": "2482659399697408", - "sCode": "0", - "sMsg": "" - }, - ] - } - - ORDER_CANCEL = { - "code": 200, - "data": { - "2510832677225473": "success" - } - } - - ORDER_GET_CANCELED = { - "code": 200, - "data": [ - { - "id": "c38a9449ee2e422ca83593833a2595d7", - "symbol": "ETH_USDT", - "price": "3500", - "quantity": "0.06", - "state": "CANCELED", - "type": "BID", - "deal_quantity": "0", - "deal_amount": "0", - "create_time": 1635822195000, - "order_type": "LIMIT_ORDER" - } - ] - } - - ORDER_GET_MARKET_BUY = { - "code": 200, - "data": [ - { - "id": "c8663a12a2fc457fbfdd55307b463495", - "symbol": "ETH_USDT", - "price": "4001", - "quantity": "0.06", - "state": "FILLED", - "type": "BID", - "deal_quantity": "0.06", - "deal_amount": "0.06", - "create_time": 1573117266000, - "client_order_id": "aaa" - } - ] - } - - ORDER_GET_MARKET_SELL = { - "code": 200, - "data": [ - { - "id": "c8663a12a2fc457fbfdd55307b463495", - "symbol": "ETH_USDT", - "price": "4001", - "quantity": "0.06", - "state": "FILLED", - "type": "BID", - "deal_quantity": "0.06", - "deal_amount": "0.06", - "create_time": 1573117266000, - "client_order_id": "aaa" - } - ] - } - - ORDER_DEAL_DETAIL = { - "code": 200, - "data": [ - { - "symbol": "ETH_USDT", - "order_id": "a39ea6b7afcf4f5cbba1e515210ff827", - "quantity": "54.1", - "price": "182.6317377", - "amount": "9880.37700957", - "fee": "9.88037700957", - "trade_type": "BID", - "fee_currency": "USDT", - "is_taker": True, - "create_time": 1572693911000 - } - ] - } diff --git a/test/connector/exchange/mexc/test_mexc_market.py b/test/connector/exchange/mexc/test_mexc_market.py deleted file mode 100644 index 1d3dbd0f15..0000000000 --- a/test/connector/exchange/mexc/test_mexc_market.py +++ /dev/null @@ -1,581 +0,0 @@ -import asyncio -import contextlib -import logging -import math -import os -import time -import unittest -from decimal import Decimal -from os.path import join, realpath -from typing import ( - List, - Optional -) -from unittest import mock - -import conf - -from hummingbot.client.config.fee_overrides_config_map import fee_overrides_config_map -from hummingbot.connector.exchange.mexc.mexc_constants import ( - MEXC_BALANCE_URL, - MEXC_BASE_URL, - MEXC_BATCH_ORDER_CANCEL, - MEXC_DEAL_DETAIL, - MEXC_DEPTH_URL, - MEXC_ORDER_CANCEL, - MEXC_ORDER_DETAILS_URL, - MEXC_PING_URL, - MEXC_PLACE_ORDER, - MEXC_PRICE_URL, - MEXC_SYMBOL_URL, -) -from hummingbot.connector.exchange.mexc.mexc_exchange import MexcExchange -from hummingbot.connector.exchange_base import OrderType -from hummingbot.connector.markets_recorder import MarketsRecorder -from hummingbot.core.clock import ( - Clock, - ClockMode -) -from hummingbot.core.data_type.common import TradeType -from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee -from hummingbot.core.event.event_logger import EventLogger -from 
hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.mock_api.mock_web_server import MockWebServer -from hummingbot.core.utils.async_utils import ( - safe_ensure_future, - safe_gather, -) -from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL -from hummingbot.model.market_state import MarketState -from hummingbot.model.order import Order -from hummingbot.model.sql_connection_manager import ( - SQLConnectionManager, - SQLConnectionType -) -from hummingbot.model.trade_fill import TradeFill -from test.connector.exchange.mexc.fixture_mexc import FixtureMEXC - -# MOCK_API_ENABLED = conf.mock_api_enabled is not None and conf.mock_api_enabled.lower() in ['true', 'yes', '1'] -MOCK_API_ENABLED = True - -API_KEY = "API_PASSPHRASE_MOCK" if MOCK_API_ENABLED else conf.mexc_api_key -API_SECRET = "API_SECRET_MOCK" if MOCK_API_ENABLED else conf.mexc_secret_key - -API_BASE_URL = MEXC_BASE_URL.replace("https://", "").replace("/", "") - -EXCHANGE_ORDER_ID = 20001 - -logging.basicConfig(level=METRICS_LOG_LEVEL) - - -class MexcExchangeUnitTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.ReceivedAsset, - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.OrderCancelled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled, - MarketEvent.OrderFailure - ] - - market: MexcExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def strip_host_from_mexc_url(cls, url): - HOST = "https://www.mexc.com" - return url.split(HOST)[-1] - - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - if MOCK_API_ENABLED: - cls.web_app = MockWebServer.get_instance() - cls.web_app.add_host_to_mock(API_BASE_URL, []) - cls.web_app.start() - cls.ev_loop.run_until_complete(cls.web_app.wait_til_started()) - cls._patcher = mock.patch("aiohttp.client.URL") - cls._url_mock = cls._patcher.start() - cls._url_mock.side_effect = cls.web_app.reroute_local - - cls.web_app.update_response("get", API_BASE_URL, cls.strip_host_from_mexc_url(MEXC_SYMBOL_URL), - FixtureMEXC.MEXC_MARKET_SYMBOL) - cls.web_app.update_response("get", API_BASE_URL, MEXC_PRICE_URL.split("?")[:1][0], FixtureMEXC.MEXC_TICKERS, - params={"symbol": "ETH-USDT"}) - cls.web_app.update_response("get", API_BASE_URL, MEXC_DEPTH_URL.split("?")[:1][0], - FixtureMEXC.MEXC_ORDER_BOOK, params={"symbol": "ETH-USDT", "depth": 200}) - # cls.web_app.update_response("get", API_BASE_URL, MEXC_TICKERS_URL, FixtureMEXC.MEXC_TICKERS) - cls.web_app.update_response("get", API_BASE_URL, MEXC_BALANCE_URL, FixtureMEXC.MEXC_BALANCE_URL) - cls.web_app.update_response("get", API_BASE_URL, MEXC_DEAL_DETAIL, FixtureMEXC.ORDER_DEAL_DETAIL) - cls.web_app.update_response("get", API_BASE_URL, MEXC_PING_URL, FixtureMEXC.PING_DATA) - cls._t_nonce_patcher = unittest.mock.patch( - "hummingbot.connector.exchange.mexc.mexc_exchange.get_tracking_nonce") - cls._t_nonce_mock = cls._t_nonce_patcher.start() - cls.clock: Clock = Clock(ClockMode.REALTIME) - cls.market: MexcExchange = MexcExchange( - API_KEY, - API_SECRET, - trading_pairs=["ETH-USDT"] - ) - # Need 2nd instance of market to prevent events mixing up across tests - cls.market_2: MexcExchange = MexcExchange( - API_KEY, - API_SECRET, - 
trading_pairs=["ETH-USDT"] - ) - # a = cls.market - cls.clock.add_iterator(cls.market) - cls.clock.add_iterator(cls.market_2) - cls.stack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - if MOCK_API_ENABLED: - cls.web_app.stop() - cls._patcher.stop() - cls._t_nonce_patcher.stop() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready and cls.market_2.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../mexc_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - self.market_2_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - self.market_2.add_listener(event_tag, self.market_2_logger) - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_2.remove_listener(event_tag, self.market_2_logger) - self.market_logger = None - self.market_2_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(0.5) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_get_fee(self): - limit_fee: AddedToCostTradeFee = self.market.get_fee("ETH", "USDT", OrderType.LIMIT_MAKER, TradeType.BUY, 1, 10) - self.assertGreater(limit_fee.percent, 0) - self.assertEqual(len(limit_fee.flat_fees), 0) - market_fee: AddedToCostTradeFee = self.market.get_fee("ETH", "USDT", OrderType.LIMIT, TradeType.BUY, 1) - self.assertGreater(market_fee.percent, 0) - self.assertEqual(len(market_fee.flat_fees), 0) - sell_trade_fee: AddedToCostTradeFee = self.market.get_fee( - "ETH", "USDT", OrderType.LIMIT_MAKER, TradeType.SELL, 1, 10 - ) - self.assertGreater(sell_trade_fee.percent, 0) - self.assertEqual(len(sell_trade_fee.flat_fees), 0) - - def test_fee_overrides_config(self): - fee_overrides_config_map["mexc_taker_fee"].value = None - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.002"), taker_fee.percent) - fee_overrides_config_map["mexc_taker_fee"].value = Decimal('0.1') - taker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", "ETH", OrderType.LIMIT, TradeType.BUY, Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.001"), taker_fee.percent) - fee_overrides_config_map["mexc_maker_fee"].value = None - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.002"), maker_fee.percent) - fee_overrides_config_map["mexc_maker_fee"].value = Decimal('0.5') - maker_fee: AddedToCostTradeFee = self.market.get_fee("LINK", - "ETH", - OrderType.LIMIT_MAKER, - TradeType.BUY, - Decimal(1), - Decimal('0.1')) - self.assertAlmostEqual(Decimal("0.005"), maker_fee.percent) - - def place_order(self, is_buy, trading_pair, amount, order_type, price, 
nonce, get_resp, market_connector=None): - global EXCHANGE_ORDER_ID - order_id, exch_order_id = None, None - if MOCK_API_ENABLED: - exch_order_id = f"MEXC_{EXCHANGE_ORDER_ID}" - EXCHANGE_ORDER_ID += 1 - self._t_nonce_mock.return_value = nonce - resp = FixtureMEXC.ORDER_PLACE.copy() - resp["data"] = exch_order_id - side = 'buy' if is_buy else 'sell' - order_id = f"{side}-{trading_pair}-{nonce}" - self.web_app.update_response("post", API_BASE_URL, MEXC_PLACE_ORDER, resp) - market = self.market if market_connector is None else market_connector - if is_buy: - order_id = market.buy(trading_pair, amount, order_type, price) - else: - order_id = market.sell(trading_pair, amount, order_type, price) - if MOCK_API_ENABLED: - resp = get_resp.copy() - # resp is the response passed by parameter - resp["data"][0]["id"] = exch_order_id - resp["data"][0]["client_order_id"] = order_id - self.web_app.update_response("get", API_BASE_URL, - MEXC_ORDER_DETAILS_URL.format(ordId=exch_order_id, trading_pair="ETH-USDT"), - resp) - return order_id, exch_order_id - - def cancel_order(self, trading_pair, order_id, exchange_order_id, get_resp): - if MOCK_API_ENABLED: - resp = FixtureMEXC.ORDER_CANCEL.copy() - resp.get('data').clear() - resp.get('data')[order_id] = 'success' - self.web_app.update_response("delete", API_BASE_URL, MEXC_ORDER_CANCEL, - resp, params={"order_ids": order_id}) - self.market.cancel(trading_pair, order_id) - if MOCK_API_ENABLED: - resp = get_resp.copy() - resp["data"][0]["id"] = exchange_order_id - resp["data"][0]["client_order_id"] = order_id - self.web_app.update_response("get", API_BASE_URL, MEXC_ORDER_DETAILS_URL.format(ordId=exchange_order_id, - trading_pair="ETH-USDT"), - resp) - - def test_limit_maker_rejections(self): - if MOCK_API_ENABLED: - return - trading_pair = "ETH-USDT" - - # Try to put a buy limit maker order that is going to match, this should triggers order failure event. - price: Decimal = self.market.get_price(trading_pair, True) * Decimal('1.02') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal("0.006")) - - order_id = self.market.buy(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - self.market_logger.clear() - - # Try to put a sell limit maker order that is going to match, this should triggers order failure event. 
- price: Decimal = self.market.get_price(trading_pair, True) * Decimal('0.98') - price: Decimal = self.market.quantize_order_price(trading_pair, price) - amount = self.market.quantize_order_amount(trading_pair, Decimal("0.006")) - - order_id = self.market.sell(trading_pair, amount, OrderType.LIMIT_MAKER, price) - [order_failure_event] = self.run_parallel(self.market_logger.wait_for(MarketOrderFailureEvent)) - self.assertEqual(order_id, order_failure_event.order_id) - - def test_limit_makers_unfilled(self): - if MOCK_API_ENABLED: - return - - trading_pair = "ETH-USDT" - - bid_price: Decimal = self.market.get_price(trading_pair, True) * Decimal("0.5") - ask_price: Decimal = self.market.get_price(trading_pair, False) * 2 - amount: Decimal = Decimal("0.006") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - # Intentionally setting invalid price to prevent getting filled - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price * Decimal("0.9")) - quantize_ask_price: Decimal = self.market.quantize_order_price(trading_pair, ask_price * Decimal("1.1")) - - order_id1, exch_order_id1 = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, - 10001, FixtureMEXC.ORDER_GET_LIMIT_BUY_UNFILLED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id1, order_created_event.order_id) - - order_id2, exch_order_id2 = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_ask_price, - 10002, FixtureMEXC.ORDER_GET_LIMIT_SELL_UNFILLED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - self.assertEqual(order_id2, order_created_event.order_id) - - self.run_parallel(asyncio.sleep(1)) - if MOCK_API_ENABLED: - resp = FixtureMEXC.ORDERS_BATCH_CANCELED.copy() - resp["data"]["success"] = [exch_order_id1, exch_order_id2] - self.web_app.update_response("delete", API_BASE_URL, "/open/api/v2/order/cancel_by_symbol", resp) - [cancellation_results] = self.run_parallel(self.market_2.cancel_all(5)) - for cr in cancellation_results: - self.assertEqual(cr.success, True) - - # Reset the logs - self.market_logger.clear() - - def test_limit_taker_buy(self): - trading_pair = "ETH-USDT" - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.06") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, _ = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT, price, 10001, - FixtureMEXC.ORDER_GET_MARKET_BUY) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - buy_order_completed_event: BuyOrderCompletedEvent = buy_order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded: Decimal = sum(t.amount for t in trade_events) - quote_amount_traded: Decimal = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, buy_order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, buy_order_completed_event.base_asset_amount, places=4) - self.assertEqual("ETH", 
buy_order_completed_event.base_asset) - self.assertEqual("USDT", buy_order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, buy_order_completed_event.base_asset_amount, places=4) - self.assertAlmostEqual(quote_amount_traded, buy_order_completed_event.quote_asset_amount, places=4) - self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - self.market_logger.clear() - - def test_limit_taker_sell(self): - trading_pair = "ETH-USDT" - price: Decimal = self.market.get_price(trading_pair, False) - amount: Decimal = Decimal("0.06") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, _ = self.place_order(False, trading_pair, amount, OrderType.LIMIT, price, 10001, - FixtureMEXC.ORDER_GET_MARKET_SELL) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - sell_order_completed_event: SellOrderCompletedEvent = sell_order_completed_event - trade_events: List[OrderFilledEvent] = [t for t in self.market_logger.event_log - if isinstance(t, OrderFilledEvent)] - base_amount_traded = sum(t.amount for t in trade_events) - quote_amount_traded = sum(t.amount * t.price for t in trade_events) - - self.assertTrue([evt.order_type == OrderType.LIMIT for evt in trade_events]) - self.assertEqual(order_id, sell_order_completed_event.order_id) - self.assertAlmostEqual(quantized_amount, sell_order_completed_event.base_asset_amount) - self.assertEqual("ETH", sell_order_completed_event.base_asset) - self.assertEqual("USDT", sell_order_completed_event.quote_asset) - self.assertAlmostEqual(base_amount_traded, sell_order_completed_event.base_asset_amount) - self.assertAlmostEqual(quote_amount_traded, sell_order_completed_event.quote_asset_amount) - self.assertGreater(sell_order_completed_event.fee_amount, Decimal(0)) - self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == order_id - for event in self.market_logger.event_log])) - self.market_logger.clear() - - def test_cancel_order(self): - trading_pair = "ETH-USDT" - - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.05") - - bid_price: Decimal = current_bid_price - Decimal("0.1") * current_bid_price - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, 10001, FixtureMEXC.ORDER_GET_LIMIT_BUY_UNFILLED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureMEXC.ORDER_GET_CANCELED) - [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - order_cancelled_event: OrderCancelledEvent = order_cancelled_event - self.assertEqual(order_cancelled_event.order_id, order_id) - - def test_cancel_all(self): - trading_pair = "ETH-USDT" - - bid_price: Decimal = self.market_2.get_price(trading_pair, True) * Decimal("0.5") - ask_price: Decimal = self.market_2.get_price(trading_pair, False) * 2 - amount: Decimal = Decimal("0.06") - quantized_amount: Decimal = self.market_2.quantize_order_amount(trading_pair, amount) - - # Intentionally setting invalid price to prevent getting filled - quantize_bid_price: Decimal = 
self.market_2.quantize_order_price(trading_pair, bid_price * Decimal("0.9")) - quantize_ask_price: Decimal = self.market_2.quantize_order_price(trading_pair, ask_price * Decimal("1.1")) - - _, exch_order_id1 = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, - 1001, FixtureMEXC.ORDER_GET_LIMIT_BUY_UNFILLED, self.market_2) - _, exch_order_id2 = self.place_order(False, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_ask_price, - 1002, FixtureMEXC.ORDER_GET_LIMIT_BUY_FILLED, self.market_2) - self.run_parallel(asyncio.sleep(1)) - if MOCK_API_ENABLED: - resp = FixtureMEXC.ORDERS_BATCH_CANCELED.copy() - resp["data"][0]["ordId"] = exch_order_id1 - self.web_app.update_response("delete", API_BASE_URL, '/' + MEXC_BATCH_ORDER_CANCEL, resp) - - [cancellation_results] = self.run_parallel(self.market_2.cancel_all(5)) - for cr in cancellation_results: - self.assertEqual(cr.success, '0') - - def test_orders_saving_and_restoration(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDT" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - self.assertEqual(0, len(self.market.tracking_states)) - - # Try to put limit buy order for 0.04 ETH, and watch for order creation event. - current_bid_price: Decimal = self.market.get_price(trading_pair, True) - bid_price: Decimal = current_bid_price * Decimal("0.8") - quantize_bid_price: Decimal = self.market.quantize_order_price(trading_pair, bid_price) - - amount: Decimal = Decimal("0.06") - quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount) - - order_id, exch_order_id = self.place_order(True, trading_pair, quantized_amount, OrderType.LIMIT_MAKER, - quantize_bid_price, 10001, - FixtureMEXC.ORDER_GET_LIMIT_BUY_UNFILLED) - [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent)) - order_created_event: BuyOrderCreatedEvent = order_created_event - # self.assertEqual(order_id, order_created_event.order_id) - - # Verify tracking states - self.assertEqual(1, len(self.market.tracking_states)) - self.assertEqual(order_id, list(self.market.tracking_states.keys())[0]) - - # Verify orders from recorder - recorded_orders: List[Order] = recorder.get_orders_for_config_and_market(config_path, self.market) - self.assertEqual(1, len(recorded_orders)) - self.assertEqual(order_id, recorded_orders[0].id) - - # Verify saved market states - saved_market_states: MarketState = recorder.get_market_states(config_path, self.market) - self.assertIsNotNone(saved_market_states) - self.assertIsInstance(saved_market_states.saved_state, dict) - self.assertGreater(len(saved_market_states.saved_state), 0) - - # Close out the current market and start another market. 
- self.clock.remove_iterator(self.market) - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market: MexcExchange = MexcExchange( - API_KEY, - API_SECRET, - trading_pairs=["ETH-USDT"] - ) - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - recorder.stop() - recorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - saved_market_states = recorder.get_market_states(config_path, self.market) - self.clock.add_iterator(self.market) - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - self.market.restore_tracking_states(saved_market_states.saved_state) - self.assertEqual(1, len(self.market.limit_orders)) - self.assertEqual(1, len(self.market.tracking_states)) - - # Cancel the order and verify that the change is saved. - self.cancel_order(trading_pair, order_id, exch_order_id, FixtureMEXC.ORDER_GET_CANCELED) - # saved_market_states2 = recorder.get_market_states(config_path, self.market) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - # saved_market_states3 = recorder.get_market_states(config_path, self.market) - order_id = None - self.assertEqual(0, len(self.market.limit_orders)) - self.assertEqual(0, len(self.market.tracking_states)) - saved_market_states = recorder.get_market_states(config_path, self.market) - self.assertEqual(0, len(saved_market_states.saved_state)) - finally: - if order_id is not None: - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_order_fill_record(self): - config_path: str = "test_config" - strategy_name: str = "test_strategy" - trading_pair: str = "ETH-USDT" - sql: SQLConnectionManager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path=self.db_path) - order_id: Optional[str] = None - recorder: MarketsRecorder = MarketsRecorder(sql, [self.market], config_path, strategy_name) - recorder.start() - - try: - # Try to buy 0.04 ETH from the exchange, and watch for completion event. - price: Decimal = self.market.get_price(trading_pair, True) - amount: Decimal = Decimal("0.06") - order_id, _ = self.place_order(True, trading_pair, amount, OrderType.LIMIT, price, 10001, - FixtureMEXC.ORDER_GET_MARKET_BUY) - [buy_order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - # Reset the logs - self.market_logger.clear() - - # Try to sell back the same amount of ETH to the exchange, and watch for completion event. 
- price: Decimal = self.market.get_price(trading_pair, False) - amount = buy_order_completed_event.base_asset_amount - order_id, _ = self.place_order(False, trading_pair, amount, OrderType.LIMIT, price, 10002, - FixtureMEXC.ORDER_GET_MARKET_SELL) - [sell_order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - # Query the persisted trade logs - trade_fills: List[TradeFill] = recorder.get_trades_for_config(config_path) - self.assertEqual(2, len(trade_fills)) - buy_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "BUY"] - sell_fills: List[TradeFill] = [t for t in trade_fills if t.trade_type == "SELL"] - self.assertEqual(1, len(buy_fills)) - self.assertEqual(1, len(sell_fills)) - - order_id = None - - finally: - if order_id is not None: - self.market.cancel(trading_pair, order_id) - self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent)) - - recorder.stop() - os.unlink(self.db_path) - - def test_update_last_prices(self): - # This is basic test to see if order_book last_trade_price is initiated and updated. - for order_book in self.market.order_books.values(): - for _ in range(5): - self.ev_loop.run_until_complete(asyncio.sleep(1)) - self.assertFalse(math.isnan(order_book.last_trade_price)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/connector/test_in_flight_order_base.py b/test/connector/test_in_flight_order_base.py deleted file mode 100644 index 79a07f12ed..0000000000 --- a/test/connector/test_in_flight_order_base.py +++ /dev/null @@ -1,149 +0,0 @@ -from decimal import Decimal -from unittest import TestCase - -from hummingbot.connector.in_flight_order_base import InFlightOrderBase -from hummingbot.core.data_type.in_flight_order import OrderState -from hummingbot.core.event.events import LimitOrderStatus, OrderType, TradeType - - -class InFlightOrderBaseTests(TestCase): - - def test_string_repr(self): - order = InFlightOrderBase( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair="COINALPHA-HBOT", - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(1000), - amount=Decimal(1), - initial_state=OrderState.PENDING_CREATE.name, - creation_timestamp=1640001112.0 - ) - - expected_repr = ("InFlightOrder(client_order_id='OID1', exchange_order_id='EOID1', " - "creation_timestamp=1640001112, trading_pair='COINALPHA-HBOT', order_type=OrderType.LIMIT, " - "trade_type=TradeType.BUY, price=1000, amount=1, executed_amount_base=0, " - "executed_amount_quote=0, fee_asset='None', fee_paid=0, last_state='PENDING_CREATE')") - - self.assertEqual(expected_repr, repr(order)) - - def test_get_creation_timestamp(self): - order = InFlightOrderBase( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair="COINALPHA-HBOT", - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(1000), - amount=Decimal(1), - initial_state=OrderState.PENDING_CREATE.name, - creation_timestamp=1640001112.0 - ) - - self.assertEqual(1640001112, order.creation_timestamp) - - def test_creation_timestamp_taken_from_order_id_when_not_specified(self): - order = InFlightOrderBase( - client_order_id="OID1-1640001112223334", - exchange_order_id="EOID1", - trading_pair="COINALPHA-HBOT", - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(1000), - amount=Decimal(1), - creation_timestamp=-1, - initial_state=OrderState.PENDING_CREATE.name - ) - - self.assertEqual(1640001112.223334, order.creation_timestamp) - - def test_serialize_order_to_json(self): - order = 
InFlightOrderBase( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair="COINALPHA-HBOT", - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(1000), - amount=Decimal(1), - initial_state=OrderState.PENDING_CREATE.name, - creation_timestamp=1640001112.0 - ) - - expected_json = { - "client_order_id": order.client_order_id, - "exchange_order_id": order.exchange_order_id, - "trading_pair": order.trading_pair, - "order_type": order.order_type.name, - "trade_type": order.trade_type.name, - "price": str(order.price), - "amount": str(order.amount), - "executed_amount_base": str(order.executed_amount_base), - "executed_amount_quote": str(order.executed_amount_quote), - "fee_asset": order.fee_asset, - "fee_paid": str(order.fee_paid), - "last_state": order.last_state, - "creation_timestamp": order.creation_timestamp - } - - self.assertEqual(expected_json, order.to_json()) - - def test_deserialize_order_from_json(self): - json = { - "client_order_id": "OID1", - "exchange_order_id": "EOID", - "trading_pair": "COINALPHA-HBOT", - "order_type": OrderType.LIMIT.name, - "trade_type": TradeType.BUY.name, - "price": "1000.0", - "amount": "1.0", - "executed_amount_base": "0.5", - "executed_amount_quote": "510.0", - "fee_asset": "BNB", - "fee_paid": "10.0", - "last_state": OrderState.PARTIALLY_FILLED.name, - "creation_timestamp": 1640001112.0 - } - - order = InFlightOrderBase.from_json(json) - - self.assertEqual(json["client_order_id"], order.client_order_id) - self.assertEqual(json["exchange_order_id"], order.exchange_order_id) - self.assertEqual(json["trading_pair"], order.trading_pair) - self.assertEqual(OrderType.LIMIT, order.order_type) - self.assertEqual(TradeType.BUY, order.trade_type) - self.assertEqual(Decimal(json["price"]), order.price) - self.assertEqual(Decimal(json["amount"]), order.amount) - self.assertEqual(Decimal(json["executed_amount_base"]), order.executed_amount_base) - self.assertEqual(Decimal(json["executed_amount_quote"]), order.executed_amount_quote) - self.assertEqual(json["fee_asset"], order.fee_asset) - self.assertEqual(Decimal(json["fee_paid"]), order.fee_paid) - self.assertEqual(OrderState.PARTIALLY_FILLED.name, order.last_state) - self.assertEqual(json["creation_timestamp"], order.creation_timestamp) - - def test_to_limit_order(self): - order = InFlightOrderBase( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair="COINALPHA-HBOT", - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(1000), - amount=Decimal(1), - initial_state=OrderState.PENDING_CREATE.name, - creation_timestamp=1640001112.223330 - ) - - limit_order = order.to_limit_order() - - self.assertEqual("OID1", limit_order.client_order_id) - self.assertEqual("COINALPHA-HBOT", limit_order.trading_pair) - self.assertTrue(limit_order.is_buy) - self.assertEqual("COINALPHA", limit_order.base_currency) - self.assertEqual("HBOT", limit_order.quote_currency) - self.assertEqual(Decimal(1000), limit_order.price) - self.assertEqual(Decimal(1), limit_order.quantity) - self.assertTrue(limit_order.filled_quantity.is_nan()) - self.assertEqual(1640001112223330, limit_order.creation_timestamp) - self.assertEqual(LimitOrderStatus.UNKNOWN, limit_order.status) diff --git a/test/connector/test_parrot.py b/test/connector/test_parrot.py deleted file mode 100644 index 0c8491b8c8..0000000000 --- a/test/connector/test_parrot.py +++ /dev/null @@ -1,29 +0,0 @@ -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../../"))) 
-import unittest -import asyncio -from hummingbot.connector.parrot import get_active_campaigns, get_campaign_summary - - -class ParrotConnectorUnitTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - - def test_get_active_campaigns(self): - self.ev_loop.run_until_complete(self._test_get_active_campaigns()) - - async def _test_get_active_campaigns(self): - results = await get_active_campaigns("binance") - self.assertGreater(len(results), 0) - for result in results.values(): - print(result) - - def test_get_campaign_summary(self): - self.ev_loop.run_until_complete(self._test_get_campaign_summary()) - - async def _test_get_campaign_summary(self): - results = await get_campaign_summary("binance", ["RLC-BTC", "RLC-ETH"]) - self.assertLessEqual(len(results), 2) - for result in results.values(): - print(result) diff --git a/test/debug/debug_aiohttp_gather.py b/test/debug/debug_aiohttp_gather.py deleted file mode 100644 index b3392be104..0000000000 --- a/test/debug/debug_aiohttp_gather.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -import aiohttp -import asyncio -import pandas as pd -import random -import time -from typing import ( - NamedTuple, - List, - Dict, - Optional -) -from hummingbot.core.utils.async_utils import safe_gather - -shared_client_session: Optional[aiohttp.ClientSession] = None - - -class FetchTask(NamedTuple): - nonce: int - future: asyncio.Future - - @classmethod - def create_task(cls): - global shared_client_session - nonce: int = random.randint(0, 0xffffffff) - future: asyncio.Future = shared_client_session.get("https://postman-echo.com/get", params={"nonce": nonce}) - return FetchTask(nonce, future) - - -async def init_client(): - global shared_client_session - if shared_client_session is None: - shared_client_session = aiohttp.ClientSession() - - -async def generate_tasks(length: int) -> List[FetchTask]: - return [FetchTask.create_task() for _ in range(0, length)] - - -async def main(): - await init_client() - - while True: - try: - tasks: List[FetchTask] = await generate_tasks(10) - results: List[aiohttp.ClientResponse] = await safe_gather(*[t.future for t in tasks]) - data: List[Dict[str, any]] = await safe_gather(*[r.json() for r in results]) - mismatches: int = 0 - - for task, response in zip(tasks, data): - returned_nonce: int = int(response["args"]["nonce"]) - if task.nonce != returned_nonce: - print(f" - Error: requested for {task.nonce} but got {returned_nonce} back.") - mismatches += 1 - - if mismatches < 1: - print(f"[{str(pd.Timestamp.utcnow())}] All fetches passed.") - else: - print(f"[{str(pd.Timestamp.utcnow())}] {mismatches} out of 10 requests failed.") - - now: float = time.time() - next_tick: float = now // 1 + 1 - await asyncio.sleep(next_tick - now) - except asyncio.CancelledError: - raise - - -if __name__ == "__main__": - ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() - try: - ev_loop.run_until_complete(main()) - except KeyboardInterrupt: - print("Done!") diff --git a/test/debug/debug_arbitrage.py b/test/debug/debug_arbitrage.py deleted file mode 100644 index 4ebb2bb68c..0000000000 --- a/test/debug/debug_arbitrage.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python -import faulthandler; faulthandler.enable() -import sys -import os; sys.path.insert(0, os.path.realpath(os.path.join(__file__, "../../"))) -import logging; logging.basicConfig(level=logging.INFO) -import pandas as pd -import hummingsim -from hummingsim.backtest.backtest_market import 
BacktestMarket
-from hummingsim.backtest.binance_order_book_loader_v2 import BinanceOrderBookLoaderV2
-from hummingsim.backtest.ddex_order_book_loader import DDEXOrderBookLoader
-from hummingsim.backtest.market import QuantizationParams
-from hummingbot.core.clock import (
-    Clock,
-    ClockMode
-)
-from hummingsim.backtest.market_config import (
-    MarketConfig,
-    AssetType
-)
-from hummingbot.strategy.arbitrage import (
-    ArbitrageStrategy,
-    ArbitrageMarketPair
-)
-
-# Define the data cache path.
-hummingsim.set_data_path(os.path.join(os.environ["PWD"], "data"))
-
-# Define the parameters for the backtest.
-start = pd.Timestamp("2018-12-21-00:29:06", tz="UTC")
-end = pd.Timestamp("2019-12-24-00:43:00", tz="UTC")
-binance_trading_pair = ("ETHUSDT", "ETH", "USDT")
-ddex_trading_pair = ("WETH-DAI", "WETH", "DAI")
-
-
-binance_market = BacktestMarket()
-ddex_market = BacktestMarket()
-binance_loader = BinanceOrderBookLoaderV2(*binance_trading_pair)
-ddex_loader = DDEXOrderBookLoader(*ddex_trading_pair)
-
-
-binance_market.config = MarketConfig(AssetType.BASE_CURRENCY, 0.001, AssetType.QUOTE_CURRENCY, 0.001, {})
-ddex_market.config = MarketConfig(AssetType.BASE_CURRENCY, 0.001, AssetType.QUOTE_CURRENCY, 0.001, {})
-
-binance_market.add_data(binance_loader)
-ddex_market.add_data(ddex_loader)
-
-binance_market.set_quantization_param(QuantizationParams("ETHUSDT", 5, 3, 5, 3))
-ddex_market.set_quantization_param(QuantizationParams("WETH-DAI", 5, 3, 5, 3))
-
-market_pair1 = ArbitrageMarketPair(*([ddex_market] + list(ddex_trading_pair) + [binance_market] + list(binance_trading_pair)))
-
-strategy = ArbitrageStrategy([market_pair1], 0.025,
-                             logging_options=ArbitrageStrategy.OPTION_LOG_CREATE_ORDER)
-
-clock = Clock(ClockMode.BACKTEST, start_time=start.timestamp(), end_time=end.timestamp())
-clock.add_iterator(binance_market)
-clock.add_iterator(ddex_market)
-clock.add_iterator(strategy)
-
-
-binance_market.set_balance("ETH", 100.0)
-binance_market.set_balance("USDT", 10000.0)
-ddex_market.set_balance("WETH", 100.0)
-ddex_market.set_balance("DAI", 10000.0)
-
-clock.backtest_til(start.timestamp() + 1)
-
-ddex_weth_price = ddex_market.get_price("WETH-DAI", False)
-binance_eth_price = binance_market.get_price("ETHUSDT", False)
-start_ddex_portfolio_value = ddex_market.get_balance("DAI") + ddex_market.get_balance("WETH") * ddex_weth_price
-start_binance_portfolio_value = binance_market.get_balance("USDT") + binance_market.get_balance("ETH") * binance_eth_price
-print(f"start DDEX portfolio value: {start_ddex_portfolio_value}\n"
-      f"start Binance portfolio value: {start_binance_portfolio_value}")
-
-clock.backtest_til(end.timestamp())
-
-ddex_weth_price = ddex_market.get_price("WETH-DAI", False)
-binance_eth_price = binance_market.get_price("ETHUSDT", False)
-ddex_portfolio_value = ddex_market.get_balance("DAI") + ddex_market.get_balance("WETH") * ddex_weth_price
-binance_portfolio_value = binance_market.get_balance("USDT") + binance_market.get_balance("ETH") * binance_eth_price
-print(f"DDEX portfolio value: {ddex_portfolio_value}\nBinance portfolio value: {binance_portfolio_value}\n")
-print(f"DDEX balances: {ddex_market.get_all_balances()}\nBinance balances: {binance_market.get_all_balances()}")
-
-print(f"start DDEX portfolio value: {start_ddex_portfolio_value}\n"
-      f"start Binance portfolio value: {start_binance_portfolio_value}")
-
-print(f"Profit DDEX {ddex_portfolio_value/start_ddex_portfolio_value}\n"
-      f"Profit Binance {binance_portfolio_value/start_binance_portfolio_value}\n"
-      f"Profit Total "
-      f"{(ddex_portfolio_value + binance_portfolio_value)/(start_ddex_portfolio_value + start_binance_portfolio_value)}")
diff --git a/test/debug/debug_cross_exchange_market_making.py b/test/debug/debug_cross_exchange_market_making.py
deleted file mode 100644
index abd46a9d4e..0000000000
--- a/test/debug/debug_cross_exchange_market_making.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os; sys.path.insert(0, os.path.realpath(os.path.join(__file__, "../../")))
-import logging; logging.basicConfig(level=logging.DEBUG)
-import pandas as pd
-import hummingsim
-from hummingsim.backtest.backtest_market import BacktestMarket
-from hummingsim.backtest.binance_order_book_loader_v2 import BinanceOrderBookLoaderV2
-from hummingsim.backtest.market import QuantizationParams
-from hummingsim.backtest.market_config import (
-    MarketConfig,
-    AssetType
-)
-from hummingsim.backtest.ddex_order_book_loader import DDEXOrderBookLoader
-from hummingbot.core.clock import (
-    Clock,
-    ClockMode
-)
-from hummingbot.strategy.cross_exchange_market_making import (
-    CrossExchangeMarketMakingStrategy,
-    CrossExchangeMarketPair,
-)
-
-
-def main():
-    # Define the data cache path.
-    hummingsim.set_data_path(os.path.join(os.environ["PWD"], "data"))
-
-    # Define the parameters for the backtest.
-    start = pd.Timestamp("2018-12-12", tz="UTC")
-    end = pd.Timestamp("2019-01-12", tz="UTC")
-    binance_trading_pair = ("ETHUSDT", "ETH", "USDT")
-    ddex_trading_pair = ("WETH-DAI", "WETH", "DAI")
-
-    binance_market = BacktestMarket()
-    ddex_market = BacktestMarket()
-    binance_market.config = MarketConfig(AssetType.BASE_CURRENCY, 0.001, AssetType.QUOTE_CURRENCY, 0.001, {})
-    ddex_market.config = MarketConfig(AssetType.BASE_CURRENCY, 0.001, AssetType.QUOTE_CURRENCY, 0.001, {})
-    binance_loader = BinanceOrderBookLoaderV2(*binance_trading_pair)
-    ddex_loader = DDEXOrderBookLoader(*ddex_trading_pair)
-
-    binance_market.add_data(binance_loader)
-    ddex_market.add_data(ddex_loader)
-
-    binance_market.set_quantization_param(QuantizationParams("ETHUSDT", 5, 3, 5, 3))
-    ddex_market.set_quantization_param(QuantizationParams("WETH-DAI", 5, 3, 5, 3))
-
-    market_pair = CrossExchangeMarketPair(*(
-        [ddex_market] + list(ddex_trading_pair) + [binance_market] + list(binance_trading_pair)))
-
-    strategy = CrossExchangeMarketMakingStrategy(
-        [market_pair], 0.003,
-        logging_options=
-        CrossExchangeMarketMakingStrategy.OPTION_LOG_MAKER_ORDER_FILLED)
-
-    clock = Clock(ClockMode.BACKTEST, start_time=start.timestamp(), end_time=end.timestamp())
-    clock.add_iterator(binance_market)
-    clock.add_iterator(ddex_market)
-    clock.add_iterator(strategy)
-
-    binance_market.set_balance("ETH", 10.0)
-    binance_market.set_balance("USDT", 1000.0)
-    ddex_market.set_balance("WETH", 10.0)
-    ddex_market.set_balance("DAI", 1000.0)
-
-    clock.backtest()
-    binance_loader.close()
-    ddex_loader.close()
-
-
-if __name__ == "__main__":
-    main()
diff --git a/test/debug/fixture_configs.py b/test/debug/fixture_configs.py
deleted file mode 100644
index ebc3defa12..0000000000
--- a/test/debug/fixture_configs.py
+++ /dev/null
@@ -1,31 +0,0 @@
-class FixtureConfigs:
-    in_mem_new_pass_configs = [
-        {"prompt": "Enter your new password >>> ", "input": "a"},
-        {"prompt": "Please reenter your password >>> ", "input": "a"},
-        {"prompt": "Import previous configs or create a new config file? 
(import/create) >>> ", "input": "create"} - ] - - pure_mm_basic_responses = { - "exchange": "binance", - "market": "LINK-ETH", - "bid_spread": "1", - "ask_spread": "1", - "order_refresh_time": "", - "order_amount": "4", - "advanced_mode": "Hell No!" - } - - global_binance_config = { - "binance_api_key": "", - "binance_api_secret": "", - "kill_switch_enabled": "no", - "send_error_logs": "no" - } - - in_mem_existing_pass_import_configs = [ - {"prompt": "Import previous configs or create a new config file? (import/create) >>> ", "input": "import"} - ] - - in_mem_existing_pass_create_configs = [ - {"prompt": "Import previous configs or create a new config file? (import/create) >>> ", "input": "create"} - ] diff --git a/test/debug/htx_mock_api.py b/test/debug/htx_mock_api.py deleted file mode 100644 index 240a86af30..0000000000 --- a/test/debug/htx_mock_api.py +++ /dev/null @@ -1,289 +0,0 @@ -from aiohttp import web - - -class HtxMockAPI: - MOCK_HTX_USER_ID = 10000000 - MOCK_HTX_LIMIT_BUY_ORDER_ID = 11111 - MOCK_HTX_LIMIT_SELL_ORDER_ID = 22222 - MOCK_HTX_MARKET_BUY_ORDER_ID = 33333 - MOCK_HTX_MARKET_SELL_ORDER_ID = 44444 - MOCK_HTX_LIMIT_CANCEL_ORDER_ID = 55555 - MOCK_HTX_LIMIT_OPEN_ORDER_ID = 66666 - MOCK_HTX_LIMIT_BUY_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_LIMIT_BUY_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "0.020000000000000000", - "price": "189.770000000000000000", - "created-at": 1570494069606, - "type": "buy-limit", - "field-amount": "0.020000000000000000", - "field-cash-amount": "3.614600000000000000", - "field-fees": "0.000040000000000000", - "finished-at": 1570494069689, - "user-id": MOCK_HTX_USER_ID, - "source": "spot-api", - "state": "filled", - "canceled-at": 0 - } - } - MOCK_HTX_LIMIT_SELL_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_LIMIT_SELL_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "0.020000000000000000", - "price": "189.770000000000000000", - "created-at": 1570494069606, - "type": "sell-limit", - "field-amount": "0.020000000000000000", - "field-cash-amount": "3.614600000000000000", - "field-fees": "0.000040000000000000", - "finished-at": 1570494069689, - "user-id": MOCK_HTX_USER_ID, - "source": "spot-api", - "state": "filled", - "canceled-at": 0 - } - } - MOCK_HTX_MARKET_BUY_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_LIMIT_BUY_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "3.580000000000000000", - "price": "0.0", - "created-at": 1570571586091, - "type": "buy-market", - "field-amount": "0.020024611254055263", - "field-cash-amount": "3.579999999999999919", - "field-fees": "0.000040049222508111", - "finished-at": 1570571586178, - "source": "spot-api", - "state": "filled", - "canceled-at": 0 - } - } - MOCK_HTX_MARKET_SELL_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_MARKET_SELL_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "0.020000000000000000", - "price": "0.0", - "created-at": 1570494069606, - "type": "sell-market", - "field-amount": "0.020000000000000000", - "field-cash-amount": "3.614600000000000000", - "field-fees": "0.000040000000000000", - "finished-at": 1570494069689, - "user-id": MOCK_HTX_USER_ID, - "source": "spot-api", - "state": "filled", - "canceled-at": 0 - } - } - MOCK_HTX_LIMIT_CANCEL_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_LIMIT_CANCEL_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "0.020000000000000000", - "price": "162.670000000000000000", - 
"created-at": 1570575422098, - "type": "buy-limit", - "field-amount": "0.0", - "field-cash-amount": "0.0", - "field-fees": "0.0", - "finished-at": 1570575423650, - "source": "spot-api", - "state": "submitted", - "canceled-at": 1570575423600 - } - } - MOCK_HTX_LIMIT_OPEN_RESPONSE = { - "status": "ok", - "data": { - "id": MOCK_HTX_LIMIT_OPEN_ORDER_ID, - "symbol": "ethusdt", - "account-id": 10055506, - "amount": "0.040000000000000000", - "price": "162.670000000000000000", - "created-at": 1570575422098, - "type": "buy-limit", - "field-amount": "0.0", - "field-cash-amount": "0.0", - "field-fees": "0.0", - "finished-at": 1570575423650, - "source": "spot-api", - "state": "submitted", - "canceled-at": 1570575423600 - } - } - - def __init__(self): - self.order_id = None - self.cancel_all_order_ids = [] - self.order_response_dict = { - self.MOCK_HTX_LIMIT_BUY_ORDER_ID: self.MOCK_HTX_LIMIT_BUY_RESPONSE, - self.MOCK_HTX_LIMIT_SELL_ORDER_ID: self.MOCK_HTX_LIMIT_SELL_RESPONSE, - self.MOCK_HTX_MARKET_BUY_ORDER_ID: self.MOCK_HTX_MARKET_BUY_RESPONSE, - self.MOCK_HTX_MARKET_SELL_ORDER_ID: self.MOCK_HTX_MARKET_SELL_RESPONSE, - self.MOCK_HTX_LIMIT_CANCEL_ORDER_ID: self.MOCK_HTX_LIMIT_CANCEL_RESPONSE, - self.MOCK_HTX_LIMIT_OPEN_ORDER_ID: self.MOCK_HTX_LIMIT_OPEN_RESPONSE - } - - async def get_mock_snapshot(self, _): - return web.json_response({ - "ch": "market.ethusdt.depth.step0", - "ts": 1570486543309, - "tick": { - "bids": [ - [ - 100.21, - 23.5445 - ], - [ - 100.2, - 86.4019 - ], - [ - 100.17, - 6.1261 - ], - [ - 100.16, - 10.0 - ], - [ - 100.14, - 8.0 - ] - ], - "asks": [ - [ - 100.24, - 2.3602 - ], - [ - 100.25, - 15.1513 - ], - [ - 100.27, - 19.1565 - ], - [ - 100.29, - 12.0 - ], - [ - 100.3, - 23.3643 - ] - ], - "version": 102339771356, - "ts": 1570486543009 - } - }) - - async def get_market_tickers(self, _): - response = { - "status": "ok", - "ts": 1570060262253, - "data": [{ - "symbol": "ethusdt", - "open": 175.57, - "high": 181, - "low": 175, - "close": 180.11, - "amount": 330265.5220692477, - "vol": 58300213.797686026, - "count": 93755 - }] - } - return web.json_response(response, status=200) - - async def get_account_accounts(self, _): - response = { - "status": "ok", - "data": [{ - "id": self.MOCK_HTX_USER_ID, - "type": "spot", - "subtype": "", - "state": "working" - }] - } - return web.json_response(response, status=200) - - async def get_common_timestamp(self, _): - response = {"status": "ok", "data": 1569445000000} - return web.json_response(response, status=200) - - async def get_common_symbols(self, _): - response = { - "status": "ok", - "data": [ - { - "base-currency": "eth", - "quote-currency": "usdt", - "price-precision": 2, - "amount-precision": 4, - "symbol-partition": "main", - "symbol": "ethusdt", - "state": "online", - "value-precision": 8, - "min-order-amt": 0.001, - "max-order-amt": 10000, - "min-order-value": 1 - } - ] - } - return web.json_response(response, status=200) - - async def get_user_balance(self, _): - response = { - "status": "ok", - "data": { - "id": self.MOCK_HTX_USER_ID, - "type": "spot", - "state": "working", - "list": [{ - "currency": "eth", - "type": "trade", - "balance": "0.259942948171422263" - }] - } - } - return web.json_response(response, status=200) - - async def post_order_place(self, req: web.Request): - response = { - "status": "ok", - "data": self.order_id - } - return web.json_response(response, status=200) - - async def post_submit_cancel(self, _): - response = { - "status": "ok", - "data": self.order_id - } - return web.json_response(response, 
status=200) - - async def get_order_update(self, _): - response = self.order_response_dict[self.order_id] - return web.json_response(response, status=200) - - async def post_batch_cancel(self, _): - response = { - "status": "ok", - "data": {"success": self.cancel_all_order_ids, "failed": []} - } - return web.json_response(response, status=200) diff --git a/test/debug/test_composite_order_book.py b/test/debug/test_composite_order_book.py deleted file mode 100644 index 1955e82c95..0000000000 --- a/test/debug/test_composite_order_book.py +++ /dev/null @@ -1,240 +0,0 @@ -import unittest - -import pandas as pd - -from hummingbot.core.clock import ( - ClockMode, - Clock -) -from hummingbot.core.data_type.common import TradeType - - -# class CompositeOrderBookTestStrategy(UnitTestStrategy): -# """ -# Makes market orders and record fill events -# """ -# -# class OrderFilledEventLogger(EventListener): -# def __init__(self, owner: "CompositeOrderBookTestStrategy"): -# self._owner: "CompositeOrderBookTestStrategy" = owner -# -# def __call__(self, order_filled_event: OrderFilledEvent): -# self._owner.log_order_filled_event(order_filled_event) -# -# def __init__(self, market: Market, trades: Dict[str, Tuple[str, float]]): -# super().__init__(market) -# self.trades = trades -# self.tick_size = 5 -# self._order_filled_event_timestamps: List[float] = [] -# self._order_filled_events: List[OrderFilledEvent] = [] -# self._trade_logger: CompositeOrderBookTestStrategy.OrderFilledEventLogger = self.OrderFilledEventLogger(self) -# market.add_listener(MarketEvent.OrderFilled, self._trade_logger) -# -# self.start_printing = False -# -# def log_order_filled_event(self, evt: OrderFilledEvent): -# self._order_filled_event_timestamps.append(self.current_timestamp) -# self._order_filled_events.append(evt) -# -# def process_tick(self): -# if self.current_timestamp in self.trades: -# for trade in self.trades[self.current_timestamp]: -# if trade[1] == "buy": -# self.market.buy(trade[0], trade[2]) -# elif trade[1] == "sell": -# self.market.sell(trade[0], trade[2]) -# self.start_printing = True -# -# composite_ob = self.market.get_order_book("WETH-DAI") -# composite_bids = list(composite_ob.bid_entries()) -# composite_asks = list(composite_ob.ask_entries()) -# -# if not self.start_printing: -# return -# -# original_bids = list(composite_ob.original_bid_entries()) -# original_asks = list(composite_ob.original_ask_entries()) -# -# filled_bids = list(composite_ob.traded_order_book.bid_entries()) -# filled_asks = list(composite_ob.traded_order_book.ask_entries()) -# -# order_books_top = [composite_bids[i] + composite_asks[i] + original_bids[i] + original_asks[i] for i in range(5)] -# -# filled_order_books = [] -# for i in range(max(len(filled_bids), len(filled_asks))): -# if i + 1 > len(filled_bids): -# fb = (None, None, None) -# else: -# fb = filled_bids[i] -# if i + 1 > len(filled_asks): -# fa = (None, None, None) -# else: -# fa = filled_asks[i] -# filled_order_books.append(fb + fa) -# -# print(str(pd.Timestamp(self.current_timestamp, unit="s", tz="UTC")) + "\n" + -# pd.DataFrame(data=filled_order_books, -# columns=['filled_bid_price', 'filled_bid_amount', 'uid', -# 'filled_ask_price', 'filled_ask_amount', 'uid' -# ] -# ).to_string() -# ) -# -# print(str(pd.Timestamp(self.current_timestamp, unit="s", tz="UTC")) + "\n" + -# pd.DataFrame(data=order_books_top, -# columns=['composite_bid_price', 'composite_bid_amount', 'uid', -# 'composite_ask_price', 'composite_ask_amount', 'uid', -# 'original_bid_price', 'original_bid_amount', 
'uid', -# 'original_ask_price', 'original_ask_amount', 'uid', -# ] -# ).to_string() -# ) -# -# @property -# def order_filled_events(self) -> pd.DataFrame: -# retval: pd.DataFrame = pd.DataFrame(data=self._order_filled_events, -# columns=OrderFilledEvent._fields, -# index=pd.Index(self._order_filled_event_timestamps, dtype="float64")) -# retval.index = (retval.index * 1e9).astype("int64").astype("datetime64[ns]") -# return retval - -@unittest.skip("The test seems to be out of date. It requires the hummingsim component that is not present") -class CompositeOrderBookTest(unittest.TestCase): - start: pd.Timestamp = pd.Timestamp("2019-01-25", tz="UTC") - end: pd.Timestamp = pd.Timestamp("2019-01-26", tz="UTC") - - def setUp(self): - # self.weth_dai_data = DDEXOrderBookLoader("WETH-DAI", "WETH", "DAI") - self.clock = Clock(ClockMode.BACKTEST, 1.0, self.start.timestamp(), self.end.timestamp()) - # self.market = BacktestMarket() - self.market.add_data(self.weth_dai_data) - self.market.set_balance("WETH", 200.0) - self.market.set_balance("DAI", 20000.0) - self.clock.add_iterator(self.market) - - def tearDown(self): - self.weth_dai_data.close() - - def verify_filled_order_recorded(self, recorded_filled_events, composite_order_book): - bid_dict = {entry.price: (entry.amount, entry.update_id) - for entry in composite_order_book.traded_order_book.bid_entries()} - ask_dict = {entry.price: (entry.amount, entry.update_id) - for entry in composite_order_book.traded_order_book.ask_entries()} - for index, fill_event in recorded_filled_events.iterrows(): - if fill_event.trade_type is TradeType.SELL: - self.assertTrue(fill_event.price in bid_dict) - self.assertTrue(bid_dict[fill_event.price][0] == fill_event.amount) - self.assertTrue(bid_dict[fill_event.price][1] == fill_event.timestamp) - elif fill_event.trade_type is TradeType.BUY: - self.assertTrue(fill_event.price in ask_dict) - self.assertTrue(ask_dict[fill_event.price][0] == fill_event.amount) - self.assertTrue(ask_dict[fill_event.price][1] == fill_event.timestamp) - - def verify_composite_order_book_correctness(self, composite_order_book): - filled_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.bid_entries()} - filled_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.ask_entries()} - - composite_bid_dict = {o.price: (o.amount, o.update_id) for o in composite_order_book.bid_entries()} - composite_ask_dict = {o.price: (o.amount, o.update_id) for o in composite_order_book.ask_entries()} - - original_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_bid_entries()} - original_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_ask_entries()} - - for filled_bid_price, filled_bid_amount in filled_bid_dict.items(): - if filled_bid_price in original_bid_dict: - if (original_bid_dict[filled_bid_price] - filled_bid_amount) <= 0: - self.assertTrue(filled_bid_price not in composite_bid_dict) - else: - self.assertTrue(composite_bid_dict[filled_bid_price] == - original_bid_dict[filled_bid_price] - filled_bid_amount) - - for filled_ask_price, filled_ask_amount in filled_ask_dict.items(): - if filled_ask_price in original_ask_dict: - if (original_bid_dict[filled_ask_price] - filled_ask_amount) <= 0: - self.assertTrue(filled_ask_price not in composite_ask_dict) - else: - self.assertTrue(composite_bid_dict[filled_ask_price] == - original_bid_dict[filled_ask_price] - filled_ask_amount) - - def 
verify_composite_order_book_cleanup(self, recorded_filled_events, composite_order_book): - """ - Recorded fill order should be cleaned up when the original order book no longer contain that price entry - """ - filled_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.bid_entries()} - filled_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.ask_entries()} - - original_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_bid_entries()} - original_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_ask_entries()} - - for index, fill_event in recorded_filled_events.iterrows(): - if fill_event.trade_type is TradeType.SELL: - if fill_event.price not in original_bid_dict: - self.assertTrue(fill_event.price not in filled_bid_dict) - - elif fill_event.trade_type is TradeType.BUY: - if fill_event.price not in original_ask_dict: - self.assertTrue(fill_event.price not in filled_ask_dict) - - def verify_composite_order_book_adjustment(self, composite_order_book): - """ - Recorded fill order sohuld adjust it's amount to no larger than the original price entries' amount - """ - filled_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.bid_entries()} - filled_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.traded_order_book.ask_entries()} - - original_bid_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_bid_entries()} - original_ask_dict = {o.price: (o.amount, o.update_id) - for o in composite_order_book.original_ask_entries()} - - for filled_bid_price, filled_bid_entry in filled_bid_dict.items(): - if filled_bid_price in original_bid_dict: - self.assertTrue(original_bid_dict[filled_bid_price][0] >= filled_bid_entry[0]) - - for filled_ask_price, filled_ask_entry in filled_ask_dict.items(): - if filled_ask_price in original_ask_dict: - self.assertTrue(original_ask_dict[filled_ask_price][0] >= filled_ask_entry[0]) - - # def test_market_order(self): - # trades = { - # pd.Timestamp("2019-01-25 00:00:10+00:00").timestamp(): [ - # ("WETH-DAI", "buy", 5.0), - # ("WETH-DAI", "sell", 5.0) - # ] - # } - # strategy: CompositeOrderBookTestStrategy = CompositeOrderBookTestStrategy(self.market, trades) - # self.clock.add_iterator(strategy) - # self.clock.backtest_til(self.start.timestamp() + 10) - # - # self.verify_filled_order_recorded(strategy.order_filled_events, self.market.get_order_book("WETH-DAI")) - # self.verify_composite_order_book_correctness(self.market.get_order_book("WETH-DAI")) - # - # self.clock.backtest_til(self.start.timestamp() + 70) - # - # self.verify_composite_order_book_cleanup(strategy.order_filled_events, self.market.get_order_book("WETH-DAI")) - # - # def test_composite_order_book_adjustment(self): - # trades = { - # pd.Timestamp("2019-01-25 00:02:15+00:00").timestamp(): [ - # ("WETH-DAI", "sell", 93.53 + 23.65) - # ] - # } - # strategy: CompositeOrderBookTestStrategy = CompositeOrderBookTestStrategy(self.market, trades) - # self.clock.add_iterator(strategy) - # self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 15) - # self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 25) - # self.verify_composite_order_book_adjustment(self.market.get_order_book("WETH-DAI")) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/debug/test_config_process.py b/test/debug/test_config_process.py deleted file mode 100644 index 
21d9cb8f39..0000000000 --- a/test/debug/test_config_process.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python -import asyncio -import inspect -import os -import time -import unittest -from os.path import join, realpath -from test.debug.fixture_configs import FixtureConfigs - -from bin.hummingbot import main as hb_main -from hummingbot.client import settings -from hummingbot.client.config.global_config_map import global_config_map -from hummingbot.client.config.security import Security -from hummingbot.client.hummingbot_application import HummingbotApplication -from hummingbot.strategy.pure_market_making.pure_market_making_config_map import pure_market_making_config_map - -import sys; sys.path.insert(0, realpath(join(__file__, "../../"))) -import sys; sys.path.append(realpath(join(__file__, "../../bin"))) - - -async def wait_til(condition_func, timeout=10): - start_time = time.perf_counter() - while True: - if condition_func(): - return - elif time.perf_counter() - start_time > timeout: - raise Exception(f"{inspect.getsource(condition_func).strip()} condition is never met. Time out reached.") - else: - await asyncio.sleep(0.1) - - -async def wait_til_notified(text): - await wait_til(lambda: text in HummingbotApplication.main_application().app.output_field.document.lines[:-1]) - - -def user_response(text): - hb = HummingbotApplication.main_application() - hb.app.set_text(text) - hb.app.accept(None) - hb.app.set_text("") - - -def add_files_extension(folder, file_extensions, additional_extension): - for f in os.listdir(folder): - f_path = os.path.join(folder, f) - if os.path.isfile(f_path): - extension = os.path.splitext(f_path)[1] - if extension in file_extensions: - os.rename(f_path, f_path + f"{additional_extension}") - - -def remove_files_extension(folder, file_extension): - for f in os.listdir(folder): - f_path = os.path.join(folder, f) - if os.path.isfile(f_path): - extension = os.path.splitext(f_path)[1] - if extension == file_extension: - os.rename(f_path, os.path.splitext(f_path)[0]) - - -def remove_files(folder, file_extensions): - for f in os.listdir(folder): - f_path = os.path.join(folder, f) - if os.path.isfile(f_path): - extension = os.path.splitext(f_path)[1] - if extension in file_extensions: - os.remove(f_path) - - -class ConfigProcessTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.ev_loop = asyncio.new_event_loop() - cls.ev_loop.run_until_complete(cls.set_up_class()) - cls.file_no = 0 - - @classmethod - def tearDownClass(cls) -> None: - remove_files(settings.STRATEGIES_CONF_DIR_PATH, [".yml", ".json"]) - remove_files_extension(settings.STRATEGIES_CONF_DIR_PATH, ".temp") - user_response("stop") - cls.ev_loop.run_until_complete(wait_til(lambda: cls.hb.markets_recorder is None)) - - @classmethod - async def set_up_class(cls): - add_files_extension(settings.STRATEGIES_CONF_DIR_PATH, [".yml", ".json"], ".temp") - asyncio.ensure_future(hb_main()) - cls.hb = HummingbotApplication.main_application() - await wait_til(lambda: 'Enter "config" to create a bot' in cls.hb.app.output_field.document.text) - - async def check_prompt_and_input(self, expected_prompt_text, input_text): - self.assertEqual(self.hb.app.prompt_text, expected_prompt_text) - last_output = str(self.hb.app.output_field.document.lines[-1]) - user_response(input_text) - await wait_til(lambda: str(self.hb.app.output_field.document.lines[-1]) != last_output) - await asyncio.sleep(0.1) - - async def _test_pure_mm_basic_til_start(self): - config_file_name = 
f"{settings.CONF_PREFIX}pure_market_making{settings.CONF_POSTFIX}_{self.file_no}.yml" - await self.check_prompt_and_input(">>> ", "config") - # For the second time this test is called, it's to reconfigure the bot - if self.file_no > 0: - await self.check_prompt_and_input("Would you like to reconfigure the bot? (Yes/No) >>> ", "yes") - ConfigProcessTest.file_no += 1 - fixture_in_mem = FixtureConfigs.in_mem_new_pass_configs if Security.password is None \ - else FixtureConfigs.in_mem_existing_pass_create_configs - for fixture_config in fixture_in_mem: - await self.check_prompt_and_input(fixture_config["prompt"], fixture_config["input"]) - await wait_til(lambda: f'A new config file {config_file_name}' in self.hb.app.output_field.document.text) - # configs that are required will be prompted - for config_name, response in FixtureConfigs.pure_mm_basic_responses.items(): - config = pure_market_making_config_map[config_name] - await self.check_prompt_and_input(config.prompt, response) - # advance_mode will be asked again as the previous response is not valid. - await asyncio.sleep(0.2) - self.assertEqual(self.hb.app.output_field.document.lines[-1], - f"{FixtureConfigs.pure_mm_basic_responses['advanced_mode']} " - f"is not a valid advanced_mode value") - await self.check_prompt_and_input(pure_market_making_config_map["advanced_mode"].prompt, "no") - - # input for cancel_order_wait_time is empty, check the assigned value is its default value - self.assertEqual(pure_market_making_config_map["cancel_order_wait_time"].value, - pure_market_making_config_map["cancel_order_wait_time"].default) - - # Check that configs that are not prompted get assigned correct default value - for name, config in pure_market_making_config_map.items(): - if config.default is not None and name not in FixtureConfigs.pure_mm_basic_responses: - self.assertEqual(config.value, config.default) - - # if not conf_global_file_exists: - for name, config in global_config_map.items(): - if config.required and config.value is None: - await self.check_prompt_and_input(config.prompt, FixtureConfigs.global_binance_config[name]) - - self.assertEqual(pure_market_making_config_map["mode"].value, - pure_market_making_config_map["mode"].default) - await wait_til(lambda: "Config process complete." in self.hb.app.output_field.document.text) - - def test_pure_mm_basic_til_start(self): - self.ev_loop.run_until_complete(self._test_pure_mm_basic_til_start()) - - async def _test_pure_mm_basic_import_config_file(self): - config_file_name = f"{settings.CONF_PREFIX}pure_market_making{settings.CONF_POSTFIX}_0.yml" - # update the config file to put in some blank and invalid values. - with open(os.path.join(settings.STRATEGIES_CONF_DIR_PATH, config_file_name), "r+") as f: - content = f.read() # read everything in the file - f.seek(0) # rewind - content = content.replace("bid_place_threshold: 0.01", "bid_place_threshold: ") - content = content.replace("advanced_mode: false", "advanced_mode: better not") - f.write(content) # write the new line before - await self.check_prompt_and_input(">>> ", "stop") - await self.check_prompt_and_input(">>> ", "config") - await self.check_prompt_and_input("Would you like to reconfigure the bot? 
(Yes/No) >>> ", "yes") - for fixture_config in FixtureConfigs.in_mem_existing_pass_import_configs: - await self.check_prompt_and_input(fixture_config["prompt"], fixture_config["input"]) - # await self.check_prompt_and_input(default_strategy_conf_path_prompt(), config_file_name) - # advanced_mode should be prompted here as its file value not valid. - await self.check_prompt_and_input(pure_market_making_config_map["bid_place_threshold"].prompt, "0.01") - await self.check_prompt_and_input(pure_market_making_config_map["advanced_mode"].prompt, "no") - await wait_til(lambda: "Config process complete." in self.hb.app.output_field.document.text) - - def test_pure_mm_basic_import_config_file(self): - self.ev_loop.run_until_complete(self._test_pure_mm_basic_import_config_file()) - - async def _test_single_configs(self): - await self.check_prompt_and_input(">>> ", "config bid_place_threshold") - # try inputting invalid value - await self.check_prompt_and_input(pure_market_making_config_map["bid_place_threshold"].prompt, "-0.01") - self.assertEqual(self.hb.app.output_field.document.lines[-1], "-0.01 is not a valid bid_place_threshold value") - await self.check_prompt_and_input(pure_market_making_config_map["bid_place_threshold"].prompt, "0.01") - - def test_single_configs(self): - self.ev_loop.run_until_complete(self._test_single_configs()) - - -def suite(): - suite = unittest.TestSuite() - suite.addTest(ConfigProcessTest('test_pure_mm_basic_til_start')) - suite.addTest(ConfigProcessTest('test_single_configs')) - suite.addTest(ConfigProcessTest('test_pure_mm_basic_import_config_file')) - suite.addTest(ConfigProcessTest('test_pure_mm_basic_til_start')) - return suite - - -if __name__ == '__main__': - runner = unittest.TextTestRunner() - runner.run(suite()) diff --git a/test/debug/test_order_expiration.py b/test/debug/test_order_expiration.py deleted file mode 100644 index 9cb19b69e2..0000000000 --- a/test/debug/test_order_expiration.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -from os.path import join, realpath -import sys; sys.path.insert(0, realpath(join(__file__, "../../../"))) -import pandas as pd -from typing import ( - List, - Dict, - Tuple) -import unittest -from hummingsim.backtest.binance_order_book_loader_v2 import BinanceOrderBookLoaderV2 -from hummingsim.backtest.backtest_market import BacktestMarket -from hummingsim.backtest.market import Market, OrderType -from hummingsim.strategy.unit_test_strategy import UnitTestStrategy -from hummingbot.core.clock import ( - ClockMode, - Clock -) -from hummingbot.core.event.events import ( - MarketEvent, - OrderExpiredEvent, -) -from hummingbot.core.event.event_listener import EventListener - - -class OrderExpirationTestStrategy(UnitTestStrategy): - """ - Makes expiring limit orders and record order expired events - """ - - class OrderFilledEventLogger(EventListener): - def __init__(self, owner: "OrderExpirationTestStrategy"): - self._owner: "OrderExpirationTestStrategy" = owner - - def __call__(self, order_expired_event: OrderExpiredEvent): - self._owner.log_order_expired_event(order_expired_event) - - def __init__(self, market: Market, trades: Dict[str, Tuple[str, float]]): - super().__init__(market) - self.trades = trades - self.tick_size = 5 - self._order_expired_event_timestamps: List[float] = [] - self._order_expired_events: List[OrderExpiredEvent] = [] - self._order_expired_logger: OrderExpirationTestStrategy.OrderFilledEventLogger = self.OrderFilledEventLogger(self) - market.add_listener(MarketEvent.OrderExpired, 
self._order_expired_logger) - - self.start_printing = False - - def log_order_expired_event(self, evt: OrderExpiredEvent): - self._order_expired_event_timestamps.append(self.current_timestamp) - self._order_expired_events.append(evt) - - def process_tick(self): - if self.current_timestamp in self.trades: - for trade in self.trades[self.current_timestamp]: - if trade[1] == "buy": - self.market.buy(trade[0], trade[2], order_type=OrderType.LIMIT, price=trade[3], kwargs=trade[4]) - elif trade[1] == "sell": - self.market.sell(trade[0], trade[2], order_type=OrderType.LIMIT, price=trade[3], kwargs=trade[4]) - self.start_printing = True - if not self.start_printing: - return - # print(self.order_expired_events) - # print(self.market.limit_orders) - print(self.market.order_expirations) - - @property - def order_expired_events(self) -> pd.DataFrame: - retval: pd.DataFrame = pd.DataFrame(data=self._order_expired_events, - columns=OrderExpiredEvent._fields, - index=pd.Index(self._order_expired_event_timestamps, dtype="float64")) - retval.index = (retval.index * 1e9).astype("int64").astype("datetime64[ns]") - return retval - - -class OrderExpirationTest(unittest.TestCase): - start: pd.Timestamp = pd.Timestamp("2019-01-24", tz="UTC") - end: pd.Timestamp = pd.Timestamp("2019-01-26", tz="UTC") - market_name = "ETHUSDT" - quote = "ETH" - base = "USDT" - - def setUp(self): - # self.weth_dai_data = DDEXOrderBookLoader("WETH-DAI", "WETH", "DAI") - self.pair_data = BinanceOrderBookLoaderV2(self.market_name, "ETH", "USDT") - # self.pair_data = HuobiOrderBookLoader(self.market_name, "", "") - self.clock = Clock(ClockMode.BACKTEST, 1.0, self.start.timestamp(), self.end.timestamp()) - self.market = BacktestMarket() - # self.market.add_data(self.weth_dai_data) - self.market.add_data(self.pair_data) - self.market.set_balance(self.quote, 200.0) - self.market.set_balance(self.base, 20000.0) - self.clock.add_iterator(self.market) - - def tearDown(self): - # self.weth_dai_data.close() - # self.eth_usd_data.close() - self.pair_data.close() - - def verify_expired_order_cleanup(self, order_expired_events, limit_orders): - """ - Recorded order expired event should indicate that these orders are no longer in the limit orders - """ - limit_order_dict = {o.client_order_id: o for o in limit_orders} - - for index, order_expired_event in order_expired_events.iterrows(): - self.assertTrue(order_expired_event.order_id not in limit_order_dict) - - def test_ask_order_expiration_clean_up(self): - ts_1 = pd.Timestamp("2019-01-24 00:02:15+00:00").timestamp() - ts_2 = pd.Timestamp("2019-01-24 00:02:20+00:00").timestamp() - trades = { - ts_1: [ - (self.market_name, "sell", 1302, 255, {"expiration_ts": ts_1 + 9}) - ], - ts_2: [ - (self.market_name, "sell", 1302, 250, {"expiration_ts": ts_2 + 9}) - ] - } - strategy: OrderExpirationTestStrategy = OrderExpirationTestStrategy(self.market, trades) - self.clock.add_iterator(strategy) - - # first limit order made - self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 15) - first_order_id = self.market.limit_orders[0].client_order_id - self.assertTrue(len(self.market.limit_orders) == 1) - self.assertTrue(first_order_id in {o.order_id: o for o in self.market.order_expirations}) - - # second limit order made - self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 20) - self.assertTrue(len(self.market.limit_orders) == 2) - - # first limit order expired - self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 25) - # check if order expired event is fired - self.assertTrue(first_order_id in 
[evt.order_id for i, evt in strategy.order_expired_events.iterrows()]) - # check if the expired limit order is cleaned up - self.verify_expired_order_cleanup(strategy.order_expired_events, self.market.limit_orders) - - self.assertTrue(len(self.market.limit_orders) == 1) - second_order_id = self.market.limit_orders[0].client_order_id - self.assertTrue(second_order_id in {o.order_id: o for o in self.market.order_expirations}) - - # second limit order expired - self.clock.backtest_til(self.start.timestamp() + 60 * 2 + 30) - # check if order expired event is fired - self.assertTrue(second_order_id in [evt.order_id for i, evt in strategy.order_expired_events.iterrows()]) - # check if the expired limit order is cleaned up - self.verify_expired_order_cleanup(strategy.order_expired_events, self.market.limit_orders) - - def test_bid_order_expiration_clean_up(self): - ts_1 = pd.Timestamp("2019-01-24 00:12:15+00:00").timestamp() - ts_2 = pd.Timestamp("2019-01-24 00:12:20+00:00").timestamp() - - trades = { - ts_1: [ - (self.market_name, "buy", 100, 55, {"expiration_ts": ts_1 + 9}) - - ], - ts_2: [ - (self.market_name, "buy", 100, 50, {"expiration_ts": ts_2 + 9}), - (self.market_name, "buy", 100, 55, {"expiration_ts": ts_2 + 9}) - ] - } - strategy: OrderExpirationTestStrategy = OrderExpirationTestStrategy(self.market, trades) - self.clock.add_iterator(strategy) - - # first limit order made - self.clock.backtest_til(self.start.timestamp() + 60 * 12 + 15) - first_order_id = self.market.limit_orders[0].client_order_id - self.assertTrue(len(self.market.limit_orders) == 1) - self.assertTrue(first_order_id in {o.order_id: o for o in self.market.order_expirations}) - - # second limit order made - self.clock.backtest_til(self.start.timestamp() + 60 * 12 + 20) - self.assertTrue(len(self.market.limit_orders) == 3) - - # first limit order expired - self.clock.backtest_til(self.start.timestamp() + 60 * 12 + 25) - # check if order expired event is fired - self.assertTrue(first_order_id in [evt.order_id for i, evt in strategy.order_expired_events.iterrows()]) - # check if the expired limit order is cleaned up - self.verify_expired_order_cleanup(strategy.order_expired_events, self.market.limit_orders) - - self.assertTrue(len(self.market.limit_orders) == 2) - second_order_id_1 = self.market.limit_orders[0].client_order_id - second_order_id_2 = self.market.limit_orders[1].client_order_id - - self.assertTrue(second_order_id_1 in {o.order_id: o for o in self.market.order_expirations}) - self.assertTrue(second_order_id_2 in {o.order_id: o for o in self.market.order_expirations}) - - # second limit order expired - self.clock.backtest_til(self.start.timestamp() + 60 * 12 + 30) - # check if order expired event is fired - self.assertTrue(second_order_id_1 in [evt.order_id for i, evt in strategy.order_expired_events.iterrows()]) - self.assertTrue(second_order_id_2 in [evt.order_id for i, evt in strategy.order_expired_events.iterrows()]) - # check if the expired limit order is cleaned up - self.verify_expired_order_cleanup(strategy.order_expired_events, self.market.limit_orders) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/debug/test_paper_trade_market.py b/test/debug/test_paper_trade_market.py deleted file mode 100644 index 83c7027eba..0000000000 --- a/test/debug/test_paper_trade_market.py +++ /dev/null @@ -1,506 +0,0 @@ -import asyncio -import contextlib -import logging -import os -import sys -import time -import unittest -from os.path import join, realpath -from typing import Dict, Iterator, List, 
NamedTuple - -import pandas as pd - -from hummingbot.connector.exchange.binance.binance_api_order_book_data_source import BinanceAPIOrderBookDataSource -from hummingbot.connector.exchange.binance.binance_exchange import BinanceExchange -from hummingbot.connector.exchange.paper_trade.paper_trade_exchange import PaperTradeExchange, QueuedOrder -from hummingbot.connector.exchange.paper_trade.trading_pair import TradingPair -from hummingbot.core.clock import Clock, ClockMode -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.data_type.limit_order import LimitOrder -from hummingbot.core.data_type.order_book_row import OrderBookRow -from hummingbot.core.data_type.order_book_tracker import OrderBookTracker -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCompletedEvent, - BuyOrderCreatedEvent, - MarketEvent, - OrderBookTradeEvent, - OrderCancelledEvent, - OrderFilledEvent, - SellOrderCompletedEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather - -logging.basicConfig(level=logging.INFO) -sys.path.insert(0, realpath(join(__file__, "../../../"))) - - -class TestUtils: - @staticmethod - def filter_events_by_type(event_logs, event_type): - return [e for e in event_logs if type(e) == event_type] - - @classmethod - def get_match_events(cls, event_logs: List[NamedTuple], event_type: NamedTuple, match_dict: Dict[str, any]): - match_events = [] - for e in cls.filter_events_by_type(event_logs, event_type): - match = True - for k, v in match_dict.items(): - try: - event_value = getattr(e, k) - if type(v) in [float]: - if abs(v - float(event_value)) <= 1 * 10 ** (-8): - continue - elif event_value != v: - match = False - break - except Exception as err: - print(f"Key {k} does not exist in event {e}. Error: {err}") - if match: - match_events.append(e) - return match_events - - @classmethod - def get_match_limit_orders(cls, limit_orders: List[LimitOrder], match_dict: Dict[str, any]): - match_orders = [] - for o in limit_orders: - match = True - for k, v in match_dict.items(): - try: - order_value = getattr(o, k) - if type(v) in [float]: - if abs(v - float(order_value)) <= 1 * 10 ** (-8): - continue - elif order_value != v: - match = False - break - except Exception as err: - print(f"Key {k} does not exist in LimitOrder {o}. Error: {err}") - if match: - match_orders.append(o) - return match_orders - - -class OrderBookUtils: - @classmethod - def ob_rows_data_frame(cls, ob_rows): - data = [] - try: - for ob_row in ob_rows: - data.append([ - ob_row.price, ob_row.amount - ]) - df = pd.DataFrame(data=data, columns=[ - "price", "amount"]) - df.index = df.price - return df - except Exception as e: - print(f"Error formatting market stats. 
{e}") - - @classmethod - def get_compare_df(cls, row_it_1: Iterator[OrderBookRow], row_it_2: Iterator[OrderBookRow], - n_rows: int = 20000, diffs_only: bool = False) -> pd.DataFrame: - rows_1 = list(row_it_1) - rows_2 = list(row_it_2) - book_1: pd.DataFrame = cls.ob_rows_data_frame(rows_1) - book_2: pd.DataFrame = cls.ob_rows_data_frame(rows_2) - book_1.index = book_1.price - book_2.index = book_2.price - compare_df: pd.DataFrame = pd.concat([book_1.iloc[0:n_rows], book_2.iloc[0:n_rows]], - axis="columns", keys=["pre", "post"]) - compare_df = compare_df.fillna(0.0) - compare_df['diff'] = compare_df['pre'].amount - compare_df['post'].amount - if not diffs_only: - return compare_df - else: - return compare_df[(compare_df["pre"]["amount"] - compare_df["post"]["amount"]).abs() > 1e-8] - - -class PaperTradeExchangeTest(unittest.TestCase): - events: List[MarketEvent] = [ - MarketEvent.BuyOrderCompleted, - MarketEvent.SellOrderCompleted, - MarketEvent.OrderFilled, - MarketEvent.TransactionFailure, - MarketEvent.BuyOrderCreated, - MarketEvent.SellOrderCreated, - MarketEvent.OrderCancelled - ] - - market: PaperTradeExchange - market_logger: EventLogger - stack: contextlib.ExitStack - - @classmethod - def setUpClass(cls): - global MAINNET_RPC_URL - - cls.clock: Clock = Clock(ClockMode.REALTIME) - connector = BinanceExchange( - binance_api_key="", - binance_api_secret="", - trading_pairs=["ETH-USDT", "BTC-USDT"], - trading_required=False) - cls.market: PaperTradeExchange = PaperTradeExchange( - order_book_tracker=OrderBookTracker( - data_source=BinanceAPIOrderBookDataSource( - trading_pairs=["ETH-USDT", "BTC-USDT"], - connector=connector, - api_factory=connector._api_factory), - trading_pairs=["ETH-USDT", "BTC-USDT"]), - target_market=BinanceExchange, - exchange_name="binance", - ) - print("Initializing PaperTrade execute orders market... 
this will take about a minute.") - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() - cls.clock.add_iterator(cls.market) - cls.stack: contextlib.ExitStack = contextlib.ExitStack() - cls._clock = cls.stack.enter_context(cls.clock) - cls.ev_loop.run_until_complete(cls.wait_til_ready()) - print("Ready.") - - @classmethod - def tearDownClass(cls) -> None: - cls.stack.close() - - @classmethod - async def wait_til_ready(cls): - while True: - now = time.time() - next_iteration = now // 1.0 + 1 - if cls.market.ready: - break - else: - await cls._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - - def setUp(self): - self.db_path: str = realpath(join(__file__, "../binance_test.sqlite")) - try: - os.unlink(self.db_path) - except FileNotFoundError: - pass - - self.market_logger = EventLogger() - for event_tag in self.events: - self.market.add_listener(event_tag, self.market_logger) - - for trading_pair, orderbook in self.market.order_books.items(): - orderbook.clear_traded_order_book() - - def tearDown(self): - for event_tag in self.events: - self.market.remove_listener(event_tag, self.market_logger) - self.market_logger = None - - async def run_parallel_async(self, *tasks): - future: asyncio.Future = safe_ensure_future(safe_gather(*tasks)) - while not future.done(): - now = time.time() - next_iteration = now // 1.0 + 1 - await self._clock.run_til(next_iteration) - await asyncio.sleep(1.0) - return future.result() - - def run_parallel(self, *tasks): - return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks)) - - def test_place_market_orders(self): - self.market.sell("ETH-USDT", 30, OrderType.MARKET) - list_queued_orders: List[QueuedOrder] = self.market.queued_orders - first_queued_order: QueuedOrder = list_queued_orders[0] - self.assertFalse(first_queued_order.is_buy, msg="Market order is not sell") - self.assertEqual(first_queued_order.trading_pair, "ETH-USDT", msg="Trading pair is incorrect") - self.assertEqual(first_queued_order.amount, 30, msg="Quantity is incorrect") - self.assertEqual(len(list_queued_orders), 1, msg="First market order did not get added") - - # Figure out why this test is failing - self.market.buy("BTC-USDT", 30, OrderType.MARKET) - list_queued_orders: List[QueuedOrder] = self.market.queued_orders - second_queued_order: QueuedOrder = list_queued_orders[1] - self.assertTrue(second_queued_order.is_buy, msg="Market order is not buy") - self.assertEqual(second_queued_order.trading_pair, "BTC-USDT", msg="Trading pair is incorrect") - self.assertEqual(second_queued_order.amount, 30, msg="Quantity is incorrect") - self.assertEqual(second_queued_order.amount, 30, msg="Quantity is incorrect") - self.assertEqual(len(list_queued_orders), 2, msg="Second market order did not get added") - - def test_market_order_simulation(self): - self.market.set_balance("ETH", 20) - self.market.set_balance("USDT", 100) - self.market.sell("ETH-USDT", 10, OrderType.MARKET) - self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - # Get diff between composite bid entries and original bid entries - compare_df = OrderBookUtils.get_compare_df( - self.market.order_books['ETH-USDT'].original_bid_entries(), - self.market.order_books['ETH-USDT'].bid_entries(), diffs_only=True).sort_index().round(10) - filled_bids = OrderBookUtils.ob_rows_data_frame( - list(self.market.order_books['ETH-USDT'].traded_order_book.bid_entries())).sort_index().round(10) - - # assert filled orders matches diff - diff_bid = compare_df["diff"] - filled_bids["amount"] - - 
self.assertFalse(diff_bid.to_numpy().any()) - - self.assertEquals(10, self.market.get_balance("ETH"), msg="Balance was not updated.") - self.market.buy("ETH-USDT", 5, OrderType.MARKET) - self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - # Get diff between composite bid entries and original bid entries - compare_df = OrderBookUtils.get_compare_df( - self.market.order_books['ETH-USDT'].original_ask_entries(), - self.market.order_books['ETH-USDT'].ask_entries(), diffs_only=True).sort_index().round(10) - filled_asks = OrderBookUtils.ob_rows_data_frame( - list(self.market.order_books['ETH-USDT'].traded_order_book.ask_entries())).sort_index().round(10) - - # assert filled orders matches diff - diff_ask = compare_df["diff"] - filled_asks["amount"] - - self.assertFalse(diff_ask.to_numpy().any()) - self.assertEquals(15, self.market.get_balance("ETH"), msg="Balance was not updated.") - - def test_limit_order_crossed(self): - starting_base_balance = 20 - starting_quote_balance = 1000 - self.market.set_balance("ETH", starting_base_balance) - self.market.set_balance("USDT", starting_quote_balance) - self.market.sell("ETH-USDT", 10, OrderType.LIMIT, 100) - self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - self.assertEquals(starting_base_balance - 10, self.market.get_balance("ETH"), - msg="ETH Balance was not updated.") - self.assertEquals(starting_quote_balance + 1000, self.market.get_balance("USDT"), - msg="USDT Balance was not updated.") - self.market.buy("ETH-USDT", 1, OrderType.LIMIT, 500) - self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - self.assertEquals(11, self.market.get_balance("ETH"), - msg="ETH Balance was not updated.") - self.assertEquals(1500, self.market.get_balance("USDT"), - msg="USDT Balance was not updated.") - - def test_bid_limit_order_trade_match(self): - """ - Test bid limit order fill and balance simulation, and market events emission - """ - trading_pair = TradingPair("ETH-USDT", "ETH", "USDT") - base_quantity = 2.0 - starting_base_balance = 200 - starting_quote_balance = 2000 - self.market.set_balance(trading_pair.base_asset, starting_base_balance) - self.market.set_balance(trading_pair.quote_asset, starting_quote_balance) - - best_bid_price = self.market.order_books[trading_pair.trading_pair].get_price(True) - client_order_id = self.market.buy(trading_pair.trading_pair, base_quantity, OrderType.LIMIT, best_bid_price) - - matched_limit_orders = TestUtils.get_match_limit_orders(self.market.limit_orders, { - "client_order_id": client_order_id, - "trading_pair": trading_pair.trading_pair, - "is_buy": True, - "base_currency": trading_pair.base_asset, - "quote_currency": trading_pair.quote_asset, - "price": best_bid_price, - "quantity": base_quantity - }) - # Market should track limit orders - self.assertEqual(1, len(matched_limit_orders)) - - # Market should on hold balance for the created order - self.assertAlmostEqual(float(self.market.on_hold_balances[trading_pair.quote_asset]), - base_quantity * best_bid_price) - # Market should reflect on hold balance in available balance - self.assertAlmostEqual(float(self.market.get_available_balance(trading_pair.quote_asset)), - starting_quote_balance - base_quantity * best_bid_price) - - matched_order_create_events = TestUtils.get_match_events(self.market_logger.event_log, BuyOrderCreatedEvent, { - "type": OrderType.LIMIT, - "amount": base_quantity, - "price": best_bid_price, - "order_id": client_order_id - }) - # Market should emit BuyOrderCreatedEvent - 
self.assertEqual(1, len(matched_order_create_events)) - - async def delay_trigger_event1(): - await asyncio.sleep(1) - trade_event1 = OrderBookTradeEvent( - trading_pair="ETH-USDT", timestamp=time.time(), type=TradeType.SELL, price=best_bid_price + 1, - amount=1.0) - self.market.order_books['ETH-USDT'].apply_trade(trade_event1) - - safe_ensure_future(delay_trigger_event1()) - self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent)) - - placed_bid_orders: List[LimitOrder] = [o for o in self.market.limit_orders if o.is_buy] - # Market should delete limit order when it is filled - self.assertEqual(0, len(placed_bid_orders)) - - matched_order_complete_events = TestUtils.get_match_events( - self.market_logger.event_log, BuyOrderCompletedEvent, { - "order_type": OrderType.LIMIT, - "quote_asset_amount": base_quantity * best_bid_price, - "order_id": client_order_id - }) - # Market should emit BuyOrderCompletedEvent - self.assertEqual(1, len(matched_order_complete_events)) - - matched_order_fill_events = TestUtils.get_match_events( - self.market_logger.event_log, OrderFilledEvent, { - "order_type": OrderType.LIMIT, - "trade_type": TradeType.BUY, - "trading_pair": trading_pair.trading_pair, - "order_id": client_order_id - }) - # Market should emit OrderFilledEvent - self.assertEqual(1, len(matched_order_fill_events)) - - # Market should have no more on hold balance - self.assertAlmostEqual(float(self.market.on_hold_balances[trading_pair.quote_asset]), 0) - # Market should update balance for the filled order - self.assertAlmostEqual(float(self.market.get_available_balance(trading_pair.quote_asset)), - starting_quote_balance - base_quantity * best_bid_price) - - def test_ask_limit_order_trade_match(self): - """ - Test ask limit order fill and balance simulation, and market events emission - """ - trading_pair = TradingPair("ETH-USDT", "ETH", "USDT") - base_quantity = 2.0 - starting_base_balance = 200 - starting_quote_balance = 2000 - self.market.set_balance(trading_pair.base_asset, starting_base_balance) - self.market.set_balance(trading_pair.quote_asset, starting_quote_balance) - - best_ask_price = self.market.order_books[trading_pair.trading_pair].get_price(False) - client_order_id = self.market.sell(trading_pair.trading_pair, base_quantity, OrderType.LIMIT, best_ask_price) - - matched_limit_orders = TestUtils.get_match_limit_orders(self.market.limit_orders, { - "client_order_id": client_order_id, - "trading_pair": trading_pair.trading_pair, - "is_buy": False, - "base_currency": trading_pair.base_asset, - "quote_currency": trading_pair.quote_asset, - "price": best_ask_price, - "quantity": base_quantity - }) - # Market should track limit orders - self.assertEqual(1, len(matched_limit_orders)) - - # Market should on hold balance for the created order - self.assertAlmostEqual(float(self.market.on_hold_balances[trading_pair.base_asset]), base_quantity) - # Market should reflect on hold balance in available balance - self.assertAlmostEqual(self.market.get_available_balance(trading_pair.base_asset), - starting_base_balance - base_quantity) - - self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent, timeout_seconds=10)) - matched_order_create_events = TestUtils.get_match_events(self.market_logger.event_log, SellOrderCreatedEvent, { - "type": OrderType.LIMIT, - "amount": base_quantity, - "price": best_ask_price, - "order_id": client_order_id - }) - # Market should emit BuyOrderCreatedEvent - self.assertEqual(1, len(matched_order_create_events)) - - async def 
delay_trigger_event2(): - await asyncio.sleep(1) - trade_event = OrderBookTradeEvent( - trading_pair=trading_pair.trading_pair, timestamp=time.time(), type=TradeType.BUY, - price=best_ask_price - 1, amount=base_quantity) - self.market.order_books[trading_pair.trading_pair].apply_trade(trade_event) - - safe_ensure_future(delay_trigger_event2()) - - self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent)) - - placed_ask_orders: List[LimitOrder] = [o for o in self.market.limit_orders if not o.is_buy] - - # Market should delete limit order when it is filled - self.assertEqual(0, len(placed_ask_orders)) - - matched_order_complete_events = TestUtils.get_match_events( - self.market_logger.event_log, SellOrderCompletedEvent, { - "order_type": OrderType.LIMIT, - "quote_asset_amount": base_quantity * base_quantity, - "order_id": client_order_id - }) - # Market should emit BuyOrderCompletedEvent - self.assertEqual(1, len(matched_order_complete_events)) - - matched_order_fill_events = TestUtils.get_match_events( - self.market_logger.event_log, OrderFilledEvent, { - "order_type": OrderType.LIMIT, - "trade_type": TradeType.SELL, - "trading_pair": trading_pair.trading_pair, - "order_id": client_order_id - }) - # Market should emit OrderFilledEvent - self.assertEqual(1, len(matched_order_fill_events)) - - # Market should have no more on hold balance - self.assertAlmostEqual(float(self.market.on_hold_balances[trading_pair.base_asset]), 0) - # Market should update balance for the filled order - self.assertAlmostEqual(self.market.get_available_balance(trading_pair.base_asset), - starting_base_balance - base_quantity) - - def test_order_cancellation(self): - trading_pair = TradingPair("ETH-USDT", "ETH", "USDT") - base_quantity = 2.0 - starting_base_balance = 200 - starting_quote_balance = 2000 - self.market.set_balance(trading_pair.base_asset, starting_base_balance) - self.market.set_balance(trading_pair.quote_asset, starting_quote_balance) - best_ask_price = self.market.order_books[trading_pair.trading_pair].get_price(False) - ask_client_order_id = self.market.sell(trading_pair.trading_pair, base_quantity, - OrderType.LIMIT, best_ask_price) - best_bid_price = self.market.order_books[trading_pair.trading_pair].get_price(True) - self.market.buy(trading_pair.trading_pair, base_quantity, OrderType.LIMIT, best_bid_price) - - # Market should track limit orders - self.assertEqual(2, len(self.market.limit_orders)) - self.market.cancel(trading_pair.trading_pair, ask_client_order_id) - - matched_limit_orders = TestUtils.get_match_limit_orders(self.market.limit_orders, { - "client_order_id": ask_client_order_id, - "trading_pair": trading_pair.trading_pair, - "is_buy": False, - "base_currency": trading_pair.base_asset, - "quote_currency": trading_pair.quote_asset, - "price": best_ask_price, - "quantity": base_quantity - }) - - # Market should remove canceled orders - self.assertEqual(0, len(matched_limit_orders)) - - matched_order_cancel_events = TestUtils.get_match_events( - self.market_logger.event_log, OrderCancelledEvent, { - "order_id": ask_client_order_id - }) - # Market should emit cancel event - self.assertEqual(1, len(matched_order_cancel_events)) - - def test_order_cancel_all(self): - trading_pair = TradingPair("ETH-USDT", "ETH", "USDT") - base_quantity = 2.0 - starting_base_balance = 200 - starting_quote_balance = 2000 - self.market.set_balance(trading_pair.base_asset, starting_base_balance) - self.market.set_balance(trading_pair.quote_asset, starting_quote_balance) - best_ask_price = 
self.market.order_books[trading_pair.trading_pair].get_price(False) - self.market.sell(trading_pair.trading_pair, base_quantity, - OrderType.LIMIT, best_ask_price) - best_bid_price = self.market.order_books[trading_pair.trading_pair].get_price(True) - self.market.buy(trading_pair.trading_pair, base_quantity, OrderType.LIMIT, best_bid_price) - - # Market should track limit orders - self.assertEqual(2, len(self.market.limit_orders)) - - asyncio.get_event_loop().run_until_complete(self.market.cancel_all(0)) - - # Market should remove all canceled orders - self.assertEqual(0, len(self.market.limit_orders)) - - matched_order_cancel_events = TestUtils.get_match_events( - self.market_logger.event_log, OrderCancelledEvent, {}) - # Market should emit cancel event - self.assertEqual(2, len(matched_order_cancel_events)) diff --git a/test/hummingbot/client/command/test_config_command.py b/test/hummingbot/client/command/test_config_command.py index 5272519517..f80304e310 100644 --- a/test/hummingbot/client/command/test_config_command.py +++ b/test/hummingbot/client/command/test_config_command.py @@ -82,7 +82,6 @@ def test_list_configs(self, notify_mock, get_strategy_config_map_mock): " | ∟ mqtt_external_events | True |\n" " | ∟ mqtt_autostart | False |\n" " | send_error_logs | True |\n" - " | pmm_script_mode | pmm_script_disabled |\n" " | gateway | |\n" " | ∟ gateway_api_host | localhost |\n" " | ∟ gateway_api_port | 15888 |\n" diff --git a/test/hummingbot/connector/derivative/bit_com_perpetual/test_bit_com_perpetual_user_stream_data_source.py b/test/hummingbot/connector/derivative/bit_com_perpetual/test_bit_com_perpetual_user_stream_data_source.py index 15413b0b13..c40bb3bb09 100644 --- a/test/hummingbot/connector/derivative/bit_com_perpetual/test_bit_com_perpetual_user_stream_data_source.py +++ b/test/hummingbot/connector/derivative/bit_com_perpetual/test_bit_com_perpetual_user_stream_data_source.py @@ -375,11 +375,12 @@ def test_listen_for_user_stream_connection_failed(self, sleep_mock, mock_ws): self._is_logged("ERROR", "Unexpected error while listening to user stream. 
Retrying after 5 seconds...")) - # @unittest.skip("Test with error") + @patch.object(BitComPerpetualUserStreamDataSource, "get_token") @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) @patch("hummingbot.core.data_type.user_stream_tracker_data_source.UserStreamTrackerDataSource._sleep") - def test_listen_for_user_stream_iter_message_throws_exception(self, sleep_mock, mock_ws): + def test_listen_for_user_stream_iter_message_throws_exception(self, sleep_mock, mock_ws, get_token_mock): msg_queue: asyncio.Queue = asyncio.Queue() + get_token_mock.return_value = "be4ffcc9-2b2b-4c3e-9d47-68bf062cf651" mock_ws.return_value = self.mocking_assistant.create_websocket_mock() mock_ws.return_value.receive.side_effect = Exception("TEST ERROR") sleep_mock.side_effect = asyncio.CancelledError # to finish the task execution diff --git a/test/hummingbot/connector/derivative/bitget_perpetual/test_bitget_perpetual_derivative.py b/test/hummingbot/connector/derivative/bitget_perpetual/test_bitget_perpetual_derivative.py index 7fbaa7132e..310bd09238 100644 --- a/test/hummingbot/connector/derivative/bitget_perpetual/test_bitget_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/bitget_perpetual/test_bitget_perpetual_derivative.py @@ -619,7 +619,7 @@ def configure_erroneous_http_fill_trade_response( callback: Optional[Callable] = lambda *args, **kwargs: None, ) -> str: url = web_utils.get_rest_url_for_endpoint( - endpoint=CONSTANTS.QUERY_ACTIVE_ORDER_PATH_URL + endpoint=CONSTANTS.USER_TRADE_RECORDS_PATH_URL ) regex_url = re.compile(url + r"\?.*") mock_api.get(regex_url, status=400, callback=callback) diff --git a/test/connector/exchange/bitfinex/__init__.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/__init__.py similarity index 100% rename from test/connector/exchange/bitfinex/__init__.py rename to test/hummingbot/connector/derivative/dydx_v4_perpetual/__init__.py diff --git a/test/connector/exchange/coinbase_pro/__init__.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/__init__.py similarity index 100% rename from test/connector/exchange/coinbase_pro/__init__.py rename to test/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/__init__.py diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/test_dydx_v4_data_source.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/test_dydx_v4_data_source.py new file mode 100644 index 0000000000..9d87262a39 --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/data_sources/test_dydx_v4_data_source.py @@ -0,0 +1,115 @@ +import asyncio +import time +from decimal import Decimal +from typing import Awaitable +from unittest import TestCase +from unittest.mock import patch + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.dydx_v4_perpetual import dydx_v4_perpetual_constants as CONSTANTS +from hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.dydx_v4_data_source import DydxPerpetualV4Client +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_derivative import DydxV4PerpetualDerivative + + +class DydxPerpetualV4ClientTests(TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @patch("hummingbot.core.utils.trading_pair_fetcher.TradingPairFetcher.fetch_all") + def setUp(self, _) -> None: + super().setUp() + 
self._original_async_loop = asyncio.get_event_loop() + self.async_loop = asyncio.new_event_loop() + self.async_tasks = [] + asyncio.set_event_loop(self.async_loop) + + self.secret_phrase = "mirror actor skill push coach wait confirm orchard " \ + "lunch mobile athlete gossip awake miracle matter " \ + "bus reopen team ladder lazy list timber render wait" + self._dydx_v4_chain_address = "dydx14zzueazeh0hj67cghhf9jypslcf9sh2n5k6art" + self.base_asset = "TRX" + self.quote_asset = "USD" # linear + self.trading_pair = "TRX-USD" + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.exchange = DydxV4PerpetualDerivative( + client_config_map, + self.secret_phrase, + self._dydx_v4_chain_address, + trading_pairs=[self.trading_pair], + ) + self.exchange._margin_fractions[self.trading_pair] = { + "initial": Decimal(0.1), + "maintenance": Decimal(0.05), + "clob_pair_id": "15", + "atomicResolution": -4, + "stepBaseQuantums": 1000000, + "quantumConversionExponent": -9, + "subticksPerTick": 1000000, + } + self.v4_client = DydxPerpetualV4Client( + self.secret_phrase, + self._dydx_v4_chain_address, + self.exchange + ) + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.async_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def create_task(self, coroutine: Awaitable) -> asyncio.Task: + task = self.async_loop.create_task(coroutine) + self.async_tasks.append(task) + return task + + @property + def _order_cancelation_request_successful_mock_response(self): + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock + "raw_log": "[]"} # noqa: mock + + @property + def order_creation_request_successful_mock_response(self): + return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock + "raw_log": "[]"} # noqa: mock + + def test_calculate_quantums(self): + result = DydxPerpetualV4Client.calculate_quantums(10, -2, 10) + self.assertEqual(result, 1000) + + def test_calculate_subticks(self): + result = DydxPerpetualV4Client.calculate_subticks(10, -2, -9, 1000000) + self.assertEqual(result, 100000000000000) + + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.dydx_v4_data_source.DydxPerpetualV4Client.send_tx_sync_mode") + def test_cancel_order(self, send_tx_sync_mode_mock): + send_tx_sync_mode_mock.return_value = self._order_cancelation_request_successful_mock_response + result = self.async_run_with_timeout(self.v4_client.cancel_order( + client_id=11, + clob_pair_id=15, + order_flags=CONSTANTS.ORDER_FLAGS_LONG_TERM, + good_til_block_time=int(time.time()) + CONSTANTS.ORDER_EXPIRATION + )) + + self.assertIn("txhash", result) + + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.data_sources.dydx_v4_data_source.DydxPerpetualV4Client.send_tx_sync_mode") + def test_place_order(self, send_tx_sync_mode_mock): + send_tx_sync_mode_mock.return_value = self.order_creation_request_successful_mock_response + result = self.async_run_with_timeout(self.v4_client.place_order( + market=self.trading_pair, + type="LIMIT", + side="BUY", + price=10, + size=1, + client_id=11, + post_only=False, + )) + + self.assertIn("txhash", result) + + def test_query_account(self): + sequence, acccount_number = self.async_run_with_timeout(self.v4_client.query_account()) + self.assertEqual(acccount_number, 33356) diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/programmable_v4_client.py 
b/test/hummingbot/connector/derivative/dydx_v4_perpetual/programmable_v4_client.py new file mode 100644 index 0000000000..a12b6ff66d --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/programmable_v4_client.py @@ -0,0 +1,15 @@ +import asyncio + + +class ProgrammableV4Client(): + def __init__(self): + self._cancel_order_responses = asyncio.Queue() + self._place_order_responses = asyncio.Queue() + + async def cancel_order(self, *args, **kwargs): + response = await self._cancel_order_responses.get() + return response + + async def place_order(self, *args, **kwargs): + response = await self._place_order_responses.get() + return response diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_api_order_book_data_source.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_api_order_book_data_source.py new file mode 100644 index 0000000000..9988b9e1e4 --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_api_order_book_data_source.py @@ -0,0 +1,559 @@ +import asyncio +import re +import unittest +from typing import Awaitable, Optional +from unittest.mock import AsyncMock, MagicMock, patch + +import dateutil.parser as dp +import ujson +from aioresponses import aioresponses +from bidict import bidict + +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_constants as CONSTANTS +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_web_utils as web_utils +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source import ( + DydxV4PerpetualAPIOrderBookDataSource, +) +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_derivative import DydxV4PerpetualDerivative +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType + + +class DydxV4PerpetualAPIOrderBookDataSourceUnitTests(unittest.TestCase): + # logging.Level required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + + cls.base_asset = "COINALPHA" + cls.quote_asset = "HBOT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + + def setUp(self) -> None: + super().setUp() + + self.log_records = [] + self.async_task: Optional[asyncio.Task] = None + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = DydxV4PerpetualDerivative( + client_config_map, + dydx_v4_perpetual_secret_phrase="mirror actor skill push coach wait confirm orchard " + "lunch mobile athlete gossip awake miracle matter " + "bus reopen team ladder lazy list timber render wait", + dydx_v4_perpetual_chain_address="dydx14zzueazeh0hj67cghhf9jypslcf9sh2n5k6art", + trading_pairs=[self.trading_pair], + trading_required=False, + ) + self.connector._set_trading_pair_symbol_map( + bidict({f"{self.base_asset}-{self.quote_asset}": self.trading_pair}) + ) + self.data_source = DydxV4PerpetualAPIOrderBookDataSource( + trading_pairs=[self.trading_pair], + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + ) + + 
self._original_full_order_book_reset_time = self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = -1 + + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self.mocking_assistant = NetworkMockingAssistant() + self.resume_test_event = asyncio.Event() + + def tearDown(self) -> None: + self.async_task and self.async_task.cancel() + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = self._original_full_order_book_reset_time + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records) + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + @aioresponses() + def test_get_last_trade_prices(self, mock_api): + url = web_utils.public_rest_url(CONSTANTS.PATH_MARKETS) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "markets": { + self.trading_pair: { + "market": self.trading_pair, + "status": "ONLINE", + "baseAsset": self.base_asset, + "quoteAsset": self.quote_asset, + "stepSize": "0.1", + "tickSize": "0.01", + "indexPrice": "12", + "oraclePrice": "101", + "priceChange24H": "0", + "nextFundingRate": "0.0000125000", + "nextFundingAt": "2022-07-06T12:20:53.000Z", + "minOrderSize": "1", + "type": "PERPETUAL", + "initialMarginFraction": "0.10", + "maintenanceMarginFraction": "0.05", + "baselinePositionSize": "1000", + "incrementalPositionSize": "1000", + "incrementalInitialMarginFraction": "0.2", + "volume24H": "0", + "trades24H": "0", + "openInterest": "0", + "maxPositionSize": "10000", + "assetResolution": "10000000", + "syntheticAssetId": "0x4c494e4b2d37000000000000000000", + } + } + } + + mock_api.get(regex_url, body=ujson.dumps(mock_response)) + + result = self.async_run_with_timeout(self.data_source.get_last_traded_prices([self.trading_pair])) + + self.assertEqual(1, len(result)) + self.assertEqual(float("101"), result[self.trading_pair]) + + @aioresponses() + def test_get_snapshot_raise_io_error(self, mock_api): + url = web_utils.public_rest_url(CONSTANTS.PATH_SNAPSHOT + "/" + self.trading_pair) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, status=400, body=ujson.dumps({})) + + with self.assertRaisesRegex( + IOError, + f"Error executing request GET {url}. " f"HTTP status is 400. Error: {{}}", + ): + self.async_run_with_timeout(self.data_source._order_book_snapshot(self.trading_pair)) + + @aioresponses() + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." 
+ "DydxV4PerpetualAPIOrderBookDataSource._time" + ) + def test_get_snapshot_successful(self, mock_api, mock_time): + mock_time.return_value = 1640780000 + + url = web_utils.public_rest_url(CONSTANTS.PATH_SNAPSHOT + "/" + self.trading_pair) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "asks": [{"size": "2.0", "price": "20.0"}], + "bids": [{"size": "1.0", "price": "10.0"}], + } + mock_api.get(regex_url, body=ujson.dumps(mock_response)) + + result = self.async_run_with_timeout(self.data_source._order_book_snapshot(self.trading_pair)) + + self.assertEqual(mock_response["asks"][0]["size"], str(result.asks[0].amount)) + self.assertEqual(mock_response["asks"][0]["price"], str(result.asks[0].price)) + self.assertEqual(mock_response["bids"][0]["size"], str(result.bids[0].amount)) + self.assertEqual(mock_response["bids"][0]["price"], str(result.bids[0].price)) + + self.assertEqual(result.content["update_id"], 1640780000000000) + + self.assertEqual(self.trading_pair, result.trading_pair) + + @aioresponses() + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source" + ".DydxV4PerpetualAPIOrderBookDataSource._time" + ) + def test_get_snapshot_raises_error(self, mock_api, mock_time): + mock_time.return_value = 1640780000 + + url = web_utils.public_rest_url(CONSTANTS.PATH_SNAPSHOT + "/" + self.trading_pair) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, status=400) + + with self.assertRaisesRegex(IOError, f"Error executing request GET {url}. HTTP status is 400. "): + self.async_run_with_timeout(self.data_source._order_book_snapshot(self.trading_pair)) + + @aioresponses() + def test_get_new_order_book(self, mock_api): + url = web_utils.public_rest_url(CONSTANTS.PATH_SNAPSHOT + "/" + self.trading_pair) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "asks": [{"size": "2.0", "price": "20.0"}], + "bids": [{"size": "1.0", "price": "10.0"}], + } + mock_api.get(regex_url, body=ujson.dumps(mock_response)) + + result = self.async_run_with_timeout(self.data_source.get_new_order_book(self.trading_pair)) + self.assertIsInstance(result, OrderBook) + self.assertEqual(1, len(list(result.bid_entries()))) + self.assertEqual(1, len(list(result.ask_entries()))) + self.assertEqual(float(mock_response["bids"][0]["price"]), list(result.bid_entries())[0].price) + self.assertEqual(float(mock_response["bids"][0]["size"]), list(result.bid_entries())[0].amount) + self.assertEqual(float(mock_response["asks"][0]["price"]), list(result.ask_entries())[0].price) + self.assertEqual(float(mock_response["asks"][0]["size"]), list(result.ask_entries())[0].amount) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." + "DydxV4PerpetualAPIOrderBookDataSource._sleep" + ) + def test_listen_for_subscriptions_raises_cancelled_exception(self, _, ws_connect_mock): + ws_connect_mock.side_effect = asyncio.CancelledError + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.data_source.listen_for_subscriptions()) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." 
+ "DydxV4PerpetualAPIOrderBookDataSource._sleep" + ) + def test_listen_for_subscriptions_raises_logs_exception(self, mock_sleep, ws_connect_mock): + mock_sleep.side_effect = lambda: (self.ev_loop.run_until_complete(asyncio.sleep(0.5))) + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + ws_connect_mock.return_value.receive.side_effect = lambda *_: self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR") + ) + self.async_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + + self.async_run_with_timeout(self.resume_test_event.wait(), 1.0) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error occurred when listening to order book streams. Retrying in 5 seconds...", + ) + ) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." + "DydxV4PerpetualAPIOrderBookDataSource._sleep" + ) + def test_listen_for_subscriptions_successful(self, mock_sleep, ws_connect_mock): + mock_sleep.side_effect = lambda: (self.ev_loop.run_until_complete(asyncio.sleep(0.5))) + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + mock_response = { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "connection_id": "d600a0d2-8039-4cd9-a010-2d6f5c336473", + "message_id": 2, + "id": self.trading_pair, + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "contents": {"offset": "3218381978", "bids": [], "asks": [["36.152", "304.8"]]}, + } + + self.mocking_assistant.add_websocket_aiohttp_message( + ws_connect_mock.return_value, message=ujson.dumps(mock_response) + ) + + self.async_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + + self.assertEqual(1, self.data_source._message_queue[self.data_source._diff_messages_queue_key].qsize()) + + message = self.data_source._message_queue[self.data_source._diff_messages_queue_key]._queue[0] + self.assertEqual(message, mock_response) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_subscribe_channels_successful(self, ws_connect_mock): + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + ws = self.async_run_with_timeout(self.data_source._connected_websocket_assistant()) + self.async_run_with_timeout(self.data_source._subscribe_channels(ws)) + + sent_messages = self.mocking_assistant.json_messages_sent_through_websocket(ws_connect_mock.return_value) + + self.assertEqual(len(sent_messages), 3) + + self.assertEqual(sent_messages[0]["type"], CONSTANTS.WS_TYPE_SUBSCRIBE) + self.assertEqual(sent_messages[0]["channel"], CONSTANTS.WS_CHANNEL_ORDERBOOK) + self.assertEqual(sent_messages[0]["id"], self.trading_pair) + + self.assertEqual(sent_messages[1]["type"], CONSTANTS.WS_TYPE_SUBSCRIBE) + self.assertEqual(sent_messages[1]["channel"], CONSTANTS.WS_CHANNEL_TRADES) + self.assertEqual(sent_messages[1]["id"], self.trading_pair) + + self.assertEqual(sent_messages[2]["type"], CONSTANTS.WS_TYPE_SUBSCRIBE) + self.assertEqual(sent_messages[2]["channel"], CONSTANTS.WS_CHANNEL_MARKETS) + self.assertEqual(sent_messages[2]["id"], self.trading_pair) + + self.assertTrue(self._is_logged("INFO", "Subscribed to public orderbook and trade channels...")) + + def test_subscribe_channels_canceled(self): + ws = MagicMock() + ws.send.side_effect = asyncio.CancelledError() + + with 
self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.data_source._subscribe_channels(ws)) + + def test_subscribe_channels_error(self): + ws = MagicMock() + ws.send.side_effect = Exception() + + with self.assertRaises(Exception): + self.async_run_with_timeout(self.data_source._subscribe_channels(ws)) + + self.assertTrue( + self._is_logged("ERROR", "Unexpected error occurred subscribing to order book trading and delta streams...") + ) + + def test_listen_for_trades_logs_exception(self): + incomplete_resp = { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "id": self.trading_pair, + "connection_id": "e2a6c717-6f77-4c1c-ac22-72ce2b7ed77d", + "channel": CONSTANTS.WS_CHANNEL_TRADES, + "message_id": 2, + "contents": { + "trades": [ + { + "side": "BUY", + "size": "100", + }, + {"side": "SELL", "size": "100", "price": "4000", "createdAt": "2020-11-29T14:00:03.382Z"}, + ] + }, + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.data_source._message_queue[self.data_source._trade_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_trades(self.ev_loop, msg_queue)) + + try: + self.async_run_with_timeout(self.listening_task) + except asyncio.CancelledError: + pass + + self.assertTrue(self._is_logged("ERROR", "Unexpected error when processing public trade updates from exchange")) + + def test_listen_for_trades_successful(self): + mock_queue = AsyncMock() + trade_event = { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "id": self.trading_pair, + "connection_id": "e2a6c717-6f77-4c1c-ac22-72ce2b7ed77d", + "channel": CONSTANTS.WS_CHANNEL_TRADES, + "message_id": 2, + "contents": { + "trades": [ + {"side": "BUY", "size": "100", "price": "4000", "createdAt": "2020-11-29T00:26:30.759Z"}, + {"side": "SELL", "size": "100", "price": "4000", "createdAt": "2020-11-29T14:00:03.382Z"}, + ] + }, + } + mock_queue.get.side_effect = [trade_event, asyncio.CancelledError()] + self.data_source._message_queue[self.data_source._trade_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_trades(self.ev_loop, msg_queue)) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + timestamp = dp.parse(trade_event["contents"]["trades"][0]["createdAt"]).timestamp() + trade_id = timestamp * 1e3 + + self.assertEqual(OrderBookMessageType.TRADE, msg.type) + self.assertEqual(trade_id, msg.trade_id) + self.assertEqual(timestamp, msg.timestamp) + + def test_listen_for_order_book_diffs_logs_exception(self): + incomplete_resp = { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "id": self.trading_pair, + "connection_id": "e2a6c717-6f77-4c1c-ac22-72ce2b7ed77d", + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "message_id": 2, + "contents": {"offset": "178", "bids": [["102"]], "asks": [["104", "0"]]}, + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.data_source._message_queue[self.data_source._diff_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue) + ) + + try: + self.async_run_with_timeout(self.listening_task) + except asyncio.CancelledError: + pass + + self.assertTrue( + self._is_logged("ERROR", "Unexpected error when processing 
public order book updates from exchange") + ) + + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." + "DydxV4PerpetualAPIOrderBookDataSource._time" + ) + def test_listen_for_order_book_diffs_successful(self, mock_time): + mock_time.return_value = 1640780000 + + mock_queue = AsyncMock() + diff_event = { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "id": self.trading_pair, + "connection_id": "e2a6c717-6f77-4c1c-ac22-72ce2b7ed77d", + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "message_id": 2, + "contents": {"offset": "178", "bids": [["102", "11"]], "asks": [["104", "0"]]}, + } + mock_queue.get.side_effect = [diff_event, asyncio.CancelledError()] + self.data_source._message_queue[self.data_source._diff_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue) + ) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(OrderBookMessageType.DIFF, msg.type) + self.assertEqual(-1, msg.trade_id) + self.assertEqual(1640780000, msg.timestamp) + # Decreased by 1 because previous nonce is already taked by the execution + expected_update_id = self.data_source._nonce_provider.get_tracking_nonce(timestamp=1640780000) - 1 + self.assertEqual(expected_update_id, msg.update_id) + + bids = msg.bids + asks = msg.asks + self.assertEqual(1, len(bids)) + self.assertEqual(102, bids[0].price) + self.assertEqual(11, bids[0].amount) + self.assertEqual(expected_update_id, bids[0].update_id) + self.assertEqual(1, len(asks)) + self.assertEqual(104, asks[0].price) + self.assertEqual(0, asks[0].amount) + self.assertEqual(expected_update_id, asks[0].update_id) + + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source" + ".DydxV4PerpetualAPIOrderBookDataSource._sleep" + ) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source" + ".DydxV4PerpetualAPIOrderBookDataSource._time" + ) + def test_listen_for_order_book_snapshots_log_exception(self, mock_time, mock_sleep): + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = 1 + + mock_time.return_value = 1640780000 + + mock_input_queue = AsyncMock() + mock_output_queue = AsyncMock() + + mock_sleep.side_effect = lambda _: self._create_exception_and_unlock_test_with_event(asyncio.CancelledError()) + + incomplete_resp = { + "type": CONSTANTS.WS_TYPE_SUBSCRIBED, + "connection_id": "87b25218-0170-4111-bfbf-d9f0a506fcab", + "message_id": 1, + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "id": self.trading_pair, + "contents": { + "bids": [ + { + "price": "1779", + }, + {"price": "1778.5", "size": "18"}, + ], + "asks": [{"price": "1782.8", "size": "10"}, {"price": "1784", "size": "2.81"}], + }, + } + + mock_input_queue.get.side_effect = [incomplete_resp] + self.data_source._message_queue[self.data_source._snapshot_messages_queue_key] = mock_input_queue + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_snapshots(ev_loop=self.ev_loop, output=mock_output_queue) + ) + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged("ERROR", "Unexpected error when processing public order book snapshots from exchange") + ) + + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source" + 
".DydxV4PerpetualAPIOrderBookDataSource._sleep" + ) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source" + ".DydxV4PerpetualAPIOrderBookDataSource._time" + ) + def test_listen_for_order_book_snapshots_successful(self, mock_time, mock_sleep): + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = 1 + + mock_time.return_value = 1640780000 + + mock_input_queue = AsyncMock() + output_queue = asyncio.Queue() + + mock_sleep.side_effect = lambda _: self._create_exception_and_unlock_test_with_event(asyncio.CancelledError()) + + resp = { + "type": CONSTANTS.WS_TYPE_SUBSCRIBED, + "connection_id": "87b25218-0170-4111-bfbf-d9f0a506fcab", + "message_id": 1, + "channel": CONSTANTS.WS_CHANNEL_ORDERBOOK, + "id": self.trading_pair, + "contents": { + "bids": [{"price": "1779", "size": "1"}, {"price": "1778.5", "size": "18"}], + "asks": [{"price": "1782.8", "size": "10"}, {"price": "1784", "size": "2.81"}], + }, + } + + mock_input_queue.get.side_effect = [resp, Exception] + self.data_source._message_queue[self.data_source._snapshot_messages_queue_key] = mock_input_queue + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_snapshots(ev_loop=self.ev_loop, output=output_queue) + ) + self.async_run_with_timeout(self.resume_test_event.wait()) + + msg: OrderBookMessage = self.async_run_with_timeout(output_queue.get()) + + self.assertEqual(OrderBookMessageType.SNAPSHOT, msg.type) + self.assertEqual(-1, msg.trade_id) + self.assertEqual(1640780000, msg.timestamp) + # Decreased by 1 because previous nonce is already taked by the execution + expected_update_id = self.data_source._nonce_provider.get_tracking_nonce(timestamp=1640780000) - 1 + self.assertEqual(expected_update_id, msg.update_id) + + bids = msg.bids + asks = msg.asks + self.assertEqual(2, len(bids)) + self.assertEqual(1779, bids[0].price) + self.assertEqual(1, bids[0].amount) + self.assertEqual(expected_update_id, bids[0].update_id) + self.assertEqual(2, len(asks)) + self.assertEqual(1782.8, asks[0].price) + self.assertEqual(10, asks[0].amount) + self.assertEqual(expected_update_id, asks[0].update_id) diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_derivative.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_derivative.py new file mode 100644 index 0000000000..dfa00d161d --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_derivative.py @@ -0,0 +1,1626 @@ +import asyncio +import json +import re +from decimal import Decimal +from functools import partial +from test.hummingbot.connector.derivative.dydx_v4_perpetual.programmable_v4_client import ProgrammableV4Client +from typing import Any, Callable, Dict, List, Optional, Tuple +from unittest.mock import AsyncMock, patch + +from aioresponses import aioresponses +from aioresponses.core import RequestCall + +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_constants as CONSTANTS +import hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_web_utils as web_utils +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_derivative import DydxV4PerpetualDerivative +from hummingbot.connector.test_support.perpetual_derivative_test import AbstractPerpetualDerivativeTests +from 
hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_row import OrderBookRow +from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase + + +class DydxV4PerpetualDerivativeTests(AbstractPerpetualDerivativeTests.PerpetualDerivativeTests): + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.dydx_v4_perpetual_secret_phrase = "mirror actor skill push coach wait confirm orchard " \ + "lunch mobile athlete gossip awake miracle matter " \ + "bus reopen team ladder lazy list timber render wait" + cls.dydx_v4_perpetual_chain_address = "dydx14zzueazeh0hj67cghhf9jypslcf9sh2n5k6art" + cls.subaccount_id = 0 + cls.base_asset = "TRX" + cls.quote_asset = "USD" # linear + cls.trading_pair = combine_to_hb_trading_pair(cls.base_asset, cls.quote_asset) + + @property + def all_symbols_url(self): + url = web_utils.private_rest_url(CONSTANTS.PATH_MARKETS) + return url + + @property + def latest_prices_url(self): + url = web_utils.private_rest_url(CONSTANTS.PATH_MARKETS + r"\?.*") + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + return regex_url + + @property + def network_status_url(self): + url = web_utils.public_rest_url(CONSTANTS.PATH_TIME) + return url + + @property + def trading_rules_url(self): + url = web_utils.private_rest_url(CONSTANTS.PATH_MARKETS) + return url + + @property + def order_creation_url(self): + url = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + return url + + @property + def balance_url(self): + path = f"{CONSTANTS.PATH_SUBACCOUNT}/{self.dydx_v4_perpetual_chain_address}/subaccountNumber/{self.subaccount_id}" + url = web_utils.private_rest_url(path) + return url + + @property + def expected_supported_position_modes(self) -> List[PositionMode]: + return [PositionMode.ONEWAY] + + @property + def order_creation_request_erroneous_mock_response(self): + return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock + "raw_log": "ERROR"} # noqa: mock + + @property + def order_creation_request_successful_mock_response(self): + return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock + "raw_log": "[]"} # noqa: mock + + def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock + "raw_log": "[]"} # noqa: mock + + def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock + "raw_log": "Error"} # noqa: mock + + @property + def all_symbols_request_mock_response(self): + mock_response = { + "markets": { + self.trading_pair: { + 'clobPairId': '0', 'ticker': self.trading_pair, 'status': 'ACTIVE', 'oraclePrice': '62730.24877', + 'priceChange24H': '-2721.74538', 'volume24H': '547242504.5571', 'trades24H': 115614, + 'nextFundingRate': '0.00000888425925925926', 'initialMarginFraction': '0.05', + 'maintenanceMarginFraction': '0.03', 'openInterest': '594.8603', 'atomicResolution': -10, + 
'quantumConversionExponent': -9, 'tickSize': '1', 'stepSize': '0.0001', + 'stepBaseQuantums': 1000000, 'subticksPerTick': 100000 + } + } + } + return mock_response + + @property + def latest_prices_request_mock_response(self): + mock_response = { + "markets": { + self.trading_pair: { + 'clobPairId': '0', 'ticker': self.trading_pair, 'status': 'ACTIVE', 'oraclePrice': '62730.24877', + 'priceChange24H': '-2721.74538', 'volume24H': '547242504.5571', 'trades24H': 115614, + 'nextFundingRate': '0.00000888425925925926', 'initialMarginFraction': '0.05', + 'maintenanceMarginFraction': '0.03', 'openInterest': '594.8603', 'atomicResolution': -10, + 'quantumConversionExponent': -9, 'tickSize': '1', 'stepSize': '0.0001', + 'stepBaseQuantums': 1000000, 'subticksPerTick': 100000 + } + } + } + return mock_response + + @property + def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: + mock_response = { + "markets": { + self.trading_pair: { + 'clobPairId': '0', 'ticker': self.trading_pair, 'status': 'ACTIVE', 'oraclePrice': '62730.24877', + 'priceChange24H': '-2721.74538', 'volume24H': '547242504.5571', 'trades24H': 115614, + 'nextFundingRate': '0.00000888425925925926', 'initialMarginFraction': '0.05', + 'maintenanceMarginFraction': '0.03', 'openInterest': '594.8603', 'atomicResolution': -10, + 'quantumConversionExponent': -9, 'tickSize': '1', 'stepSize': '0.0001', + 'stepBaseQuantums': 1000000, 'subticksPerTick': 100000 + }, + "INVALID-PAIR": { + 'clobPairId': '0', 'ticker': "INVALID-PAIR", 'status': 'INVALID', 'oraclePrice': '62730.24877', + 'priceChange24H': '-2721.74538', 'volume24H': '547242504.5571', 'trades24H': 115614, + 'nextFundingRate': '0.00000888425925925926', 'initialMarginFraction': '0.05', + 'maintenanceMarginFraction': '0.03', 'openInterest': '594.8603', 'atomicResolution': -10, + 'quantumConversionExponent': -9, 'tickSize': '1', 'stepSize': '0.0001', + 'stepBaseQuantums': 1000000, 'subticksPerTick': 100000 + }, + } + } + return "INVALID-PAIR", mock_response + + @property + def network_status_request_successful_mock_response(self): + mock_response = { + "iso": "2021-02-02T18:35:45Z", + "epoch": "1611965998.515", + } + return mock_response + + @property + def trading_rules_request_mock_response(self): + mock_response = { + "markets": { + self.trading_pair: { + 'clobPairId': '0', 'ticker': self.trading_pair, 'status': 'ACTIVE', 'oraclePrice': '62730.24877', + 'priceChange24H': '-2721.74538', 'volume24H': '547242504.5571', 'trades24H': 115614, + 'nextFundingRate': '0.00000888425925925926', 'initialMarginFraction': '0.05', + 'maintenanceMarginFraction': '0.03', 'openInterest': '594.8603', 'atomicResolution': -10, + 'quantumConversionExponent': -9, 'tickSize': '1', 'stepSize': '0.0001', + 'stepBaseQuantums': 1000000, 'subticksPerTick': 100000 + } + } + } + return mock_response + + @property + def trading_rules_request_erroneous_mock_response(self): + mock_response = { + "markets": { + self.trading_pair: { + "ticker": self.trading_pair, + "status": "ACTIVE", + } + } + } + return mock_response + + @property + def balance_request_mock_response_for_base_and_quote(self): + return {} + + @property + def balance_request_mock_response_only_base(self): + return {} + + @property + def balance_request_mock_response_only_quote(self): + mock_response = { + 'subaccount': + { + 'address': 'dydx1nwtryq2dxy3a3wr5zyyvdsl5t40xx8qgvk6cm3', # noqa: mock + 'subaccountNumber': 0, + 'equity': '10000', + 'freeCollateral': '10000', + 'openPerpetualPositions': { + self.trading_pair: { + 'market': 
self.trading_pair, 'status': 'OPEN', 'side': 'SHORT', 'size': '-100', + 'maxSize': '-100', + 'entryPrice': '0.11123', 'exitPrice': None, 'realizedPnl': '-0.000011', + 'unrealizedPnl': '-0.14263', + 'createdAt': '2024-04-22T13:47:37.066Z', 'createdAtHeight': '13859546', + 'closedAt': None, + 'sumOpen': '100', 'sumClose': '0', 'netFunding': '-0.000011' + } + }, + 'assetPositions': { + 'USDC': {'size': '92.486499', 'symbol': 'USDC', 'side': 'LONG', 'assetId': '0'} + }, + 'marginEnabled': True + } + } + return mock_response + + @property + def balance_event_websocket_update(self): + mock_response = { + 'type': 'subscribed', 'connection_id': '53f4a7b1-410d-4687-9447-d6a367e30c8a', 'message_id': 1, + 'channel': 'v4_subaccounts', + 'id': 'dydx1nwtryq2dxy3a3wr5zyyvdsl5t40xx8qgvk6cm3/0', 'contents': { # noqa: mock + 'subaccount': { + 'address': 'dydx1nwtryq2dxy3a3wr5zyyvdsl5t40xx8qgvk6cm3', 'subaccountNumber': 0, # noqa: mock + 'equity': '0', 'freeCollateral': '700', 'openPerpetualPositions': { + 'TRX-USD': {'market': 'TRX-USD', 'status': 'OPEN', 'side': 'SHORT', 'size': '-100', + 'maxSize': '-100', + 'entryPrice': '0.11123', 'exitPrice': None, 'realizedPnl': '0.001147', + 'unrealizedPnl': '-0.185044469', 'createdAt': '2024-04-22T13:47:37.066Z', + 'createdAtHeight': '13859546', 'closedAt': None, 'sumOpen': '100', 'sumClose': '0', + 'netFunding': '0.001147'}}, + 'assetPositions': { + 'USDC': {'size': '92.487657', 'symbol': 'USDC', 'side': 'LONG', 'assetId': '0'}}, + 'marginEnabled': True + }, 'orders': []} + } + return mock_response + + @property + def expected_latest_price(self): + return 62730.24877 + + @property + def target_funding_info_index_price(self): + return 2 + + @property + def expected_supported_order_types(self): + return [OrderType.LIMIT, OrderType.LIMIT_MAKER, OrderType.MARKET] + + @property + def expected_trading_rule(self): + trading_rules_resp = self.trading_rules_request_mock_response["markets"][self.trading_pair] + return TradingRule( + trading_pair=self.trading_pair, + min_price_increment=Decimal(trading_rules_resp["tickSize"]), + min_base_amount_increment=Decimal(trading_rules_resp["stepSize"]), + supports_limit_orders=True, + supports_market_orders=True, + buy_order_collateral_token=self.quote_asset, + sell_order_collateral_token=self.quote_asset, + ) + + @property + def expected_logged_error_for_erroneous_trading_rule(self): + return "Error updating trading rules" + + @property + def expected_exchange_order_id(self): + return self.exchange_order_id_prefix + "1" + + @property + def is_order_fill_http_update_included_in_status_update(self) -> bool: + return False + + @property + def is_order_fill_http_update_executed_during_websocket_order_event_processing(self) -> bool: + return False + + @property + def expected_partial_fill_price(self) -> Decimal: + return Decimal("100") + + @property + def expected_partial_fill_amount(self) -> Decimal: + return Decimal("10") + + @property + def expected_partial_fill_fee(self) -> TradeFeeBase: + return AddedToCostTradeFee( + percent_token=self.quote_asset, + flat_fees=[TokenAmount(token=self.quote_asset, amount=Decimal("0.1"))], + ) + + @property + def expected_fill_fee(self) -> TradeFeeBase: + return AddedToCostTradeFee( + percent_token=self.quote_asset, + flat_fees=[TokenAmount(token=self.quote_asset, amount=Decimal("10"))], + ) + + @property + def expected_fill_trade_id(self) -> str: + return "someFillId" + + def exchange_symbol_for_tokens(self, base_token: str, quote_token: str) -> str: + return f"{base_token}-{quote_token}" + + 
@staticmethod + def _callback_wrapper_with_response(callback: Callable, response: Any, *args, **kwargs): + callback(args, kwargs) + if isinstance(response, Exception): + raise response + else: + return response + + def create_exchange_instance(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + exchange = DydxV4PerpetualDerivative( + client_config_map, + self.dydx_v4_perpetual_secret_phrase, + self.dydx_v4_perpetual_chain_address, + trading_pairs=[self.trading_pair], + ) + exchange._tx_client = ProgrammableV4Client() + + exchange._margin_fractions[self.trading_pair] = { + "initial": Decimal(0.1), + "maintenance": Decimal(0.05), + "clob_pair_id": "15", + "atomicResolution": -4, + "stepBaseQuantums": 1000000, + "quantumConversionExponent": -9, + "subticksPerTick": 1000000, + } + return exchange + + def validate_order_creation_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def validate_order_cancelation_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def validate_order_status_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + if request_params is not None: + self.assertEqual(self.dydx_v4_perpetual_chain_address, request_params["address"]) + self.assertEqual(CONSTANTS.LAST_FILLS_MAX, request_params["limit"]) + + def validate_trades_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + if request_params is not None: + self.assertEqual(self.dydx_v4_perpetual_chain_address, request_params["address"]) + self.assertEqual(CONSTANTS.LAST_FILLS_MAX, request_params["limit"]) + + def configure_all_symbols_response( + self, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> List[str]: + + url = self.all_symbols_url + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = self.all_symbols_request_mock_response + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return [url] + + def configure_successful_creation_order_status_response( + self, callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + creation_response = self.order_creation_request_successful_mock_response + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, callback=callback, response=creation_response + ) + self.exchange._tx_client._place_order_responses = mock_queue + return "" + + def configure_erroneous_creation_order_status_response( + self, callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + creation_response = self.order_creation_request_erroneous_mock_response + + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, callback=callback, response=creation_response + ) + self.exchange._tx_client._place_order_responses = mock_queue + return "" + + def configure_successful_cancelation_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + response = self._order_cancelation_request_successful_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._tx_client._cancel_order_responses = mock_queue + return "" + + def 
configure_erroneous_cancelation_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + response = self._order_cancelation_request_erroneous_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._tx_client._cancel_order_responses = mock_queue + return "" + + def configure_one_successful_one_erroneous_cancel_all_response( + self, successful_order: InFlightOrder, erroneous_order: InFlightOrder, mock_api: aioresponses + ) -> List[str]: + response = self._order_cancelation_request_successful_mock_response(order=successful_order) + err_response = self._order_cancelation_request_erroneous_mock_response(order=erroneous_order) + + self.exchange._tx_client._cancel_order_responses.put_nowait(response) + self.exchange._tx_client._cancel_order_responses.put_nowait(err_response) + return [] + + def configure_order_not_found_error_cancelation_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + # Implement the expected not found response when enabling test_cancel_order_not_found_in_the_exchange + raise NotImplementedError + + def configure_order_not_found_error_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + # Implement the expected not found response when enabling + # test_lost_order_removed_if_not_found_during_order_status_update + raise NotImplementedError + + def configure_completely_filled_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + """ + :return: the URL configured + """ + url_order_status = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + regex_url = re.compile(f"^{url_order_status}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response_order_status = self._order_status_request_completely_filled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response_order_status), callback=callback) + + return [url_order_status] + + def configure_canceled_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + """ + :return: the URL configured + """ + url_fills = web_utils.private_rest_url(CONSTANTS.PATH_FILLS) + regex_url = re.compile(f"^{url_fills}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response_fills = self._order_fills_request_canceled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response_fills), callback=callback) + + url_order_status = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + regex_url = re.compile(f"^{url_order_status}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response_order_status = self._order_status_request_canceled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response_order_status), callback=callback) + + return [url_fills, url_order_status] + + def configure_open_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + """ + :return: the URL configured + """ + url = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + regex_url = re.compile(f"^{url}".replace(".", 
r"\.").replace("?", r"\?") + ".*") + response = self._order_status_request_open_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return [url] + + def configure_http_error_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + """ + :return: the URL configured + """ + url = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + + regex_url = re.compile(url + r"\?.*") + mock_api.get(regex_url, status=404, callback=callback) + return url + + def configure_partially_filled_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + # Dydx has no partial fill status + raise NotImplementedError + + def configure_partial_fill_trade_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + # Dydx has no partial fill status + raise NotImplementedError + + def configure_erroneous_http_fill_trade_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + """ + :return: the URL configured + """ + url = web_utils.private_rest_url(CONSTANTS.PATH_ORDERS) + regex_url = re.compile(url + r"\?.*") + mock_api.get(regex_url, status=400, callback=callback) + return url + + def configure_full_fill_trade_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + """ + :return: the URL configured + """ + url = web_utils.private_rest_url(CONSTANTS.PATH_FILLS) + + regex_url = re.compile(url + r"\?.*") + response = self._order_fills_request_full_fill_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def _order_fills_request_completely_filled_mock_response(self, order: InFlightOrder) -> Any: + return { + "fills": [ + { + "id": self.expected_fill_trade_id, + "side": order.trade_type.name, + "liquidity": "MAKER" if order.order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] else "TAKER", + "market": self.trading_pair, + "orderId": self.exchange_order_id_prefix + "1", + "size": str(order.amount), + "price": str(order.price), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "transactionId": "1", + "orderClientId": order.client_order_id, + "createdAt": "2020-09-22T20:25:26.399Z", + } + ] + } + + def _order_fills_request_canceled_mock_response(self, order: InFlightOrder) -> Any: + return {"fills": []} + + def _order_status_request_completely_filled_mock_response(self, order: InFlightOrder) -> Any: + mock_response = [ + { + "id": self.exchange_order_id_prefix + "1", + "clientId": order.client_order_id, + "accountId": "someAccountId", + "market": self.trading_pair, + "side": order.trade_type.name, + "price": str(order.price), + "triggerPrice": None, + "trailingPercent": None, + "size": str(order.amount), + "remainingSize": "0", + "type": "LIMIT", + "createdAt": "2021-01-04T23:44:59.690Z", + "unfillableAt": None, + "expiresAt": "2022-12-21T21:30:20.200Z", + "status": "FILLED", + "timeInForce": "GTT", + "postOnly": False, + "reduceOnly": False, + "cancelReason": None, + } + ] + return mock_response + + def _order_status_request_canceled_mock_response(self, order: InFlightOrder) -> Any: + resp = [ + { + "id": self.exchange_order_id_prefix + "1", + "clientId": 
order.client_order_id, + "accountId": "someAccountId", + "market": self.trading_pair, + "side": order.trade_type.name, + "price": str(order.price), + "triggerPrice": None, + "trailingPercent": None, + "size": 0, + "remainingSize": "0", + "type": "LIMIT", + "createdAt": "2021-01-04T23:44:59.690Z", + "unfillableAt": None, + "expiresAt": "2022-12-21T21:30:20.200Z", + "status": "CANCELED", + "timeInForce": "GTT", + "postOnly": False, + "reduceOnly": False, + "cancelReason": None, + } + ] + return resp + + def _order_status_request_open_mock_response(self, order: InFlightOrder) -> Any: + resp = self._order_status_request_completely_filled_mock_response(order) + resp[0]["status"] = "OPEN" + resp[0]["remainingSize"] = resp[0]["size"] + return resp + + def _order_fills_request_partial_fill_mock_response(self, order: InFlightOrder): + return { + "fills": [ + { + "id": self.expected_fill_trade_id, + "side": order.trade_type.name, + "liquidity": "MAKER" if order.order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] else "TAKER", + "market": self.trading_pair, + "orderId": order.exchange_order_id, + "size": str(order.amount), + "price": str(order.price), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "transactionId": "1", + "orderClientId": order.client_order_id, + "createdAt": "2020-09-22T20:25:26.399Z", + } + ] + } + + def _order_fills_request_full_fill_mock_response(self, order: InFlightOrder): + return { + "fills": [ + { + "id": self.expected_fill_trade_id, + "side": order.trade_type.name, + "liquidity": "MAKER" if order.order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] else "TAKER", + "market": self.trading_pair, + "orderId": order.exchange_order_id, + "size": str(order.amount), + "price": str(order.price), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "transactionId": "1", + "orderClientId": order.client_order_id, + "createdAt": "2020-09-22T20:25:26.399Z", + } + ] + } + + def _simulate_trading_rules_initialized(self): + self.exchange._trading_rules = { + self.trading_pair: TradingRule( + trading_pair=self.trading_pair, + min_order_size=Decimal(str(0.01)), + min_price_increment=Decimal(str(0.0001)), + min_base_amount_increment=Decimal(str(0.000001)), + ) + } + self.exchange._margin_fractions[self.trading_pair] = { + "initial": Decimal(0.1), + "maintenance": Decimal(0.05), + "clob_pair_id": "15", + "atomicResolution": -4, + "stepBaseQuantums": 1000000, + "quantumConversionExponent": -9, + "subticksPerTick": 1000000, + } + + def order_event_for_new_order_websocket_update(self, order: InFlightOrder): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "connection_id": "someConnectionId", + "message_id": 2, + "contents": { + "orders": [ + { + "id": order.exchange_order_id, + "clientId": self.client_order_id_prefix + "1", + "ticker": self.trading_pair, + "side": order.trade_type.name, + "size": str(order.amount), + "remainingSize": "0", + "price": str(order.price), + "limitFee": str(self.expected_fill_fee.flat_fees[0].amount), + "type": "LIMIT", + "status": "OPEN", + "signature": "0x456...", + "timeInForce": "FOK", + "postOnly": "False", + "expiresAt": "2021-09-22T20:22:26.399Z", + "createdAt": "2020-09-22T20:22:26.399Z", + } + ] + } + } + + def order_event_for_canceled_order_websocket_update(self, order: InFlightOrder): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "connection_id": "someConnectionId", + "message_id": 2, + "contents": { + "orders": [ + { + "id": 
order.exchange_order_id, + "clientId": order.client_order_id, + "ticker": self.trading_pair, + "side": order.trade_type.name, + "size": str(order.amount), + "remainingSize": "0", + "price": str(order.price), + "limitFee": str(self.expected_fill_fee.flat_fees[0].amount), + "type": "LIMIT", + "status": "CANCELED", + "signature": "0x456...", + "timeInForce": "FOK", + "postOnly": "False", + "expiresAt": "2021-09-22T20:22:26.399Z", + "createdAt": "2020-09-22T20:22:26.398Z", + } + ] + } + } + + def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "connection_id": "someConnectionId", + "message_id": 2, + "contents": { + "orders": [ + { + "id": order.exchange_order_id, + "clientId": order.client_order_id, + "ticker": self.trading_pair, + "side": order.trade_type.name, + "size": str(order.amount), + "remainingSize": "0", + "price": str(order.price), + "limitFee": str(self.expected_fill_fee.flat_fees[0].amount), + "type": "LIMIT", + "status": "FILLED", + "signature": "0x456...", + "timeInForce": "FOK", + "postOnly": "False", + "expiresAt": "2021-09-22T20:22:26.399Z", + "createdAt": "2020-09-22T20:22:26.399Z", + } + ] + } + } + + def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "connection_id": "someConnectionId", + "message_id": 2, + "contents": { + "fills": [ + { + "id": self.expected_fill_trade_id, + "side": order.trade_type.name, + "liquidity": "MAKER" + if order.order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] + else "TAKER", + "ticker": self.trading_pair, + "orderId": order.exchange_order_id, + "size": str(order.amount), + "price": str(order.price), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "transactionId": "1", + "orderClientId": order.client_order_id, + "createdAt": "2020-09-22T20:25:26.399Z", + } + ] + } + } + + @property + def funding_info_url(self): + url = web_utils.public_rest_url(CONSTANTS.PATH_MARKETS) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def funding_payment_url(self): + pass + + @property + def funding_info_mock_response(self): + mock_response = { + "markets": { + self.trading_pair: { + "market": self.trading_pair, + "status": "ONLINE", + "baseAsset": self.base_asset, + "quoteAsset": self.quote_asset, + "stepSize": "0.1", + "tickSize": "0.01", + "oraclePrice": "2", + "priceChange24H": "0", + "nextFundingRate": "3", + "nextFundingAt": "2022-07-06T09:17:33.000Z", + "minOrderSize": "1", + "type": "PERPETUAL", + "initialMarginFraction": "0.10", + "maintenanceMarginFraction": "0.05", + "baselinePositionSize": "1000", + "incrementalPositionSize": "1000", + "incrementalInitialMarginFraction": "0.2", + "volume24H": "0", + "trades24H": "0", + "openInterest": "0", + "maxPositionSize": "10000", + "assetResolution": "10000000", + "syntheticAssetId": "0x4c494e4b2d37000000000000000000", + } + } + } + return mock_response + + def validate_auth_credentials_present(self, request_call: RequestCall): + request_headers = request_call.kwargs["headers"] + self.assertEqual("application/json", request_headers["Accept"]) + + @property + def funding_payment_mock_response(self): + raise NotImplementedError + + def empty_funding_payment_mock_response(self): + pass + + @aioresponses() + def test_funding_payment_polling_loop_sends_update_event(self, *args, **kwargs): + pass + + 
def position_event_for_full_fill_websocket_update(self, order: InFlightOrder, unrealized_pnl: float): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "channel": CONSTANTS.WS_CHANNEL_ACCOUNTS, + "connection_id": "someConnectionId", + "message_id": 2, + "contents": { + 'perpetualPositions': [{ + 'address': 'dydx1nwtryq2dxy3a3wr5zyyvdsl5t40xx8qgvk6cm3', 'subaccountNumber': 0, # noqa: mock + 'positionId': '5388e4bc-0e4c-5794-8dec-da4ace4b6189', + 'market': self.trading_pair, + 'side': "LONG" if order.trade_type == TradeType.BUY else "SHORT", + 'status': 'CLOSED', + 'size': str(order.amount) if order.trade_type == TradeType.BUY else str( + -order.amount), 'maxSize': '-100', 'netFunding': '0.001147', + 'entryPrice': '10000', 'exitPrice': None, 'sumOpen': '100', 'sumClose': '0', + 'realizedPnl': '0.001147', 'unrealizedPnl': str(unrealized_pnl) + }], + 'assetPositions': [ + {'address': 'dydx1nwtryq2dxy3a3wr5zyyvdsl5t40xx8qgvk6cm3', 'subaccountNumber': 0, # noqa: mock + 'positionId': 'fb5b6131-2871-54c1-86a2-5be9147fe4bc', 'assetId': '0', 'symbol': 'USDC', + 'side': 'LONG', + 'size': '103.802996'}]} + } + + def configure_successful_set_position_mode( + self, + position_mode: PositionMode, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ): + # dYdX only supports one-way position mode + pass + + def configure_failed_set_position_mode( + self, + position_mode: PositionMode, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> Tuple[str, str]: + # dYdX only supports one-way position mode, so this should never be called + pass + + def configure_failed_set_leverage( + self, + leverage: int, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> Tuple[str, str]: + url = web_utils.public_rest_url(CONSTANTS.PATH_MARKETS) + regex_url = re.compile(f"^{url}") + + # No "markets" in response + mock_response = {} + mock_api.get(regex_url, body=json.dumps(mock_response), callback=callback) + + return url, "Failed to obtain markets information."
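+ # Editorial note: dYdX v4 has no dedicated set-leverage endpoint; judging from these helpers, the connector + # validates the requested leverage against the margin fractions returned by the markets endpoint, which is + # why both the failed and successful leverage cases below mock the PATH_MARKETS response.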
+ + def configure_successful_set_leverage( + self, + leverage: int, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ): + url = web_utils.public_rest_url(CONSTANTS.PATH_MARKETS) + regex_url = re.compile(f"^{url}") + + # "markets" present in the response, including the margin fractions + mock_response = { + "markets": { + self.trading_pair: { + "initialMarginFraction": "0.10", + "maintenanceMarginFraction": "0.05", + "clobPairId": "15", + "atomicResolution": -4, + "stepBaseQuantums": 1000000, + "quantumConversionExponent": -9, + "subticksPerTick": 1000000, + } + } + } + mock_api.get(regex_url, body=json.dumps(mock_response), callback=callback) + + return url + + def funding_info_event_for_websocket_update(self): + return { + "type": CONSTANTS.WS_TYPE_CHANNEL_DATA, + "connection_id": "someConnectionId", + "channel": CONSTANTS.WS_CHANNEL_MARKETS, + "message_id": 2, + "contents": { + "markets": { + self.trading_pair: { + "oraclePrice": "100.23", + "priceChange24H": "0.12", + "initialMarginFraction": "1.23", + } + } + } + } + + def test_get_buy_and_sell_collateral_tokens(self): + self._simulate_trading_rules_initialized() + + linear_buy_collateral_token = self.exchange.get_buy_collateral_token(self.trading_pair) + linear_sell_collateral_token = self.exchange.get_sell_collateral_token(self.trading_pair) + + self.assertEqual(self.quote_asset, linear_buy_collateral_token) + self.assertEqual(self.quote_asset, linear_sell_collateral_token) + + @aioresponses() + def test_update_balances(self, mock_api): + response = self.balance_request_mock_response_only_quote + + self._configure_balance_response(response=response, mock_api=mock_api) + self.async_run_with_timeout(self.exchange._update_balances()) + + available_balances = self.exchange.available_balances + total_balances = self.exchange.get_all_balances() + + self.assertNotIn(self.base_asset, available_balances) + self.assertNotIn(self.base_asset, total_balances) + self.assertEqual(Decimal("10000"), available_balances["USD"]) + self.assertEqual(Decimal("10000"), total_balances["USD"]) + + def test_user_stream_balance_update(self): + if self.exchange.real_time_balance_update: + self.exchange._set_current_timestamp(1640780000) + + balance_event = self.balance_event_websocket_update + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [balance_event, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertEqual(Decimal("700"), self.exchange.available_balances["USD"]) + self.assertEqual(Decimal("0"), self.exchange.get_balance("USD")) + + @aioresponses() + def test_update_order_status_when_order_has_not_changed_and_one_partial_fill(self, mock_api): + # Dydx has no partial fill status + pass + + @aioresponses() + def test_update_order_status_when_order_partially_filled_and_cancelled(self, mock_api): + # Dydx has no partial fill status + pass + + @aioresponses() + def test_user_stream_update_for_partially_cancelled_order(self, mock_api): + # Dydx has no partial fill status + pass + + @aioresponses() + def test_set_position_mode_success(self, mock_api): + # There's only ONEWAY position mode + pass + + @aioresponses() + def test_set_position_mode_failure(self, mock_api): + # There's only ONEWAY position mode + pass + + @aioresponses() + def test_cancel_order_not_found_in_the_exchange(self, mock_api): + # Disabling this test because the connector has not been updated yet to validate + 
# order not found during cancellation (check _is_order_not_found_during_cancelation_error) + pass + + @aioresponses() + def test_lost_order_removed_if_not_found_during_order_status_update(self, mock_api): + # Disabling this test because the connector has not been updated yet to validate + # order not found during status update (check _is_order_not_found_during_status_update_error) + pass + + @aioresponses() + def test_update_order_status_when_canceled(self, mock_api): + self._simulate_trading_rules_initialized() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=self.expected_exchange_order_id, + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.configure_canceled_order_status_response( + order=order, + mock_api=mock_api) + + self.async_run_with_timeout(self.exchange._update_order_status()) + cancel_event = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + self.assertEqual(order.exchange_order_id, cancel_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue( + self.is_logged("INFO", f"Successfully canceled order {order.client_order_id}.") + ) + + @aioresponses() + def test_update_order_status_when_filled(self, mock_api): + self._simulate_trading_rules_initialized() + self.exchange._set_current_timestamp(1640780000) + request_sent_event = asyncio.Event() + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=self.exchange_order_id_prefix + "1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + position_action=PositionAction.OPEN, + ) + order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.configure_completely_filled_order_status_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + + if self.is_order_fill_http_update_included_in_status_update: + trade_url = self.configure_full_fill_trade_response( + order=order, + mock_api=mock_api) + else: + # If the fill events will not be requested with the order status, we need to manually set the event + # to allow the ClientOrderTracker to process the last status update + order.completely_filled_event.set() + self.async_run_with_timeout(self.exchange._update_order_status()) + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(request_sent_event.wait()) + + self.async_run_with_timeout(order.wait_until_completely_filled()) + self.assertTrue(order.is_done) + + if self.is_order_fill_http_update_included_in_status_update: + self.assertTrue(order.is_filled) + + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) + + fill_event = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + 
self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(order.price, fill_event.price) + self.assertEqual(order.amount, fill_event.amount) + self.assertEqual(self.expected_fill_fee, fill_event.trade_fee) + self.assertEqual(PositionAction.OPEN.value, fill_event.position) + + buy_event = self.buy_order_completed_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) + self.assertEqual(order.client_order_id, buy_event.order_id) + self.assertEqual(order.base_asset, buy_event.base_asset) + self.assertEqual(order.quote_asset, buy_event.quote_asset) + self.assertEqual( + order.amount if self.is_order_fill_http_update_included_in_status_update else Decimal(0), + buy_event.base_asset_amount) + self.assertEqual( + order.amount * order.price + if self.is_order_fill_http_update_included_in_status_update + else Decimal(0), + buy_event.quote_asset_amount) + self.assertEqual(order.order_type, buy_event.order_type) + self.assertEqual(order.exchange_order_id, buy_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue( + self.is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." + ) + ) + + @aioresponses() + def test_lost_order_included_in_order_fills_update_and_not_in_order_status_update(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + request_sent_event = asyncio.Event() + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + for _ in range(self.exchange._order_tracker._lost_order_count_limit + 1): + self.async_run_with_timeout( + self.exchange._order_tracker.process_order_not_found(client_order_id=order.client_order_id)) + + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + + self.configure_completely_filled_order_status_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + + if self.is_order_fill_http_update_included_in_status_update: + trade_url = self.configure_full_fill_trade_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + else: + # If the fill events will not be requested with the order status, we need to manually set the event + # to allow the ClientOrderTracker to process the last status update + order.completely_filled_event.set() + request_sent_event.set() + + self.async_run_with_timeout(self.exchange._update_order_status()) + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(request_sent_event.wait()) + + self.async_run_with_timeout(order.wait_until_completely_filled()) + self.assertTrue(order.is_done) + self.assertTrue(order.is_failure) + + if self.is_order_fill_http_update_included_in_status_update: + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) + + fill_event = self.order_filled_logger.event_log[0] 
+ self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(order.price, fill_event.price) + self.assertEqual(order.amount, fill_event.amount) + self.assertEqual(self.expected_fill_fee, fill_event.trade_fee) + + self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + self.assertIn(order.client_order_id, self.exchange._order_tracker.all_fillable_orders) + self.assertFalse( + self.is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." + ) + ) + + request_sent_event.clear() + + # Configure again the response to the order fills request since it is required by lost orders update logic + self.configure_full_fill_trade_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.async_run_with_timeout(self.exchange._update_lost_orders_status()) + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertTrue(order.is_done) + self.assertTrue(order.is_failure) + + self.assertEqual(1, len(self.order_filled_logger.event_log)) + self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + self.assertNotIn(order.client_order_id, self.exchange._order_tracker.all_fillable_orders) + self.assertFalse( + self.is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." + ) + ) + + @aioresponses() + def test_create_buy_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertIn(order_id, self.exchange.in_flight_orders) + + create_event = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." 
+ ) + ) + + @aioresponses() + def test_create_sell_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id = self.place_sell_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertIn(order_id, self.exchange.in_flight_orders) + + create_event = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + ) + ) + + @aioresponses() + def test_create_buy_market_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + order_book = OrderBook() + self.exchange.order_book_tracker._order_books[self.trading_pair] = order_book + order_book.apply_snapshot( + bids=[], + asks=[OrderBookRow(price=5000, amount=20, update_id=1)], + update_id=1, + ) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id = self.exchange.buy( + trading_pair=self.trading_pair, + amount=Decimal("10"), + order_type=OrderType.MARKET, + price=Decimal("50000"), + position_action=PositionAction.OPEN, + ) + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEqual(1, len(self.exchange.in_flight_orders)) + self.assertIn(order_id, self.exchange.in_flight_orders) + + @aioresponses() + def test_create_sell_market_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + order_book = OrderBook() + self.exchange.order_book_tracker._order_books[self.trading_pair] = order_book + order_book.apply_snapshot( + bids=[OrderBookRow(price=5000, amount=20, update_id=1)], + asks=[], + update_id=1, + ) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id = self.exchange.sell( + trading_pair=self.trading_pair, + amount=Decimal("10"), + order_type=OrderType.MARKET, + price=Decimal("10_000"), + position_action=PositionAction.OPEN, + ) + + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEqual(1, len(self.exchange.in_flight_orders)) + self.assertIn(order_id, self.exchange.in_flight_orders) + + def test_create_order_fails_and_raises_failure_event(self): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_erroneous_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn(order_id, self.exchange.in_flight_orders) + + 
self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)", + ) + ) + + @aioresponses() + def test_create_order_to_close_long_position(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + leverage = 5 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_sell_order(position_action=PositionAction.CLOSE) + self.async_run_with_timeout(request_sent_event.wait()) + + create_event = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.CLOSE.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." + ) + ) + + @aioresponses() + def test_create_order_to_close_short_position(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_successful_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + leverage = 4 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_buy_order(position_action=PositionAction.CLOSE) + self.async_run_with_timeout(request_sent_event.wait()) + + create_event = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, + create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.CLOSE.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." 
+ ) + ) + + @aioresponses() + def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.configure_erroneous_creation_order_status_response( + callback=lambda *args, **kwargs: request_sent_event.set() + ) + + order_id_for_invalid_order = self.place_buy_order( + amount=Decimal("0.0001"), price=Decimal("0.0001") + ) + # The second order is used only to have the event triggered and avoid using timeouts for tests + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn(order_id_for_invalid_order, self.exchange.in_flight_orders) + self.assertNotIn(order_id, self.exchange.in_flight_orders) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id_for_invalid_order, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "WARNING", + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." + ) + ) + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + @aioresponses() + def test_cancel_order_successfully(self, mock_api): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=self.exchange_order_id_prefix + "1", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn(self.client_order_id_prefix + "1", self.exchange.in_flight_orders) + order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.configure_successful_cancelation_response( + order=order, mock_api=mock_api, callback=lambda *args, **kwargs: request_sent_event.set() + ) + + self.exchange.cancel(trading_pair=order.trading_pair, client_order_id=order.client_order_id) + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_cancelled) + cancel_event = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Successfully canceled order {order.client_order_id}." 
+ ) + ) + + @aioresponses() + def test_cancel_order_raises_failure_event_when_request_fails(self, mock_api): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=self.exchange_order_id_prefix + "1", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn(self.client_order_id_prefix + "1", self.exchange.in_flight_orders) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.configure_erroneous_cancelation_response( + order=order, mock_api=mock_api, callback=lambda *args, **kwargs: request_sent_event.set() + ) + + self.exchange.cancel(trading_pair=self.trading_pair, client_order_id=self.client_order_id_prefix + "1") + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEquals(0, len(self.order_cancelled_logger.event_log)) + self.assertTrue( + any( + log.msg.startswith(f"Failed to cancel order {order.client_order_id}") + for log in self.log_records + ) + ) + + @aioresponses() + def test_set_leverage_success(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + target_leverage = 2 + self.configure_successful_set_leverage( + leverage=target_leverage, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set(), + ) + self.exchange.set_leverage(trading_pair=self.trading_pair, leverage=target_leverage) + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertTrue( + self.is_logged( + log_level="INFO", + message=f"Leverage for {self.trading_pair} successfully set to {target_leverage}.", + ) + ) + + @aioresponses() + @patch("asyncio.Queue.get") + @patch("hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_api_order_book_data_source." 
+ "DydxV4PerpetualAPIOrderBookDataSource._next_funding_time") + def test_listen_for_funding_info_update_initializes_funding_info(self, mock_api, _next_funding_time_mock, + mock_queue_get): + _next_funding_time_mock.return_value = self.target_funding_info_next_funding_utc_timestamp + url = self.funding_info_url + + response = self.funding_info_mock_response + mock_api.get(url, body=json.dumps(response)) + + event_messages = [asyncio.CancelledError] + mock_queue_get.side_effect = event_messages + + try: + self.async_run_with_timeout(self.exchange._listen_for_funding_info()) + except asyncio.CancelledError: + pass + + funding_info = self.exchange.get_funding_info(self.trading_pair) + + self.assertEqual(self.trading_pair, funding_info.trading_pair) + self.assertEqual(self.target_funding_info_index_price, funding_info.index_price) + self.assertEqual(self.target_funding_info_mark_price, funding_info.mark_price) + self.assertEqual( + self.target_funding_info_next_funding_utc_timestamp, funding_info.next_funding_utc_timestamp + ) + self.assertEqual(self.target_funding_info_rate, funding_info.rate) diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_user_stream_data_source.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_user_stream_data_source.py new file mode 100644 index 0000000000..a889e61ccc --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_user_stream_data_source.py @@ -0,0 +1,107 @@ +import asyncio +import unittest +from typing import Awaitable, Optional +from unittest.mock import AsyncMock, patch + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_derivative import DydxV4PerpetualDerivative +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant + + +class DydxV4PerpetualUserStreamDataSourceUnitTests(unittest.TestCase): + # logging.Level required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + + cls.base_asset = "COINALPHA" + cls.quote_asset = "HBOT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + + def setUp(self) -> None: + super().setUp() + + self.log_records = [] + self.async_task: Optional[asyncio.Task] = None + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = DydxV4PerpetualDerivative( + client_config_map, + dydx_v4_perpetual_secret_phrase="mirror actor skill push coach wait confirm orchard " + "lunch mobile athlete gossip awake miracle matter " + "bus reopen team ladder lazy list timber render wait", + dydx_v4_perpetual_chain_address="dydx14zzueazeh0hj67cghhf9jypslcf9sh2n5k6art", + trading_pairs=[self.trading_pair], + trading_required=False, + ) + + self.data_source = self.connector._create_user_stream_data_source() + + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self.mocking_assistant = NetworkMockingAssistant() + self.resume_test_event = asyncio.Event() + + def tearDown(self) -> None: + self.async_task and self.async_task.cancel() + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return 
any(record.levelname == log_level and record.getMessage() == message for record in self.log_records) + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_user_stream_data_source." + "DydxV4PerpetualUserStreamDataSource._sleep" + ) + def test_listen_for_user_stream_raises_cancelled_exception(self, _, ws_connect_mock): + ws_connect_mock.side_effect = asyncio.CancelledError + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.data_source.listen_for_user_stream(asyncio.Queue())) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch( + "hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_user_stream_data_source." + "DydxV4PerpetualUserStreamDataSource._sleep" + ) + def test_listen_for_user_stream_raises_logs_exception(self, mock_sleep, ws_connect_mock): + mock_sleep.side_effect = lambda: (self.ev_loop.run_until_complete(asyncio.sleep(0.5))) + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + ws_connect_mock.return_value.receive.side_effect = lambda *_: self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR") + ) + self.async_task = self.ev_loop.create_task(self.data_source.listen_for_user_stream(asyncio.Queue())) + + self.async_run_with_timeout(self.resume_test_event.wait(), 1.0) + + self.assertTrue( + self._is_logged("ERROR", "Unexpected error while listening to user stream. 
Retrying after 5 seconds...") + ) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_ws_authentication_successful(self, ws_connect_mock): + + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + self.async_run_with_timeout(self.data_source._connected_websocket_assistant()) + + json_msgs = self.mocking_assistant.json_messages_sent_through_websocket(ws_connect_mock.return_value) + + self.assertEqual("dydx14zzueazeh0hj67cghhf9jypslcf9sh2n5k6art/0", json_msgs[0]["id"]) diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_utils.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_utils.py new file mode 100644 index 0000000000..28e6afe79b --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_utils.py @@ -0,0 +1,5 @@ +from unittest import TestCase + + +class DydxV4PerpetualUtilsTests(TestCase): + pass diff --git a/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_web_utils.py b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_web_utils.py new file mode 100644 index 0000000000..24c792b7f4 --- /dev/null +++ b/test/hummingbot/connector/derivative/dydx_v4_perpetual/test_dydx_v4_perpetual_web_utils.py @@ -0,0 +1,52 @@ +import asyncio +import json +import unittest +from typing import Awaitable +from unittest.mock import Mock, patch + +from aioresponses import aioresponses + +from hummingbot.connector.derivative.dydx_v4_perpetual import ( + dydx_v4_perpetual_constants as CONSTANTS, + dydx_v4_perpetual_web_utils as web_utils, +) +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + + +class DydxV4PerpetualWebUtilsTest(unittest.TestCase): + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def test_public_rest_url(self): + url = web_utils.public_rest_url(CONSTANTS.PATH_MARKETS) + self.assertEqual("https://indexer.dydx.trade/v4/perpetualMarkets", url) + + @patch("hummingbot.connector.derivative.dydx_v4_perpetual.dydx_v4_perpetual_web_utils" + ".create_throttler", return_value=Mock()) + def test_build_api_factory(self, mock_create_throttler): + throttler = web_utils.create_throttler() + api_factory = web_utils.build_api_factory(throttler) + mock_create_throttler.assert_called_once() + + self.assertIsInstance(api_factory, WebAssistantsFactory) + self.assertIsNone(api_factory._auth) + + @patch.object(WebAssistantsFactory, "__init__", return_value=None) + def test_build_api_factory_without_time_synchronizer_pre_processor(self, mock_factory): + throttler = Mock() + web_utils.build_api_factory_without_time_synchronizer_pre_processor(throttler) + mock_factory.assert_called_once_with(throttler=throttler) + + @aioresponses() + def test_get_current_server_time(self, api_mock): + throttler = web_utils.create_throttler() + url = web_utils.public_rest_url(path_url=CONSTANTS.PATH_TIME) + data = {'iso': '2024-05-15T10:38:19.795Z', 'epoch': 1715769499.795} + + api_mock.get(url=url, body=json.dumps(data)) + + time = self.async_run_with_timeout(web_utils.get_current_server_time(throttler)) + + self.assertEqual(data["epoch"], time) diff --git a/test/hummingbot/connector/derivative/gate_io_perpetual/test_gate_io_perpetual_derivative.py b/test/hummingbot/connector/derivative/gate_io_perpetual/test_gate_io_perpetual_derivative.py index 
3d486e99dd..5d4e06f947 100644 --- a/test/hummingbot/connector/derivative/gate_io_perpetual/test_gate_io_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/gate_io_perpetual/test_gate_io_perpetual_derivative.py @@ -1709,7 +1709,8 @@ def test_create_buy_limit_maker_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT_MAKER.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) diff --git a/test/connector/exchange/gate_io/__init__.py b/test/hummingbot/connector/derivative/hashkey_perpetual/__init__.py similarity index 100% rename from test/connector/exchange/gate_io/__init__.py rename to test/hummingbot/connector/derivative/hashkey_perpetual/__init__.py diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_api_order_book_data_source.py b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_api_order_book_data_source.py new file mode 100644 index 0000000000..c7bcbb5c22 --- /dev/null +++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_api_order_book_data_source.py @@ -0,0 +1,595 @@ +import asyncio +import json +import re +import unittest +from typing import Awaitable, Dict +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.hashkey_perpetual import ( + hashkey_perpetual_constants as CONSTANTS, + hashkey_perpetual_web_utils as web_utils, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_api_order_book_data_source import ( + HashkeyPerpetualAPIOrderBookDataSource, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_derivative import HashkeyPerpetualDerivative +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.data_type.order_book_message import OrderBookMessage + + +class TestHashkeyPerpetualAPIOrderBookDataSource(unittest.TestCase): + # logging.Level required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "ETH" + cls.quote_asset = "USDT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}-PERPETUAL" + cls.domain = CONSTANTS.DEFAULT_DOMAIN + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.async_task = None + self.mocking_assistant = NetworkMockingAssistant() + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key="", + hashkey_perpetual_secret_key="", + trading_pairs=[self.trading_pair]) + + self.throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS) + self.time_synchronnizer = TimeSynchronizer() + self.time_synchronnizer.add_time_offset_ms_sample(1000) + self.ob_data_source = 
HashkeyPerpetualAPIOrderBookDataSource( + trading_pairs=[self.trading_pair], + throttler=self.throttler, + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + time_synchronizer=self.time_synchronnizer) + + self._original_full_order_book_reset_time = self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS + self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = -1 + + self.ob_data_source.logger().setLevel(1) + self.ob_data_source.logger().addHandler(self) + + self.resume_test_event = asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.async_task and self.async_task.cancel() + self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = self._original_full_order_book_reset_time + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def get_exchange_rules_mock(self) -> Dict: + exchange_rules = { + "filters": [ + { + "minPrice": "0.1", + "maxPrice": "100000.00000000", + "tickSize": "0.1", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.001", + "maxQty": "10", + "stepSize": "0.001", + "marketOrderMinQty": "0", + "marketOrderMaxQty": "0", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "0", + "filterType": "MIN_NOTIONAL" + }, + { + "maxSellPrice": "999999", + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "maxEntrustNum": 200, + "maxConditionNum": 200, + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ], + "exchangeId": "301", + "symbol": "BTCUSDT-PERPETUAL", + "symbolName": "BTCUSDT-PERPETUAL", + "status": "TRADING", + "baseAsset": "BTCUSDT-PERPETUAL", + "baseAssetPrecision": "0.001", + "quoteAsset": "USDT", + "quoteAssetPrecision": "0.1", + "icebergAllowed": False, + "inverse": False, + "index": "USDT", + "marginToken": "USDT", + "marginPrecision": "0.0001", + "contractMultiplier": "0.001", + "underlying": "BTC", + "riskLimits": [ + { + "riskLimitId": "200000722", + "quantity": "1000.00", + "initialMargin": "0.10", + "maintMargin": "0.005", + "isWhite": False + } + ] + } + return exchange_rules + + # ORDER BOOK SNAPSHOT + @staticmethod + def _snapshot_response() -> Dict: + snapshot = { + "t": 1703613017099, + "b": [ + [ + "2500", + "1000" + ] + ], + "a": [ + [ + "25981.04", + "1000" + ], + [ + "25981.76", + "2000" + ], + ] + } + return snapshot + + @staticmethod + def _snapshot_response_processed() -> Dict: + snapshot_processed = { + "t": 1703613017099, + "b": [ + [ + "2500", + "1000" + ] + ], + "a": [ + [ + "25981.04", + "1000" + ], + [ + "25981.76", + "2000" + ], + ] + } + return snapshot_processed + + @aioresponses() + def test_request_order_book_snapshot(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = 
re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + snapshot_data = self._snapshot_response() + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_URL) + tradingrule_resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(tradingrule_resp)) + mock_api.get(regex_url, body=json.dumps(snapshot_data)) + + ret = self.async_run_with_timeout( + coroutine=self.ob_data_source._request_order_book_snapshot(self.trading_pair) + ) + + self.assertEqual(ret, self._snapshot_response_processed()) # shallow comparison ok + + @aioresponses() + def test_get_snapshot_raises(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_URL) + tradingrule_resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(tradingrule_resp)) + mock_api.get(regex_url, status=500) + + with self.assertRaises(IOError): + self.async_run_with_timeout( + coroutine=self.ob_data_source._order_book_snapshot(self.trading_pair) + ) + + @aioresponses() + def test_get_new_order_book(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + resp = self._snapshot_response() + mock_api.get(regex_url, body=json.dumps(resp)) + + ret = self.async_run_with_timeout(coroutine=self.ob_data_source.get_new_order_book(self.trading_pair)) + bid_entries = list(ret.bid_entries()) + ask_entries = list(ret.ask_entries()) + self.assertEqual(1, len(bid_entries)) + self.assertEqual(2500, bid_entries[0].price) + self.assertEqual(1000, bid_entries[0].amount) + self.assertEqual(int(resp["t"]), bid_entries[0].update_id) + self.assertEqual(2, len(ask_entries)) + self.assertEqual(25981.04, ask_entries[0].price) + self.assertEqual(1000, ask_entries[0].amount) + self.assertEqual(25981.76, ask_entries[1].price) + self.assertEqual(2000, ask_entries[1].amount) + self.assertEqual(int(resp["t"]), ask_entries[0].update_id) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connect_mock): + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + result_subscribe_trades = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + "realtimeInterval": "24h", + }, + "f": True, + "sendTime": 1688198964293, + "shared": False, + "id": "1" + } + + result_subscribe_depth = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "depth", + "event": "sub", + "params": { + "binary": False, + }, + "f": True, + "sendTime": 1688198964293, + "shared": False, + "id": "1" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_trades)) + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_depth)) + + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + + sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=ws_connect_mock.return_value) + + self.assertEqual(2, 
len(sent_subscription_messages)) + expected_trade_subscription = { + "topic": "trade", + "event": "sub", + "symbol": self.ex_trading_pair, + "params": { + "binary": False + } + } + self.assertEqual(expected_trade_subscription, sent_subscription_messages[0]) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_api_order_book_data_source.HashkeyPerpetualAPIOrderBookDataSource._time") + def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finishes( + self, + time_mock, + ws_connect_mock): + + time_mock.side_effect = [1000, 1100, 1101, 1102] # Simulate first ping interval is already due + + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + result_subscribe_trades = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + "realtimeInterval": "24h", + }, + "id": "1" + } + + result_subscribe_depth = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "depth", + "event": "sub", + "params": { + "binary": False, + }, + "id": "1" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_trades)) + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_depth)) + + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + sent_messages = self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=ws_connect_mock.return_value) + + expected_ping_message = { + "ping": int(1101 * 1e3) + } + self.assertEqual(expected_ping_message, sent_messages[-1]) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_subscriptions_raises_cancel_exception(self, _, ws_connect_mock): + ws_connect_mock.side_effect = asyncio.CancelledError + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + self.async_run_with_timeout(self.listening_task) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_subscriptions_logs_exception_details(self, sleep_mock, ws_connect_mock): + sleep_mock.side_effect = asyncio.CancelledError + ws_connect_mock.side_effect = Exception("TEST ERROR.") + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + self.async_run_with_timeout(self.listening_task) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error occurred when listening to order book streams. 
Retrying in 5 seconds...")) + + def test_listen_for_trades_cancelled_when_listening(self): + mock_queue = MagicMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_trades_logs_exception(self): + incomplete_resp = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + }, + "id": "1", + "data": [ + { + "v": "1447335405363150849", + "t": 1687271825415, + "p": "10001", + "q": "1", + "m": False, + }, + { + "v": "1447337171483901952", + "t": 1687272035953, + "p": "10001.1", + "q": "10", + "m": True + }, + ] + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_trades_successful(self): + mock_queue = AsyncMock() + trade_event = { + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "topic": "trade", + "params": { + "realtimeInterval": "24h", + "binary": "false" + }, + "data": [ + { + "v": "929681067596857345", + "t": 1625562619577, + "p": "34924.15", + "q": "100", + "m": True + } + ], + "f": True, + "sendTime": 1626249138535, + "shared": False + } + mock_queue.get.side_effect = [trade_event, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + try: + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + except asyncio.CancelledError: + pass + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertTrue(trade_event["data"][0]["t"], msg.trade_id) + + def test_listen_for_order_book_snapshots_cancelled_when_fetching_snapshot(self): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + + @aioresponses() + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_order_book_snapshots_log_exception(self, mock_api, sleep_mock): + mock_queue = AsyncMock() + mock_queue.get.side_effect = ['ERROR', asyncio.CancelledError] + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + sleep_mock.side_effect = [asyncio.CancelledError] + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.get(regex_url, exception=Exception) + + with self.assertRaises(asyncio.CancelledError): + 
self.async_run_with_timeout(self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)) + + @aioresponses() + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_order_book_snapshots_successful_rest(self, mock_api, _): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.TimeoutError + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + snapshot_data = self._snapshot_response() + mock_api.get(regex_url, body=json.dumps(snapshot_data)) + + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(int(snapshot_data["t"]), msg.update_id) + + def test_listen_for_order_book_snapshots_successful_ws(self): + mock_queue = AsyncMock() + snapshot_event = { + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "topic": "depth", + "params": { + "realtimeInterval": "24h", + "binary": "false" + }, + "data": [{ + "e": 301, + "s": self.ex_trading_pair, + "t": 1565600357643, + "v": "112801745_18", + "b": [ + ["11371.49", "14"], + ["11371.12", "200"], + ["11369.97", "35"], + ["11369.96", "500"], + ["11369.95", "93"], + ["11369.94", "1680"], + ["11369.6", "47"], + ["11369.17", "300"], + ["11369.16", "200"], + ["11369.04", "1320"]], + "a": [ + ["11375.41", "53"], + ["11375.42", "43"], + ["11375.48", "52"], + ["11375.58", "541"], + ["11375.7", "386"], + ["11375.71", "200"], + ["11377", "2069"], + ["11377.01", "167"], + ["11377.12", "1500"], + ["11377.61", "300"] + ], + "o": 0 + }], + "f": True, + "sendTime": 1626253839401, + "shared": False + } + mock_queue.get.side_effect = [snapshot_event, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + try: + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + except asyncio.CancelledError: + pass + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get(), + timeout=6) + + self.assertTrue(snapshot_event["data"][0]["t"], msg.update_id) diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_auth.py b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_auth.py new file mode 100644 index 0000000000..550db4d6dc --- /dev/null +++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_auth.py @@ -0,0 +1,110 @@ +import asyncio +import hashlib +import hmac +from collections import OrderedDict +from typing import Any, Awaitable, Dict, Mapping, Optional +from unittest import TestCase +from unittest.mock import MagicMock +from urllib.parse import urlencode + +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_auth import HashkeyPerpetualAuth +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, WSJSONRequest + + +class HashkeyPerpetualAuthTests(TestCase): + + def setUp(self) -> None: + super().setUp() + self.api_key = "testApiKey" + self.secret_key = "testSecretKey" + + self.mock_time_provider = MagicMock() + 
self.mock_time_provider.time.return_value = 1000 + + self.auth = HashkeyPerpetualAuth( + api_key=self.api_key, + secret_key=self.secret_key, + time_provider=self.mock_time_provider, + ) + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def test_add_auth_params_to_get_request_without_params(self): + request = RESTRequest( + method=RESTMethod.GET, + url="https://test.url/api/endpoint", + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + params_expected = self._params_expected(request.params) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) + self.assertEqual(params_expected['signature'], request.params["signature"]) + + def test_add_auth_params_to_get_request_with_params(self): + params = { + "param_z": "value_param_z", + "param_a": "value_param_a" + } + request = RESTRequest( + method=RESTMethod.GET, + url="https://test.url/api/endpoint", + params=params, + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + + params_expected = self._params_expected(request.params) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) + self.assertEqual(params_expected['signature'], request.params["signature"]) + self.assertEqual(params_expected['param_z'], request.params["param_z"]) + self.assertEqual(params_expected['param_a'], request.params["param_a"]) + + def test_add_auth_params_to_post_request(self): + params = {"param_z": "value_param_z", "param_a": "value_param_a"} + request = RESTRequest( + method=RESTMethod.POST, + url="https://test.url/api/endpoint", + data=params, + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + params_auth = self._params_expected(request.params) + params_request = self._params_expected(request.data) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_auth['timestamp'], request.params["timestamp"]) + self.assertEqual(params_auth['signature'], request.params["signature"]) + self.assertEqual(params_request['param_z'], request.data["param_z"]) + self.assertEqual(params_request['param_a'], request.data["param_a"]) + + def test_no_auth_added_to_wsrequest(self): + payload = {"param1": "value_param_1"} + request = WSJSONRequest(payload=payload, is_auth_required=True) + self.async_run_with_timeout(self.auth.ws_authenticate(request)) + self.assertEqual(payload, request.payload) + + def _generate_signature(self, params: Dict[str, Any]) -> str: + encoded_params_str = urlencode(params) + digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() + return digest + + def _params_expected(self, request_params: Optional[Mapping[str, str]]) -> Dict: + request_params = request_params if request_params else {} + params = { + 'timestamp': 1000000, + } + params.update(request_params) + params = OrderedDict(sorted(params.items(), key=lambda t: t[0])) + params['signature'] = self._generate_signature(params=params) + return params diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_derivative.py 
b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_derivative.py new file mode 100644 index 0000000000..0f2164273f --- /dev/null +++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_derivative.py @@ -0,0 +1,1632 @@ +import asyncio +import json +import logging +import re +from copy import deepcopy +from decimal import Decimal +from typing import Any, Callable, List, Optional, Tuple +from unittest.mock import AsyncMock + +import pandas as pd +from aioresponses import aioresponses +from aioresponses.core import RequestCall + +import hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_constants as CONSTANTS +import hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_web_utils as web_utils +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_derivative import HashkeyPerpetualDerivative +from hummingbot.connector.derivative.position import Position +from hummingbot.connector.test_support.perpetual_derivative_test import AbstractPerpetualDerivativeTests +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair, get_new_client_order_id +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, PositionSide, TradeType +from hummingbot.core.data_type.funding_info import FundingInfo +from hummingbot.core.data_type.in_flight_order import InFlightOrder +from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase + + +class HashkeyPerpetualDerivativeTests(AbstractPerpetualDerivativeTests.PerpetualDerivativeTests): + _logger = logging.getLogger(__name__) + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.api_key = "someKey" + cls.api_secret = "someSecret" + cls.user_id = "someUserId" + cls.base_asset = "BTC" + cls.quote_asset = "USDT" # linear + cls.trading_pair = combine_to_hb_trading_pair(cls.base_asset, cls.quote_asset) + + @property + def all_symbols_url(self): + url = web_utils.rest_url(path_url=CONSTANTS.EXCHANGE_INFO_URL) + return url + + @property + def latest_prices_url(self): + url = web_utils.rest_url( + path_url=CONSTANTS.TICKER_PRICE_URL + ) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def network_status_url(self): + url = web_utils.rest_url(path_url=CONSTANTS.PING_URL) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def trading_rules_url(self): + url = web_utils.rest_url(path_url=CONSTANTS.EXCHANGE_INFO_URL) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def order_creation_url(self): + url = web_utils.rest_url( + path_url=CONSTANTS.ORDER_URL + ) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def balance_url(self): + url = web_utils.rest_url(path_url=CONSTANTS.ACCOUNT_INFO_URL) + return url + + @property + def funding_info_url(self): + url = web_utils.rest_url( + path_url=CONSTANTS.FUNDING_INFO_URL, + ) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def mark_price_url(self): + url = web_utils.rest_url( + path_url=CONSTANTS.MARK_PRICE_URL, + ) + url = re.compile(f"^{url}".replace(".", 
r"\.").replace("?", r"\?") + ".*") + return url + + @property + def index_price_url(self): + url = web_utils.rest_url( + path_url=CONSTANTS.INDEX_PRICE_URL, + ) + url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + return url + + @property + def funding_payment_url(self): + pass + + @property + def balance_request_mock_response_only_base(self): + pass + + @property + def all_symbols_request_mock_response(self): + mock_response = { + "contracts": [ + { + "filters": [ + { + "minPrice": "0.1", + "maxPrice": "100000.00000000", + "tickSize": "0.1", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.001", + "maxQty": "10", + "stepSize": "0.001", + "marketOrderMinQty": "0", + "marketOrderMaxQty": "0", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "0", + "filterType": "MIN_NOTIONAL" + }, + { + "maxSellPrice": "999999", + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "maxEntrustNum": 200, + "maxConditionNum": 200, + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ], + "exchangeId": "301", + "symbol": "BTCUSDT-PERPETUAL", + "symbolName": "BTCUSDT-PERPETUAL", + "status": "TRADING", + "baseAsset": "BTCUSDT-PERPETUAL", + "baseAssetPrecision": "0.001", + "quoteAsset": "USDT", + "quoteAssetPrecision": "0.1", + "icebergAllowed": False, + "inverse": False, + "index": "USDT", + "marginToken": "USDT", + "marginPrecision": "0.0001", + "contractMultiplier": "0.001", + "underlying": "BTC", + "riskLimits": [ + { + "riskLimitId": "200000722", + "quantity": "1000.00", + "initialMargin": "0.10", + "maintMargin": "0.005", + "isWhite": False + } + ] + } + ] + } + return mock_response + + @property + def latest_prices_request_mock_response(self): + mock_response = [ + { + "s": "BTCUSDT-PERPETUAL", + "p": "9999.9" + } + ] + return mock_response + + @property + def all_symbols_including_invalid_pair_mock_response(self): + mock_response = mock_response = { + "contracts": [ + { + "filters": [ + { + "minPrice": "0.1", + "maxPrice": "100000.00000000", + "tickSize": "0.1", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.001", + "maxQty": "10", + "stepSize": "0.001", + "marketOrderMinQty": "0", + "marketOrderMaxQty": "0", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "0", + "filterType": "MIN_NOTIONAL" + }, + { + "maxSellPrice": "999999", + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "maxEntrustNum": 200, + "maxConditionNum": 200, + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.05", + "sellPriceDownRate": "0.05", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ], + "exchangeId": "301", + "symbol": "BTCUSDT-PERPETUAL", + "symbolName": "BTCUSDT-PERPETUAL", + "status": "STOPPING", + "baseAsset": "BTCUSDT-PERPETUAL", + "baseAssetPrecision": "0.001", + "quoteAsset": "USDT", + "quoteAssetPrecision": "0.1", + "icebergAllowed": False, + "inverse": False, + "index": "USDT", + "marginToken": "USDT", + "marginPrecision": "0.0001", + "contractMultiplier": "0.001", + "underlying": "BTC", + "riskLimits": [ + { + "riskLimitId": "200000722", + "quantity": 
"1000.00", + "initialMargin": "0.10", + "maintMargin": "0.005", + "isWhite": False + } + ] + } + ] + } + return "INVALID-PAIR", mock_response + + def empty_funding_payment_mock_response(self): + pass + + @aioresponses() + def test_funding_payment_polling_loop_sends_update_event(self, *args, **kwargs): + pass + + @property + def network_status_request_successful_mock_response(self): + mock_response = {} + return mock_response + + @property + def trading_rules_request_mock_response(self): + return self.all_symbols_request_mock_response + + @property + def trading_rules_request_erroneous_mock_response(self): + _, resp = self.all_symbols_including_invalid_pair_mock_response + return resp + + @property + def order_creation_request_successful_mock_response(self): + mock_response = { + "time": "1723800711177", + "updateTime": "1723800711191", + "orderId": "1753761908689837056", + "clientOrderId": get_new_client_order_id( + is_buy=True, + trading_pair=self.trading_pair, + hbot_order_id_prefix=CONSTANTS.HBOT_BROKER_ID, + max_id_len=CONSTANTS.MAX_ORDER_ID_LEN, + ), + "symbol": self.exchange_trading_pair, + "price": "5050", + "leverage": "5", + "origQty": "100", + "executedQty": "0", + "avgPrice": "0", + "marginLocked": "101", + "type": "LIMIT", + "side": "BUY_OPEN", + "timeInForce": "GTC", + "status": "NEW", + "priceType": "INPUT", + "contractMultiplier": "0.00100000" + } + return mock_response + + @property + def limit_maker_order_creation_request_successful_mock_response(self): + mock_response = { + "time": "1723800711177", + "updateTime": "1723800711191", + "orderId": "1753761908689837056", + "clientOrderId": get_new_client_order_id( + is_buy=True, + trading_pair=self.trading_pair, + hbot_order_id_prefix=CONSTANTS.HBOT_BROKER_ID, + max_id_len=CONSTANTS.MAX_ORDER_ID_LEN, + ), + "symbol": self.exchange_trading_pair, + "price": "5050", + "leverage": "5", + "origQty": "100", + "executedQty": "0", + "avgPrice": "0", + "marginLocked": "101", + "type": "LIMIT", + "side": "BUY_OPEN", + "timeInForce": "GTC", + "status": "NEW", + "priceType": "INPUT", + "contractMultiplier": "0.00100000" + } + return mock_response + + @property + def balance_request_mock_response_for_base_and_quote(self): + mock_response = [ + { + "balance": "3000", + "availableBalance": "2000", + "positionMargin": "500", + "orderMargin": "500", + "asset": "USDT", + "crossUnRealizedPnl": "1000" + } + ] + return mock_response + + @aioresponses() + def test_update_balances(self, mock_api): + response = self.balance_request_mock_response_for_base_and_quote + self._configure_balance_response(response=response, mock_api=mock_api) + + self.async_run_with_timeout(self.exchange._update_balances()) + + available_balances = self.exchange.available_balances + total_balances = self.exchange.get_all_balances() + + self.assertEqual(Decimal("2000"), available_balances[self.quote_asset]) + self.assertEqual(Decimal("3000"), total_balances[self.quote_asset]) + + @property + def balance_event_websocket_update(self): + mock_response = [ + { + "e": "outboundContractAccountInfo", # event type + "E": "1714717314118", # event time + "T": True, # can trade + "W": True, # can withdraw + "D": True, # can deposit + "B": [ # balances changed + { + "a": "USDT", # asset + "f": "474960.65", # free amount + "l": "100000", # locked amount + "r": "" # to be released + } + ] + } + ] + return mock_response + + @property + def position_event_websocket_update(self): + mock_response = [ + { + "e": "outboundContractPositionInfo", # event type + "E": "1715224789008", # event time 
+ "A": "1649292498437183234", # account ID + "s": self.exchange_trading_pair, # symbol + "S": "LONG", # side, LONG or SHORT + "p": "3212.78", # avg Price + "P": "3000", # total position + "a": "3000", # available position + "f": "0", # liquidation price + "m": "13680.323", # portfolio margin + "r": "-3.8819", # realised profit and loss (Pnl) + "up": "-4909.9255", # unrealized profit and loss (unrealizedPnL) + "pr": "-0.3589", # profit rate of current position + "pv": "73579.09", # position value (USDT) + "v": "5.00", # leverage + "mt": "CROSS", # position type, only CROSS, ISOLATED later will support + "mm": "0" # min margin + } + ] + return mock_response + + @property + def position_event_websocket_update_zero(self): + mock_response = [ + { + "e": "outboundContractPositionInfo", # event type + "E": "1715224789008", # event time + "A": "1649292498437183234", # account ID + "s": self.exchange_trading_pair, # symbol + "S": "LONG", # side, LONG or SHORT + "p": "3212.78", # avg Price + "P": "0", # total position + "a": "0", # available position + "f": "0", # liquidation price + "m": "13680.323", # portfolio margin + "r": "-3.8819", # realised profit and loss (Pnl) + "up": "-4909.9255", # unrealized profit and loss (unrealizedPnL) + "pr": "-0.3589", # profit rate of current position + "pv": "73579.09", # position value (USDT) + "v": "5.00", # leverage + "mt": "CROSS", # position type, only CROSS, ISOLATED later will support + "mm": "0" # min margin + } + ] + return mock_response + + @property + def expected_latest_price(self): + return 9999.9 + + @property + def funding_payment_mock_response(self): + raise NotImplementedError + + @property + def expected_supported_position_modes(self) -> List[PositionMode]: + raise NotImplementedError # test is overwritten + + @property + def target_funding_info_next_funding_utc_str(self): + datetime_str = str( + pd.Timestamp.utcfromtimestamp( + self.target_funding_info_next_funding_utc_timestamp) + ).replace(" ", "T") + "Z" + return datetime_str + + @property + def target_funding_info_next_funding_utc_str_ws_updated(self): + datetime_str = str( + pd.Timestamp.utcfromtimestamp( + self.target_funding_info_next_funding_utc_timestamp_ws_updated) + ).replace(" ", "T") + "Z" + return datetime_str + + @property + def target_funding_payment_timestamp_str(self): + datetime_str = str( + pd.Timestamp.utcfromtimestamp( + self.target_funding_payment_timestamp) + ).replace(" ", "T") + "Z" + return datetime_str + + @property + def funding_info_mock_response(self): + mock_response = self.latest_prices_request_mock_response + funding_info = mock_response[0] + funding_info["index_price"] = self.target_funding_info_index_price + funding_info["mark_price"] = self.target_funding_info_mark_price + funding_info["predicted_funding_rate"] = self.target_funding_info_rate + return funding_info + + @property + def funding_rate_mock_response(self): + return [ + { + "symbol": "ETHUSDT-PERPETUAL", + "rate": "0.0001", + "nextSettleTime": "1724140800000" + }, + { + "symbol": "BTCUSDT-PERPETUAL", + "rate": self.target_funding_info_rate, + "nextSettleTime": str(self.target_funding_info_next_funding_utc_timestamp * 1e3) + }, + ] + + @property + def index_price_mock_response(self): + return { + "index": { + f"{self.base_asset}{self.quote_asset}": self.target_funding_info_index_price + }, + "edp": { + f"{self.base_asset}{self.quote_asset}": "2" + } + } + + @property + def mark_price_mock_response(self): + return { + "exchangeId": 301, + "symbolId": self.exchange_trading_pair, + "price": 
self.target_funding_info_mark_price, + "time": str(self.target_funding_info_next_funding_utc_timestamp * 1e3) + } + + @property + def expected_supported_order_types(self): + return [OrderType.LIMIT, OrderType.MARKET, OrderType.LIMIT_MAKER] + + @property + def expected_trading_rule(self): + rule = self.trading_rules_request_mock_response["contracts"][0] + + trading_pair = f"{rule['underlying']}-{rule['quoteAsset']}" + trading_filter_info = {item["filterType"]: item for item in rule.get("filters", [])} + + min_order_size = trading_filter_info.get("LOT_SIZE", {}).get("minQty") + min_price_increment = trading_filter_info.get("PRICE_FILTER", {}).get("minPrice") + min_base_amount_increment = rule.get("baseAssetPrecision") + min_notional_size = trading_filter_info.get("MIN_NOTIONAL", {}).get("minNotional") + + return TradingRule(trading_pair, + min_order_size=Decimal(min_order_size), + min_price_increment=Decimal(min_price_increment), + min_base_amount_increment=Decimal(min_base_amount_increment), + min_notional_size=Decimal(min_notional_size)) + + @property + def expected_logged_error_for_erroneous_trading_rule(self): + erroneous_rule = self.trading_rules_request_erroneous_mock_response["contracts"][0] + return f"Error parsing the trading pair rule {erroneous_rule}. Skipping." + + @property + def expected_exchange_order_id(self): + return "1753761908689837056" + + @property + def is_order_fill_http_update_included_in_status_update(self) -> bool: + return False + + @property + def is_order_fill_http_update_executed_during_websocket_order_event_processing(self) -> bool: + return False + + @property + def expected_partial_fill_price(self) -> Decimal: + return Decimal("100") + + @property + def expected_partial_fill_amount(self) -> Decimal: + return Decimal("10") + + @property + def expected_fill_fee(self) -> TradeFeeBase: + return AddedToCostTradeFee( + percent_token=self.quote_asset, + flat_fees=[TokenAmount(token=self.quote_asset, amount=Decimal("0.1"))], + ) + + @property + def expected_fill_trade_id(self) -> str: + return "1755540311713595904" + + def async_run_with_timeout(self, coroutine, timeout: int = 1): + ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def exchange_symbol_for_tokens(self, base_token: str, quote_token: str) -> str: + return f"{base_token}{quote_token}-PERPETUAL" + + def create_exchange_instance(self) -> HashkeyPerpetualDerivative: + client_config_map = ClientConfigAdapter(ClientConfigMap()) + exchange = HashkeyPerpetualDerivative( + client_config_map, + self.api_key, + self.api_secret, + trading_pairs=[self.trading_pair], + ) + return exchange + + def validate_auth_credentials_present(self, request_call: RequestCall): + request_headers = request_call.kwargs["headers"] + request_params = request_call.kwargs["params"] + + self.assertIn("X-HK-APIKEY", request_headers) + self.assertIn("timestamp", request_params) + self.assertIn("signature", request_params) + + def validate_order_creation_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + self.assertEqual(order.trade_type.name.lower(), request_params["side"].split("_")[0].lower()) + self.assertEqual(self.exchange_trading_pair, request_params["symbol"]) + self.assertEqual(order.amount, self.exchange.get_amount_of_contracts( + self.trading_pair, abs(Decimal(str(request_params["quantity"]))))) + self.assertEqual(order.client_order_id, request_params["clientOrderId"]) + + def 
validate_order_cancelation_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + request_data = request_call.kwargs["data"] + self.assertIsNotNone(request_params) + self.assertIsNone(request_data) + + def validate_order_status_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + request_data = request_call.kwargs["data"] + self.assertIsNotNone(request_params) + self.assertIsNone(request_data) + + def validate_trades_request(self, order: InFlightOrder, request_call: RequestCall): + request_params = request_call.kwargs["params"] + self.assertEqual(self.exchange_trading_pair, request_params["symbol"]) + self.assertEqual(order.exchange_order_id, request_params["orderId"]) + + def configure_successful_cancelation_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + """ + :return: the URL configured for the cancelation + """ + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + response = self._order_cancelation_request_successful_mock_response(order=order) + mock_api.delete(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_erroneous_cancelation_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url( + path_url=CONSTANTS.ORDER_URL + ) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + mock_api.delete(regex_url, status=400, callback=callback) + return url + + def configure_one_successful_one_erroneous_cancel_all_response( + self, + successful_order: InFlightOrder, + erroneous_order: InFlightOrder, + mock_api: aioresponses, + ) -> List[str]: + """ + :return: a list of all configured URLs for the cancelations + """ + all_urls = [] + url = self.configure_successful_cancelation_response(order=successful_order, mock_api=mock_api) + all_urls.append(url) + url = self.configure_erroneous_cancelation_response(order=erroneous_order, mock_api=mock_api) + all_urls.append(url) + return all_urls + + def configure_order_not_found_error_cancelation_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + # Implement the expected not found response when enabling test_cancel_order_not_found_in_the_exchange + raise NotImplementedError + + def configure_order_not_found_error_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + # Implement the expected not found response when enabling + # test_lost_order_removed_if_not_found_during_order_status_update + raise NotImplementedError + + def configure_completely_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_status_request_completely_filled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_canceled_order_status_response( + self, + order: 
InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_status_request_canceled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_open_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_status_request_open_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_http_error_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + mock_api.get(regex_url, status=404, callback=callback) + return url + + def configure_partially_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_status_request_partially_filled_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_partial_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_fills_request_partial_fill_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_full_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url( + path_url=CONSTANTS.ACCOUNT_TRADE_LIST_URL, + ) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + response = self._order_fills_request_full_fill_mock_response(order=order) + mock_api.get(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_erroneous_http_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + url = web_utils.rest_url(path_url=CONSTANTS.ORDER_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + mock_api.get(regex_url, status=400, callback=callback) + return url + + def configure_failed_set_position_mode( + self, + position_mode: PositionMode, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ): + url = web_utils.rest_url( + path_url=CONSTANTS.SET_POSITION_MODE_URL + ) + get_position_url = web_utils.rest_url( + path_url=CONSTANTS.POSITION_INFORMATION_URL + ) + regex_url = 
re.compile(f"^{url}") + regex_get_position_url = re.compile(f"^{get_position_url}") + + error_msg = "" + get_position_mock_response = [ + {"mode": 'single'} + ] + mock_response = { + "label": "1666", + "detail": "", + } + mock_api.get(regex_get_position_url, body=json.dumps(get_position_mock_response), callback=callback) + mock_api.post(regex_url, body=json.dumps(mock_response), callback=callback) + + return url, f"{error_msg}" + + def configure_successful_set_position_mode( + self, + position_mode: PositionMode, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ): + pass + + def configure_failed_set_leverage( + self, + leverage: PositionMode, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> Tuple[str, str]: + url = web_utils.rest_url(path_url=CONSTANTS.SET_LEVERAGE_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + err_msg = "leverage is diff" + mock_response = { + "code": "0001", + "msg": err_msg + } + mock_api.post(regex_url, body=json.dumps(mock_response), callback=callback) + return url, err_msg + + def configure_successful_set_leverage( + self, + leverage: int, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ): + url = web_utils.rest_url(path_url=CONSTANTS.SET_LEVERAGE_URL) + regex_url = re.compile(f"^{url}") + + mock_response = { + "code": "0000", + "symbolId": "BTCUSDT-PERPETUAL", + "leverage": str(leverage) + } + + mock_api.post(regex_url, body=json.dumps(mock_response), callback=callback) + + return url + + def order_event_for_new_order_websocket_update(self, order: InFlightOrder): + self._simulate_trading_rules_initialized() + return [ + { + "e": "contractExecutionReport", # event type + "E": "1714716899100", # event time + "s": self.exchange_trading_pair, # symbol + "c": order.client_order_id, # client order ID + "S": "BUY", # side + "o": "LIMIT", # order type + "f": "GTC", # time in force + "q": self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount), # order quantity + "p": str(order.price), # order price + "X": "NEW", # current order status + "i": order.exchange_order_id, # order ID + "l": "0", # last executed quantity + "z": "0", # cumulative filled quantity + "L": "", # last executed price + "n": "0", # commission amount + "N": "", # commission asset + "u": True, # is the trade normal, ignore for now + "w": True, # is the order working? + "m": False, # is this trade the maker side? 
+ "O": "1714716899068", # order creation time + "Z": "0", # cumulative quote asset transacted quantity + "C": False, # is close, Is the buy close or sell close + "V": "26105.5", # average executed price + "reqAmt": "0", # requested cash amount + "d": "", # execution ID + "r": "10000", # unfilled quantity + "v": "5", # leverage + "P": "30000", # Index price + "lo": True, # Is liquidation Order + "lt": "LIQUIDATION_MAKER" # Liquidation type "LIQUIDATION_MAKER_ADL", "LIQUIDATION_MAKER", "LIQUIDATION_TAKER" (To be released) + } + ] + + def order_event_for_canceled_order_websocket_update(self, order: InFlightOrder): + self._simulate_trading_rules_initialized() + + return [ + { + "e": "contractExecutionReport", # event type + "E": "1714716899100", # event time + "s": self.exchange_trading_pair, # symbol + "c": order.client_order_id, # client order ID + "S": "BUY", # side + "o": "LIMIT", # order type + "f": "GTC", # time in force + "q": self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount), # order quantity + "p": str(order.price), # order price + "X": "CANCELED", # current order status + "i": order.exchange_order_id, # order ID + "l": "0", # last executed quantity + "z": "0", # cumulative filled quantity + "L": "", # last executed price + "n": "0", # commission amount + "N": "", # commission asset + "u": True, # is the trade normal, ignore for now + "w": True, # is the order working? + "m": False, # is this trade the maker side? + "O": "1714716899068", # order creation time + "Z": "0", # cumulative quote asset transacted quantity + "C": False, # is close, Is the buy close or sell close + "V": "26105.5", # average executed price + "reqAmt": "0", # requested cash amount + "d": "", # execution ID + "r": "10000", # unfilled quantity + "v": "5", # leverage + "P": "30000", # Index price + "lo": True, # Is liquidation Order + "lt": "LIQUIDATION_MAKER" # Liquidation type "LIQUIDATION_MAKER_ADL", "LIQUIDATION_MAKER", "LIQUIDATION_TAKER" (To be released) + } + ] + + def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): + self._simulate_trading_rules_initialized() + + quantity = self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount) + return [ + { + "e": "contractExecutionReport", # event type + "E": "1714716899100", # event time + "s": self.exchange_trading_pair, # symbol + "c": order.client_order_id, # client order ID + "S": "BUY", # side + "o": "LIMIT", # order type + "f": "GTC", # time in force + "q": str(quantity), # order quantity + "p": str(order.price), # order price + "X": "FILLED", # current order status + "i": order.exchange_order_id, # order ID + "l": str(quantity), # last executed quantity + "z": "0", # cumulative filled quantity + "L": str(order.price), # last executed price + "n": "0.1", # commission amount + "N": "USDT", # commission asset + "u": True, # is the trade normal, ignore for now + "w": True, # is the order working? + "m": False, # is this trade the maker side? 
+ "O": "1714716899068", # order creation time + "Z": "0", # cumulative quote asset transacted quantity + "C": False, # is close, Is the buy close or sell close + "V": "26105.5", # average executed price + "reqAmt": "0", # requested cash amount + "d": "", # execution ID + "r": "10000", # unfilled quantity + "v": "5", # leverage + "P": "30000", # Index price + "lo": True, # Is liquidation Order + "lt": "LIQUIDATION_MAKER" # Liquidation type "LIQUIDATION_MAKER_ADL", "LIQUIDATION_MAKER", "LIQUIDATION_TAKER" (To be released) + } + ] + + def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): + self._simulate_trading_rules_initialized() + + return [ + { + "e": "ticketInfo", # event type + "E": "1714717146971", # event time + "s": self.exchange_trading_pair, # symbol + "q": self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount), # quantity + "t": "1714717146957", # time + "p": str(order.price), # price + "T": self.expected_fill_trade_id, # ticketId + "o": order.exchange_order_id, # orderId + "c": order.client_order_id, # clientOrderId + "a": "1649292498437183232", # accountId + "m": True, # isMaker + "S": order.trade_type # side SELL or BUY + } + ] + + def position_event_for_full_fill_websocket_update(self, order: InFlightOrder, unrealized_pnl: float): + mock_response = [ + { + "e": "outboundContractPositionInfo", # event type + "E": "1715224789008", # event time + "A": "1649292498437183234", # account ID + "s": self.exchange_trading_pair, # symbol + "S": "LONG", # side, LONG or SHORT + "p": "3212.78", # avg Price + "P": "3000", # total position + "a": "3000", # available position + "f": "0", # liquidation price + "m": "13680.323", # portfolio margin + "r": "-3.8819", # realised profit and loss (Pnl) + "up": str(unrealized_pnl), # unrealized profit and loss (unrealizedPnL) + "pr": "-0.3589", # profit rate of current position + "pv": "73579.09", # position value (USDT) + "v": "5.00", # leverage + "mt": "CROSS", # position type, only CROSS, ISOLATED later will support + "mm": "0" # min margin + } + ] + return mock_response + + def funding_info_event_for_websocket_update(self): + return [] + + def test_create_order_with_invalid_position_action_raises_value_error(self): + self._simulate_trading_rules_initialized() + + with self.assertRaises(ValueError) as exception_context: + asyncio.get_event_loop().run_until_complete( + self.exchange._create_order( + trade_type=TradeType.BUY, + order_id="C1", + trading_pair=self.trading_pair, + amount=Decimal("1"), + order_type=OrderType.LIMIT, + price=Decimal("46000"), + position_action=PositionAction.NIL, + ), + ) + + self.assertEqual( + f"Invalid position action {PositionAction.NIL}. 
Must be one of {[PositionAction.OPEN, PositionAction.CLOSE]}", + str(exception_context.exception) + ) + + def test_user_stream_update_for_new_order(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="11", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["11"] + + order_event = self.order_event_for_new_order_websocket_update(order=order) + + mock_queue = AsyncMock() + event_messages = [order_event, asyncio.CancelledError] + mock_queue.get.side_effect = event_messages + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertEqual(1, len(self.buy_order_created_logger.event_log)) + self.assertTrue(order.is_open) + + def test_user_stream_balance_update(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key=self.api_key, + hashkey_perpetual_secret_key=self.api_secret, + trading_pairs=[self.trading_pair], + ) + connector._set_current_timestamp(1640780000) + + balance_event = self.balance_event_websocket_update + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [balance_event, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertEqual(Decimal("474960.65"), self.exchange.available_balances[self.quote_asset]) + self.assertEqual(Decimal("574960.65"), self.exchange.get_balance(self.quote_asset)) + + def test_user_stream_position_update(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key=self.api_key, + hashkey_perpetual_secret_key=self.api_secret, + trading_pairs=[self.trading_pair], + ) + connector._set_current_timestamp(1640780000) + + position_event = self.position_event_websocket_update + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [position_event, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + self._simulate_trading_rules_initialized() + pos_key = self.exchange._perpetual_trading.position_key(self.trading_pair, PositionSide.LONG) + self.exchange.account_positions[pos_key] = Position( + trading_pair=self.trading_pair, + position_side=PositionSide.LONG, + unrealized_pnl=Decimal('1'), + entry_price=Decimal('1'), + amount=Decimal('1'), + leverage=Decimal('1'), + ) + amount_precision = Decimal(self.exchange.trading_rules[self.trading_pair].min_base_amount_increment) + try: + asyncio.get_event_loop().run_until_complete(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertEqual(len(self.exchange.account_positions), 1) + pos = list(self.exchange.account_positions.values())[0] + self.assertEqual(pos.amount, 3000 * amount_precision) + + def test_user_stream_remove_position_update(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key=self.api_key, + 
hashkey_perpetual_secret_key=self.api_secret, + trading_pairs=[self.trading_pair], + ) + connector._set_current_timestamp(1640780000) + + position_event = self.position_event_websocket_update_zero + self._simulate_trading_rules_initialized() + pos_key = self.exchange._perpetual_trading.position_key(self.trading_pair, PositionSide.LONG) + self.exchange.account_positions[pos_key] = Position( + trading_pair=self.trading_pair, + position_side=PositionSide.LONG, + unrealized_pnl=Decimal('1'), + entry_price=Decimal('1'), + amount=Decimal('1'), + leverage=Decimal('1'), + ) + mock_queue = AsyncMock() + mock_queue.get.side_effect = [position_event, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + asyncio.get_event_loop().run_until_complete(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + self.assertEqual(len(self.exchange.account_positions), 0) + + def test_supported_position_modes(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + linear_connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key=self.api_key, + hashkey_perpetual_secret_key=self.api_secret, + trading_pairs=[self.trading_pair], + ) + + expected_result = [PositionMode.HEDGE] + self.assertEqual(expected_result, linear_connector.supported_position_modes()) + + def test_get_buy_and_sell_collateral_tokens(self): + self._simulate_trading_rules_initialized() + buy_collateral_token = self.exchange.get_buy_collateral_token(self.trading_pair) + sell_collateral_token = self.exchange.get_sell_collateral_token(self.trading_pair) + self.assertEqual(self.quote_asset, buy_collateral_token) + self.assertEqual(self.quote_asset, sell_collateral_token) + + @aioresponses() + def test_resolving_trading_pair_symbol_duplicates_on_trading_rules_update_first_is_good(self, mock_api): + self.exchange._set_current_timestamp(1000) + + url = self.trading_rules_url + + response = self.trading_rules_request_mock_response + results = response["contracts"] + duplicate = deepcopy(results[0]) + duplicate["name"] = f"{self.exchange_trading_pair}_12345" + results.append(duplicate) + mock_api.get(url, body=json.dumps(response)) + + self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) + + self.assertEqual(1, len(self.exchange.trading_rules)) + self.assertIn(self.trading_pair, self.exchange.trading_rules) + self.assertEqual(repr(self.expected_trading_rule), repr(self.exchange.trading_rules[self.trading_pair])) + + @aioresponses() + def test_resolving_trading_pair_symbol_duplicates_on_trading_rules_update_second_is_good(self, mock_api): + self.exchange._set_current_timestamp(1000) + + url = self.trading_rules_url + + response = self.trading_rules_request_mock_response + results = response["contracts"] + duplicate = deepcopy(results[0]) + duplicate["name"] = f"{self.exchange_trading_pair}_12345" + results.insert(0, duplicate) + mock_api.get(url, body=json.dumps(response)) + + self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) + + self.assertEqual(1, len(self.exchange.trading_rules)) + self.assertIn(self.trading_pair, self.exchange.trading_rules) + self.assertEqual(repr(self.expected_trading_rule), repr(self.exchange.trading_rules[self.trading_pair])) + + @aioresponses() + def test_update_trading_rules_ignores_rule_with_error(self, mock_api): + # Response only contains valid trading rule + pass + + @aioresponses() + def 
test_cancel_lost_order_raises_failure_event_when_request_fails(self, mock_api): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id="11", + exchange_order_id="4", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn("11", self.exchange.in_flight_orders) + order = self.exchange.in_flight_orders["11"] + + for _ in range(self.exchange._order_tracker._lost_order_count_limit + 1): + self.async_run_with_timeout( + self.exchange._order_tracker.process_order_not_found(client_order_id=order.client_order_id)) + + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + + url = self.configure_erroneous_cancelation_response( + order=order, + mock_api=mock_api, + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.async_run_with_timeout(self.exchange._cancel_lost_orders()) + self.async_run_with_timeout(request_sent_event.wait()) + + cancel_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(cancel_request) + self.validate_order_cancelation_request( + order=order, + request_call=cancel_request) + + self.assertIn(order.client_order_id, self.exchange._order_tracker.lost_orders) + self.assertEqual(0, len(self.order_cancelled_logger.event_log)) + + @aioresponses() + def test_user_stream_update_for_order_full_fill(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + leverage = 2 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + position_action=PositionAction.OPEN, + ) + order = self.exchange.in_flight_orders["OID1"] + + order_event = self.order_event_for_full_fill_websocket_update(order=order) + trade_event = self.trade_event_for_full_fill_websocket_update(order=order) + mock_queue = AsyncMock() + event_messages = [] + if trade_event: + event_messages.append(trade_event) + if order_event: + event_messages.append(order_event) + event_messages.append(asyncio.CancelledError) + mock_queue.get.side_effect = event_messages + self.exchange._user_stream_tracker._user_stream = mock_queue + + if self.is_order_fill_http_update_executed_during_websocket_order_event_processing: + self.configure_full_fill_trade_response( + order=order, + mock_api=mock_api) + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(order.wait_until_completely_filled()) + + fill_event = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(order.price, fill_event.price) + self.assertEqual(order.amount, fill_event.amount) + expected_fee = self.expected_fill_fee + self.assertEqual(expected_fee, fill_event.trade_fee) + self.assertEqual(leverage, fill_event.leverage) + 
self.assertEqual(PositionAction.OPEN.value, fill_event.position) + + buy_event = self.buy_order_completed_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) + self.assertEqual(order.client_order_id, buy_event.order_id) + self.assertEqual(order.base_asset, buy_event.base_asset) + self.assertEqual(order.quote_asset, buy_event.quote_asset) + self.assertEqual(order.amount, buy_event.base_asset_amount) + self.assertEqual(order.amount * fill_event.price, buy_event.quote_asset_amount) + self.assertEqual(order.order_type, buy_event.order_type) + self.assertEqual(order.exchange_order_id, buy_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_filled) + self.assertTrue(order.is_done) + + self.assertTrue( + self.is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." + ) + ) + + @aioresponses() + def test_cancel_order_not_found_in_the_exchange(self, mock_api): + # Disabling this test because the connector has not been updated yet to validate + # order not found during cancellation (check _is_order_not_found_during_cancelation_error) + pass + + @aioresponses() + def test_lost_order_removed_if_not_found_during_order_status_update(self, mock_api): + # Disabling this test because the connector has not been updated yet to validate + # order not found during status update (check _is_order_not_found_during_status_update_error) + pass + + def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Any: + self._simulate_trading_rules_initialized() + return { + "time": "1724071031231", + "updateTime": "1724071031274", + "orderId": order.exchange_order_id, + "clientOrderId": order.client_order_id, + "symbol": self.exchange_trading_pair, + "price": "5050", + "leverage": order.leverage, + "origQty": str(self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount)), + "executedQty": str(self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount)), + "avgPrice": "5000", + "marginLocked": "0", + "type": "LIMIT", + "side": "BUY_OPEN", + "timeInForce": "IOC", + "status": "CANCELED", + "priceType": "INPUT", + "isLiquidationOrder": False, + "indexPrice": "0", + "liquidationType": "" + } + + def _order_status_request_completely_filled_mock_response(self, order: InFlightOrder) -> Any: + self._simulate_trading_rules_initialized() + return { + "time": "1724071031231", + "updateTime": "1724071031274", + "orderId": order.exchange_order_id, + "clientOrderId": order.client_order_id, + "symbol": self.exchange_trading_pair, + "price": "5050", + "leverage": order.leverage, + "origQty": str(self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount)), + "executedQty": str(self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount)), + "avgPrice": "5000", + "marginLocked": "0", + "type": "LIMIT", + "side": "BUY_OPEN", + "timeInForce": "IOC", + "status": "FILLED", + "priceType": "INPUT", + "isLiquidationOrder": False, + "indexPrice": "0", + "liquidationType": "" + } + + def _order_status_request_canceled_mock_response(self, order: InFlightOrder) -> Any: + resp = self._order_cancelation_request_successful_mock_response(order) + return resp + + def _order_status_request_open_mock_response(self, order: InFlightOrder) -> Any: + resp = self._order_status_request_completely_filled_mock_response(order) + resp["status"] = "NEW" + return resp + + def _order_status_request_partially_filled_mock_response(self, order: InFlightOrder) 
-> Any: + resp = self._order_status_request_completely_filled_mock_response(order) + resp["status"] = "PARTIALLY_FILLED" + return resp + + def _order_fills_request_partial_fill_mock_response(self, order: InFlightOrder): + resp = self._order_status_request_completely_filled_mock_response(order) + resp["status"] = "PARTIALLY_FILLED" + return resp + + def _order_fills_request_full_fill_mock_response(self, order: InFlightOrder): + return [ + { + "time": "1723728772839", + "tradeId": "1753158447036129024", + "orderId": order.exchange_order_id, + "symbol": self.exchange_trading_pair, + "price": str(order.price), + "quantity": str(self.exchange.get_quantity_of_contracts(self.trading_pair, order.amount)), + "commissionAsset": order.quote_asset, + "commission": "0", + "makerRebate": "0", + "type": "LIMIT", + "side": f"{'BUY' if order.trade_type == TradeType.BUY else 'SELL'}_{order.position.value}", + "realizedPnl": "0", + "isMaker": True + }, + ] + + @aioresponses() + def test_start_network_update_trading_rules(self, mock_api): + self.exchange._set_current_timestamp(1000) + + url = self.trading_rules_url + + response = self.trading_rules_request_mock_response + results = response["contracts"] + duplicate = deepcopy(results[0]) + duplicate["name"] = f"{self.exchange_trading_pair}_12345" + results.append(duplicate) + mock_api.get(url, body=json.dumps(response)) + + self.async_run_with_timeout(self.exchange.start_network()) + + self.assertEqual(1, len(self.exchange.trading_rules)) + self.assertIn(self.trading_pair, self.exchange.trading_rules) + self.assertEqual(repr(self.expected_trading_rule), repr(self.exchange.trading_rules[self.trading_pair])) + + def place_limit_maker_buy_order( + self, + amount: Decimal = Decimal("100"), + price: Decimal = Decimal("10_000"), + position_action: PositionAction = PositionAction.OPEN, + ): + order_id = self.exchange.buy( + trading_pair=self.trading_pair, + amount=amount, + order_type=OrderType.LIMIT_MAKER, + price=price, + position_action=position_action, + ) + return order_id + + @aioresponses() + def test_create_buy_limit_maker_order_successfully(self, mock_api): + """Open long position""" + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + + creation_response = self.limit_maker_order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + leverage = 2 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_limit_maker_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, + create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT_MAKER, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), + create_event.exchange_order_id) 
+ self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.OPEN.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT_MAKER.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." + ) + ) + + @aioresponses() + def test_update_position_mode( + self, + mock_api: aioresponses, + ): + self._simulate_trading_rules_initialized() + get_position_url = web_utils.rest_url( + path_url=CONSTANTS.POSITION_INFORMATION_URL + ) + regex_get_position_url = re.compile(f"^{get_position_url}") + response = [ + { + "symbol": "BTCUSDT-PERPETUAL", + "side": "SHORT", + "avgPrice": "3366.01", + "position": "200030", + "available": "200030", + "leverage": "10", + "lastPrice": "2598.09", + "positionValue": "673303.6", + "liquidationPrice": "9553.83", + "margin": "105389.3738", + "marginRate": "", + "unrealizedPnL": "152047.5663", + "profitRate": "1.4427", + "realizedPnL": "-215.2107", + "minMargin": "38059.0138" + }, + ] + mock_api.get(regex_get_position_url, body=json.dumps(response)) + self.async_run_with_timeout(self.exchange._update_positions()) + + pos_key = self.exchange._perpetual_trading.position_key(self.trading_pair, PositionSide.SHORT) + position: Position = self.exchange.account_positions[pos_key] + self.assertEqual(self.trading_pair, position.trading_pair) + self.assertEqual(PositionSide.SHORT, position.position_side) + + get_position_url = web_utils.rest_url( + path_url=CONSTANTS.POSITION_INFORMATION_URL + ) + regex_get_position_url = re.compile(f"^{get_position_url}") + response = [ + { + "symbol": "BTCUSDT-PERPETUAL", + "side": "LONG", + "avgPrice": "3366.01", + "position": "200030", + "available": "200030", + "leverage": "10", + "lastPrice": "2598.09", + "positionValue": "673303.6", + "liquidationPrice": "9553.83", + "margin": "105389.3738", + "marginRate": "", + "unrealizedPnL": "152047.5663", + "profitRate": "1.4427", + "realizedPnL": "-215.2107", + "minMargin": "38059.0138" + }, + ] + mock_api.get(regex_get_position_url, body=json.dumps(response)) + self.async_run_with_timeout(self.exchange._update_positions()) + position: Position = self.exchange.account_positions[f"{self.trading_pair}LONG"] + self.assertEqual(self.trading_pair, position.trading_pair) + self.assertEqual(PositionSide.LONG, position.position_side) + + @aioresponses() + def test_set_position_mode_success(self, mock_api): + # There's only HEDGE position mode + pass + + @aioresponses() + def test_set_position_mode_failure(self, mock_api): + # There's only HEDGE position mode + pass + + @aioresponses() + def test_listen_for_funding_info_update_initializes_funding_info(self, mock_api: aioresponses): + mock_api.get(self.funding_info_url, body=json.dumps(self.funding_rate_mock_response), repeat=True) + mock_api.get(self.mark_price_url, body=json.dumps(self.mark_price_mock_response), repeat=True) + mock_api.get(self.index_price_url, body=json.dumps(self.index_price_mock_response), repeat=True) + + try: + self.async_run_with_timeout(self.exchange._listen_for_funding_info()) + except asyncio.TimeoutError: + pass + + funding_info: FundingInfo = self.exchange.get_funding_info(self.trading_pair) + + self.assertEqual(self.trading_pair, funding_info.trading_pair) + self.assertEqual(self.target_funding_info_index_price, funding_info.index_price) + self.assertEqual(self.target_funding_info_mark_price, funding_info.mark_price) + self.assertEqual( + 
self.target_funding_info_next_funding_utc_timestamp, funding_info.next_funding_utc_timestamp + ) + self.assertEqual(self.target_funding_info_rate, funding_info.rate) + + @aioresponses() + def test_listen_for_funding_info_update_updates_funding_info(self, mock_api: aioresponses): + # Hashkey global not support update funding info by websocket + pass diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_user_stream_data_source.py b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_user_stream_data_source.py new file mode 100644 index 0000000000..ddd77d2952 --- /dev/null +++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_user_stream_data_source.py @@ -0,0 +1,351 @@ +import asyncio +import json +import re +import unittest +from typing import Any, Awaitable, Dict, Optional +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.derivative.hashkey_perpetual import ( + hashkey_perpetual_constants as CONSTANTS, + hashkey_perpetual_web_utils as web_utils, +) +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_auth import HashkeyPerpetualAuth +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_derivative import HashkeyPerpetualDerivative +from hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_user_stream_data_source import ( + HashkeyPerpetualUserStreamDataSource, +) +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler + + +class HashkeyPerpetualUserStreamDataSourceUnitTests(unittest.TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "ETH" + cls.quote_asset = "USDT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}-PERPETUAL" + cls.domain = CONSTANTS.DEFAULT_DOMAIN + + cls.listen_key = "TEST_LISTEN_KEY" + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.listening_task: Optional[asyncio.Task] = None + self.mocking_assistant = NetworkMockingAssistant() + + self.throttler = AsyncThrottler(rate_limits=CONSTANTS.RATE_LIMITS) + self.mock_time_provider = MagicMock() + self.mock_time_provider.time.return_value = 1000 + self.auth = HashkeyPerpetualAuth(api_key="TEST_API_KEY", secret_key="TEST_SECRET", time_provider=self.mock_time_provider) + self.time_synchronizer = TimeSynchronizer() + self.time_synchronizer.add_time_offset_ms_sample(0) + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = HashkeyPerpetualDerivative( + client_config_map=client_config_map, + hashkey_perpetual_api_key="", + hashkey_perpetual_secret_key="", + trading_pairs=[], + trading_required=False, + domain=self.domain) + self.connector._web_assistants_factory._auth = self.auth + + self.data_source = HashkeyPerpetualUserStreamDataSource( + auth=self.auth, + trading_pairs=[self.trading_pair], + connector=self.connector, + api_factory=self.connector._web_assistants_factory, 
+ domain=self.domain + ) + + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self.resume_test_event = asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.listening_task and self.listening_task.cancel() + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _raise_exception(self, exception_class): + raise exception_class + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def _create_return_value_and_unlock_test_with_event(self, value): + self.resume_test_event.set() + return value + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def _error_response(self) -> Dict[str, Any]: + resp = { + "code": "ERROR CODE", + "msg": "ERROR MESSAGE" + } + + return resp + + def _successfully_subscribed_event(self): + resp = { + "result": None, + "id": 1 + } + return resp + + @aioresponses() + def test_get_listen_key_log_exception(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, status=400, body=json.dumps(self._error_response())) + + with self.assertRaises(IOError): + self.async_run_with_timeout(self.data_source._get_listen_key()) + + @aioresponses() + def test_get_listen_key_successful(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + result: str = self.async_run_with_timeout(self.data_source._get_listen_key()) + + self.assertEqual(self.listen_key, result) + + @aioresponses() + def test_ping_listen_key_log_exception(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.put(regex_url, status=400, body=json.dumps(self._error_response())) + + self.data_source._current_listen_key = self.listen_key + result: bool = self.async_run_with_timeout(self.data_source._ping_listen_key()) + + self.assertTrue(self._is_logged("WARNING", f"Failed to refresh the listen key {self.listen_key}: " + f"{self._error_response()}")) + self.assertFalse(result) + + @aioresponses() + def test_ping_listen_key_successful(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.put(regex_url, body=json.dumps({})) + + self.data_source._current_listen_key = self.listen_key + result: bool = self.async_run_with_timeout(self.data_source._ping_listen_key()) + self.assertTrue(result) + + @patch("hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_user_stream_data_source.HashkeyPerpetualUserStreamDataSource" + "._ping_listen_key", + new_callable=AsyncMock) + def 
test_manage_listen_key_task_loop_keep_alive_failed(self, mock_ping_listen_key): + mock_ping_listen_key.side_effect = (lambda *args, **kwargs: + self._create_return_value_and_unlock_test_with_event(False)) + + self.data_source._current_listen_key = self.listen_key + + # Simulate LISTEN_KEY_KEEP_ALIVE_INTERVAL reached + self.data_source._last_listen_key_ping_ts = 0 + + self.listening_task = self.ev_loop.create_task(self.data_source._manage_listen_key_task_loop()) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue(self._is_logged("ERROR", "Error occurred renewing listen key ...")) + self.assertIsNone(self.data_source._current_listen_key) + self.assertFalse(self.data_source._listen_key_initialized_event.is_set()) + + @patch("hummingbot.connector.derivative.hashkey_perpetual.hashkey_perpetual_user_stream_data_source.HashkeyPerpetualUserStreamDataSource." + "_ping_listen_key", + new_callable=AsyncMock) + def test_manage_listen_key_task_loop_keep_alive_successful(self, mock_ping_listen_key): + mock_ping_listen_key.side_effect = (lambda *args, **kwargs: + self._create_return_value_and_unlock_test_with_event(True)) + + # Simulate LISTEN_KEY_KEEP_ALIVE_INTERVAL reached + self.data_source._current_listen_key = self.listen_key + self.data_source._listen_key_initialized_event.set() + self.data_source._last_listen_key_ping_ts = 0 + + self.listening_task = self.ev_loop.create_task(self.data_source._manage_listen_key_task_loop()) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue(self._is_logged("INFO", f"Refreshed listen key {self.listen_key}.")) + self.assertGreater(self.data_source._last_listen_key_ping_ts, 0) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_empty_payload(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, "") + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(0, msg_queue.qsize()) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_connection_failed(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_ws.side_effect = lambda *arg, **kwars: self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR.")) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged("ERROR", + "Unexpected error while listening to user stream. 
Retrying after 5 seconds...")) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_iter_message_throws_exception(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + msg_queue: asyncio.Queue = asyncio.Queue() + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + mock_ws.return_value.receive.side_effect = (lambda *args, **kwargs: + self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR"))) + mock_ws.close.return_value = None + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error while listening to user stream. Retrying after 5 seconds...")) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_pong_payload(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_pong = { + "pong": "1545910590801" + } + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(mock_pong)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(1, msg_queue.qsize()) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_ticket_info(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + ticket_info = [ + { + "e": "ticketInfo", # Event type + "E": "1668693440976", # Event time + "s": "BTCUSDT", # Symbol + "q": "0.001639", # quantity + "t": "1668693440899", # time + "p": "61000.0", # price + "T": "899062000267837441", # ticketId + "o": "899048013515737344", # orderId + "c": "1621910874883", # clientOrderId + "O": "899062000118679808", # matchOrderId + "a": "10086", # accountId + "A": 0, # ignore + "m": True, # isMaker + "S": "BUY", # side SELL or BUY + } + ] + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(ticket_info)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(1, msg_queue.qsize()) diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_utils.py 
b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_utils.py
new file mode 100644
index 0000000000..1bbc2dfb14
--- /dev/null
+++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_utils.py
@@ -0,0 +1,85 @@
+from unittest import TestCase
+
+from hummingbot.connector.derivative.hashkey_perpetual import hashkey_perpetual_utils as utils
+
+
+class HashkeyPerpetualUtilsTests(TestCase):
+    def test_is_exchange_information_valid(self):
+        exchange_info = {
+            "symbol": "ETHUSDT-PERPETUAL",
+            "symbolName": "ETHUSDT-PERPETUAL",
+            "status": "TRADING",
+            "baseAsset": "ETHUSDT-PERPETUAL",
+            "baseAssetName": "ETHUSDT-PERPETUAL",
+            "baseAssetPrecision": "0.001",
+            "quoteAsset": "USDT",
+            "quoteAssetName": "USDT",
+            "quotePrecision": "0.00000001",
+            "retailAllowed": False,
+            "piAllowed": False,
+            "corporateAllowed": False,
+            "omnibusAllowed": False,
+            "icebergAllowed": False,
+            "isAggregate": False,
+            "allowMargin": False,
+            "filters": [
+                {
+                    "minPrice": "0.01",
+                    "maxPrice": "100000.00000000",
+                    "tickSize": "0.01",
+                    "filterType": "PRICE_FILTER"
+                },
+                {
+                    "minQty": "0.001",
+                    "maxQty": "50",
+                    "stepSize": "0.001",
+                    "marketOrderMinQty": "0",
+                    "marketOrderMaxQty": "0",
+                    "filterType": "LOT_SIZE"
+                },
+                {
+                    "minNotional": "0",
+                    "filterType": "MIN_NOTIONAL"
+                },
+                {
+                    "maxSellPrice": "99999",
+                    "buyPriceUpRate": "0.05",
+                    "sellPriceDownRate": "0.05",
+                    "maxEntrustNum": 200,
+                    "maxConditionNum": 200,
+                    "filterType": "LIMIT_TRADING"
+                },
+                {
+                    "buyPriceUpRate": "0.05",
+                    "sellPriceDownRate": "0.05",
+                    "filterType": "MARKET_TRADING"
+                },
+                {
+                    "noAllowMarketStartTime": "0",
+                    "noAllowMarketEndTime": "0",
+                    "limitOrderStartTime": "0",
+                    "limitOrderEndTime": "0",
+                    "limitMinPrice": "0",
+                    "limitMaxPrice": "0",
+                    "filterType": "OPEN_QUOTE"
+                }
+            ]
+        }
+
+        self.assertTrue(utils.is_exchange_information_valid(exchange_info))
+
+        exchange_info["status"] = "Closed"
+
+        self.assertFalse(utils.is_exchange_information_valid(exchange_info))
+
+        del exchange_info["status"]
+
+        self.assertFalse(utils.is_exchange_information_valid(exchange_info))
+
+    def test_is_linear_perpetual(self):
+        self.assertTrue(utils.is_linear_perpetual("BTC-USDT"))
+        self.assertFalse(utils.is_linear_perpetual("BTC-USD"))
+
+    def test_get_next_funding_timestamp(self):
+        current_timestamp = 1626192000.0
+        self.assertEqual(utils.get_next_funding_timestamp(current_timestamp), 1626220800.0)
diff --git a/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_web_utils.py b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_web_utils.py
new file mode 100644
index 0000000000..8c4db9d074
--- /dev/null
+++ b/test/hummingbot/connector/derivative/hashkey_perpetual/test_hashkey_perpetual_web_utils.py
@@ -0,0 +1,22 @@
+import unittest
+
+from hummingbot.connector.derivative.hashkey_perpetual import (
+    hashkey_perpetual_constants as CONSTANTS,
+    hashkey_perpetual_web_utils as web_utils,
+)
+from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory
+
+
+class HashkeyPerpetualWebUtilsTest(unittest.TestCase):
+
+    def test_public_rest_url(self):
+        url = web_utils.rest_url(CONSTANTS.SNAPSHOT_PATH_URL)
+        self.assertEqual("https://api-glb.hashkey.com/quote/v1/depth", url)
+
+    def test_build_api_factory(self):
+        api_factory = web_utils.build_api_factory()
+
+        self.assertIsInstance(api_factory, WebAssistantsFactory)
+        self.assertIsNone(api_factory._auth)
+
+        self.assertEqual(2, len(api_factory._rest_pre_processors))
diff --git 
a/test/hummingbot/connector/derivative/hyperliquid_perpetual/test_hyperliquid_perpetual_derivative.py b/test/hummingbot/connector/derivative/hyperliquid_perpetual/test_hyperliquid_perpetual_derivative.py index 534799cb1d..136e394fa0 100644 --- a/test/hummingbot/connector/derivative/hyperliquid_perpetual/test_hyperliquid_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/hyperliquid_perpetual/test_hyperliquid_perpetual_derivative.py @@ -25,6 +25,7 @@ from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState, OrderUpdate from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase +from hummingbot.core.event.events import BuyOrderCreatedEvent, SellOrderCreatedEvent from hummingbot.core.network_iterator import NetworkStatus @@ -1566,3 +1567,188 @@ def _simulate_trading_rules_initialized(self): min_base_amount_increment=Decimal(str(0.000001)), ) } + + @aioresponses() + def test_create_buy_limit_order_successfully(self, mock_api): + """Open long position""" + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + leverage = 2 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, + create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100.000000"), create_event.amount) + self.assertEqual(Decimal("10000.0000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), + create_event.exchange_order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.OPEN.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000')}." 
+ ) + ) + + @aioresponses() + def test_create_order_to_close_long_position(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + leverage = 5 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_sell_order(position_action=PositionAction.CLOSE) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: SellOrderCreatedEvent = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), create_event.exchange_order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.CLOSE.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000')}." 
+ ) + ) + + @aioresponses() + def test_create_order_to_close_short_position(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + leverage = 4 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_buy_order(position_action=PositionAction.CLOSE) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, + create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), + create_event.exchange_order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.CLOSE.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000')}." 
+ ) + ) + + @aioresponses() + def test_create_sell_limit_order_successfully(self, mock_api): + """Open short position""" + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + leverage = 3 + self.exchange._perpetual_trading.set_leverage(self.trading_pair, leverage) + order_id = self.place_sell_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: SellOrderCreatedEvent = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), create_event.exchange_order_id) + self.assertEqual(leverage, create_event.leverage) + self.assertEqual(PositionAction.OPEN.value, create_event.position) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000')}." 
+ ) + ) diff --git a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_delegated_account.py b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_delegated_account.py index 12cc6b8ca1..e6a4ba1e2b 100644 --- a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_delegated_account.py +++ b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_delegated_account.py @@ -256,6 +256,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) return ("INVALID_MARKET", response) @@ -296,6 +297,7 @@ def trading_rules_request_erroneous_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=None, min_quantity_tick_size=None, + min_notional=None, ) return {native_market.id: native_market} @@ -303,7 +305,8 @@ def trading_rules_request_erroneous_mock_response(self): @property def order_creation_request_successful_mock_response(self): return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock - "rawLog": "[]"} # noqa: mock + "rawLog": "[]", + "code": 0} # noqa: mock @property def balance_request_mock_response_for_base_and_quote(self): @@ -409,12 +412,14 @@ def expected_trading_rule(self): min_price_tick_size = (market.min_price_tick_size * Decimal(f"1e{-market.quote_token.decimals}")) min_quantity_tick_size = market.min_quantity_tick_size + min_notional = market.min_notional * Decimal(f"1e{-market.quote_token.decimals}") trading_rule = TradingRule( trading_pair=self.trading_pair, min_order_size=min_quantity_tick_size, min_price_increment=min_price_tick_size, min_base_amount_increment=min_quantity_tick_size, min_quote_amount_increment=min_price_tick_size, + min_notional_size=min_notional, ) return trading_rule @@ -486,6 +491,7 @@ def all_spot_markets_mock_response(self) -> Dict[str, SpotMarket]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -518,6 +524,7 @@ def all_derivative_markets_mock_response(self) -> Dict[str, DerivativeMarket]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -1284,7 +1291,7 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -1327,7 +1334,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -2320,6 +2327,7 @@ def 
test_listen_for_funding_info_update_initializes_funding_info(self): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } ) @@ -2444,6 +2452,7 @@ def test_listen_for_funding_info_update_updates_funding_info(self): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } ) @@ -3223,11 +3232,13 @@ def _msg_exec_simulation_mock_response(self) -> Any: def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock - "rawLog": "[]"} + "rawLog": "[]", + "code": 0} def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock - "rawLog": "Error"} + "rawLog": "Error", + "code": 11} def _order_status_request_open_mock_response(self, order: GatewayPerpetualInFlightOrder) -> Dict[str, Any]: return { diff --git a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_offchain_vault.py b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_offchain_vault.py index 176eb69151..8db7244ea7 100644 --- a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_offchain_vault.py +++ b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_derivative_for_offchain_vault.py @@ -215,6 +215,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) return ("INVALID_MARKET", response) @@ -255,6 +256,7 @@ def trading_rules_request_erroneous_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=None, min_quantity_tick_size=None, + min_notional=None, ) return {native_market.id: native_market} @@ -262,7 +264,8 @@ def trading_rules_request_erroneous_mock_response(self): @property def order_creation_request_successful_mock_response(self): return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock" - "rawLog": "[]"} + "rawLog": "[]", + "code": 0} @property def balance_request_mock_response_for_base_and_quote(self): @@ -363,12 +366,14 @@ def expected_trading_rule(self): min_price_tick_size = (market.min_price_tick_size * Decimal(f"1e{-market.quote_token.decimals}")) min_quantity_tick_size = market.min_quantity_tick_size + min_notional = market.min_notional * Decimal(f"1e{-market.quote_token.decimals}") trading_rule = TradingRule( trading_pair=self.trading_pair, min_order_size=min_quantity_tick_size, min_price_increment=min_price_tick_size, min_base_amount_increment=min_quantity_tick_size, min_quote_amount_increment=min_price_tick_size, + min_notional_size=min_notional, ) return trading_rule @@ -440,6 +445,7 @@ def all_spot_markets_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -472,6 +478,7 @@ def all_derivative_markets_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), 
min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -1173,7 +1180,7 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -1216,7 +1223,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -2250,6 +2257,7 @@ def test_listen_for_funding_info_update_initializes_funding_info(self): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } ) @@ -2374,6 +2382,7 @@ def test_listen_for_funding_info_update_updates_funding_info(self): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } ) @@ -3043,10 +3052,10 @@ def _orders_creation_transaction_response(self, orders: List[GatewayPerpetualInF return transaction_response def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: - return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "[]"} # noqa: mock + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "[]", "code": 0} # noqa: mock def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: - return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "Error"} # noqa: mock + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "Error", "code": 11} # noqa: mock def _order_status_request_partially_filled_mock_response(self, order: GatewayPerpetualInFlightOrder) -> Dict[str, Any]: return { diff --git a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_order_book_data_source.py b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_order_book_data_source.py index d23ec7cced..cd38eaf95c 100644 --- a/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_order_book_data_source.py +++ b/test/hummingbot/connector/derivative/injective_v2_perpetual/test_injective_v2_perpetual_order_book_data_source.py @@ -621,6 +621,7 @@ def test_listen_for_funding_info_logs_exception(self, _): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } ) @@ -766,6 +767,7 @@ def test_listen_for_funding_info_successful(self, _): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } self.query_executor._derivative_market_responses.put_nowait(derivative_market_info) @@ -912,6 +914,7 @@ def test_get_funding_info(self): "cumulativePrice": "1.432536051546776736", "lastTimestamp": "1689423842" }, + "minNotional": "1000000", } } self.query_executor._derivative_market_responses.put_nowait(derivative_market_info) 
@@ -961,6 +964,7 @@ def _spot_markets_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -992,6 +996,7 @@ def _derivative_markets_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} diff --git a/test/hummingbot/connector/derivative/kucoin_perpetual/test_kucoin_perpetual_derivative.py b/test/hummingbot/connector/derivative/kucoin_perpetual/test_kucoin_perpetual_derivative.py index 5a39966907..f002fda1ef 100644 --- a/test/hummingbot/connector/derivative/kucoin_perpetual/test_kucoin_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/kucoin_perpetual/test_kucoin_perpetual_derivative.py @@ -1184,13 +1184,14 @@ def test_get_buy_and_sell_collateral_tokens(self): self.assertEqual(self.non_linear_quote_asset, non_linear_buy_collateral_token) self.assertEqual(self.non_linear_quote_asset, non_linear_sell_collateral_token) - def test_time_synchronizer_related_reqeust_error_detection(self): - error_code_str = self.exchange._format_ret_code_for_print(ret_code=CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR) - exception = IOError(f"{error_code_str} - Failed to cancel order for timestamp reason.") + def test_time_synchronizer_related_request_error_detection(self): + error_code = CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR + response = {"code": error_code, "msg": "Invalid KC-API-TIMESTAMP"} + exception = IOError(f"Error executing request GET https://someurl. HTTP status is 400. Error: {json.dumps(response)}") self.assertTrue(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) - error_code_str = self.exchange._format_ret_code_for_print(ret_code=CONSTANTS.RET_CODE_ORDER_NOT_EXISTS) - exception = IOError(f"{error_code_str} - Failed to cancel order because it was not found.") + error_code = CONSTANTS.RET_CODE_ORDER_NOT_EXISTS + exception = IOError(f"{error_code} - Failed to cancel order because it was not found.") self.assertFalse(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) def place_buy_limit_maker_order( diff --git a/test/hummingbot/connector/derivative/okx_perpetual/test_okx_perpetual_derivative.py b/test/hummingbot/connector/derivative/okx_perpetual/test_okx_perpetual_derivative.py index 1807d0b516..65ec5ada59 100644 --- a/test/hummingbot/connector/derivative/okx_perpetual/test_okx_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/okx_perpetual/test_okx_perpetual_derivative.py @@ -2389,7 +2389,8 @@ def test_create_order_to_close_short_position(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) @@ -2433,7 +2434,8 @@ def test_create_order_to_close_long_position(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.CLOSE.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." 
) ) @@ -2482,7 +2484,8 @@ def test_create_buy_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) @@ -2572,7 +2575,8 @@ def test_create_sell_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position." + f"{Decimal('100.000000')} to {PositionAction.OPEN.name} a {self.trading_pair} position " + f"at {Decimal('10000.0000')}." ) ) diff --git a/test/hummingbot/connector/exchange/ascend_ex/test_ascend_ex_exchange.py b/test/hummingbot/connector/exchange/ascend_ex/test_ascend_ex_exchange.py index f608ec1d0e..a72c59e577 100644 --- a/test/hummingbot/connector/exchange/ascend_ex/test_ascend_ex_exchange.py +++ b/test/hummingbot/connector/exchange/ascend_ex/test_ascend_ex_exchange.py @@ -1112,6 +1112,6 @@ def test_create_buy_market_order_successfully(self, mock_api, get_price_mock): self.is_logged( "INFO", f"Created {OrderType.MARKET.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}." + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000')}." ) ) diff --git a/test/hummingbot/connector/exchange/bitmart/test_bitmart_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/bitmart/test_bitmart_api_user_stream_data_source.py index c1d8d139ef..bd0dcf980b 100644 --- a/test/hummingbot/connector/exchange/bitmart/test_bitmart_api_user_stream_data_source.py +++ b/test/hummingbot/connector/exchange/bitmart/test_bitmart_api_user_stream_data_source.py @@ -260,11 +260,11 @@ def test_subscribe_channels_raises_cancel_exception(self): self.data_source._subscribe_channels(ws_assistant)) self.ev_loop.run_until_complete(self.listening_task) - # @unittest.skip("Test with error") @patch('aiohttp.ClientSession.ws_connect', new_callable=AsyncMock) @patch("hummingbot.core.data_type.user_stream_tracker_data_source.UserStreamTrackerDataSource._sleep") def test_listening_process_logs_exception_during_events_subscription(self, sleep_mock, mock_ws): - self.connector._set_trading_pair_symbol_map({}) + # This is to force a KeyError in _subscribe_channels + self.connector._set_trading_pair_symbol_map(bidict({'some-pair': 'some-pair'})) messages = asyncio.Queue() sleep_mock.side_effect = asyncio.CancelledError diff --git a/test/connector/exchange/hitbtc/__init__.py b/test/hummingbot/connector/exchange/bitstamp/__init__.py similarity index 100% rename from test/connector/exchange/hitbtc/__init__.py rename to test/hummingbot/connector/exchange/bitstamp/__init__.py diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_order_book_data_source.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_order_book_data_source.py new file mode 100644 index 0000000000..79c2002e11 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_order_book_data_source.py @@ -0,0 +1,455 @@ +import asyncio +import json +import re +from typing import Awaitable +from unittest import TestCase +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses.core import aioresponses +from bidict import bidict + +from 
hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.bitstamp import bitstamp_constants as CONSTANTS, bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_api_order_book_data_source import BitstampAPIOrderBookDataSource +from hummingbot.connector.exchange.bitstamp.bitstamp_exchange import BitstampExchange +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessage + + +class BitstampApiOrderBookDataSourceTests(TestCase): + # logging.Level required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "COINALPHA" + cls.quote_asset = "HBOT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.domain = "" + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.listening_task = None + self.mocking_assistant = NetworkMockingAssistant() + self.mock_time_provider = MagicMock() + self.mock_time_provider.time.return_value = 1000 + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = BitstampExchange( + client_config_map=client_config_map, + bitstamp_api_key="", + bitstamp_api_secret="", + trading_pairs=[], + trading_required=False, + domain=self.domain, + time_provider=self.mock_time_provider) + self.data_source = BitstampAPIOrderBookDataSource(trading_pairs=[self.trading_pair], + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + domain=self.domain) + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self._original_full_order_book_reset_time = self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = -1 + + self.resume_test_event = asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.listening_task and self.listening_task.cancel() + self.data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = self._original_full_order_book_reset_time + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def _successfully_subscribed_event(self): + resp = { + "result": None, + "id": 1 + } + return resp + + def _trade_update_event(self): + resp = { + "data": { + "id": 345726198, + "timestamp": "1719272808", + "amount": 0.00015112, + "amount_str": "0.00015112", + "price": 60331, + "price_str": "60331", + "type": 0, + "microtimestamp": "1719272808613000", + "buy_order_id": 1763073367883776, + "sell_order_id": 1763073362448385}, + "channel": "live_trades_COINALPHAHBOT", + "event": "trade" + } + return resp + + def _order_diff_event(self): + resp = { 
+ "data": { + "timestamp": "1719273313", + "microtimestamp": "1719273313441554", + "bids": [ + ["60362", "0.11602627"] + ], + "asks": [ + ["60341", "0.22347000"] + ] + }, + "channel": "diff_order_book_COINALPHAHBOT", + "event": "data" + } + return resp + + def _snapshot_response(self): + resp = { + "asks": [ + ["4.000002", "12"] + ], + "bids": [ + ["4", "431"] + ], + "microtimestamp": "1643643584684047", + "timestamp": "1643643584" + } + return resp + + @aioresponses() + def test_get_new_order_book_successful(self, mock_api): + url = web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(self.ex_trading_pair), domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?") + ".*") + + resp = self._snapshot_response() + + mock_api.get(regex_url, body=json.dumps(resp)) + + order_book: OrderBook = self.async_run_with_timeout( + self.data_source.get_new_order_book(self.trading_pair) + ) + + expected_update_id = float(resp["timestamp"]) + + self.assertEqual(expected_update_id, order_book.snapshot_uid) + bids = list(order_book.bid_entries()) + asks = list(order_book.ask_entries()) + self.assertEqual(1, len(bids)) + self.assertEqual(4, bids[0].price) + self.assertEqual(431, bids[0].amount) + self.assertEqual(expected_update_id, bids[0].update_id) + self.assertEqual(1, len(asks)) + self.assertEqual(4.000002, asks[0].price) + self.assertEqual(12, asks[0].amount) + self.assertEqual(expected_update_id, asks[0].update_id) + + @aioresponses() + def test_get_new_order_book_raises_exception(self, mock_api): + url = web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(self.ex_trading_pair), domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, status=400) + with self.assertRaises(IOError): + self.async_run_with_timeout( + self.data_source.get_new_order_book(self.trading_pair) + ) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_subscriptions_subscribes_to_trades_and_order_diffs(self, ws_connect_mock): + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + result_subscribe_trades = { + "event": "bts:subscription_succeeded", + "channel": CONSTANTS.WS_PUBLIC_LIVE_TRADES.format(self.ex_trading_pair), + "data": {} + } + result_subscribe_diffs = { + "event": "bts:subscription_succeeded", + "channel": CONSTANTS.WS_PUBLIC_DIFF_ORDER_BOOK.format(self.ex_trading_pair), + "data": {} + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_trades)) + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_diffs)) + + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + + sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=ws_connect_mock.return_value) + + self.assertEqual(2, len(sent_subscription_messages)) + expected_trade_subscription = { + 'data': { + 'channel': CONSTANTS.WS_PUBLIC_LIVE_TRADES.format(self.ex_trading_pair) + }, + 'event': 'bts:subscribe' + } + self.assertEqual(expected_trade_subscription, sent_subscription_messages[0]) + expected_diff_subscription = { + 'data': { + 'channel': CONSTANTS.WS_PUBLIC_DIFF_ORDER_BOOK.format(self.ex_trading_pair) + 
}, + 'event': 'bts:subscribe' + } + self.assertEqual(expected_diff_subscription, sent_subscription_messages[1]) + + self.assertTrue(self._is_logged( + "INFO", + "Subscribed to public order book and trade channels...")) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_subscriptions_subscribes_to_trades_and_order_diffs2(self, ws_connect_mock): + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + reconnect_event = { + "event": "bts:request_reconnect", + "channel": "", + "data": "" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(reconnect_event)) + + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + + self.assertTrue(self._is_logged( + "WARNING", + "The websocket connection was closed (Received request to reconnect. Reconnecting...)")) + self.assertTrue(self._is_logged( + "INFO", + "Subscribed to public order book and trade channels...")) + + @patch("aiohttp.ClientSession.ws_connect") + def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws: AsyncMock): + mock_ws.side_effect = asyncio.CancelledError + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + self.async_run_with_timeout(self.listening_task) + + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock): + mock_ws.side_effect = Exception("TEST ERROR.") + sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event(asyncio.CancelledError()) + + self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions()) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error occurred when listening to order book streams. 
Retrying in 5 seconds...")) + + def test_subscribe_channels_raises_cancel_exception(self): + mock_ws = MagicMock() + mock_ws.send.side_effect = asyncio.CancelledError + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.data_source._subscribe_channels(mock_ws)) + self.async_run_with_timeout(self.listening_task) + + def test_subscribe_channels_raises_exception_and_logs_error(self): + mock_ws = MagicMock() + mock_ws.send.side_effect = Exception("Test Error") + + with self.assertRaises(Exception): + self.listening_task = self.ev_loop.create_task(self.data_source._subscribe_channels(mock_ws)) + self.async_run_with_timeout(self.listening_task) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error occurred subscribing to order book trading and delta streams...") + ) + + def test_listen_for_trades_cancelled_when_listening(self): + mock_queue = MagicMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_trades_logs_exception(self): + incomplete_resp = { + "data": {}, + "channel": "live_trades_COINALPHAHBOT", + "event": "trade" + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + + try: + self.async_run_with_timeout(self.listening_task) + except asyncio.CancelledError: + pass + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error when processing public trade updates from exchange")) + + def test_listen_for_trades_successful(self): + mock_queue = AsyncMock() + mock_queue.get.side_effect = [self._trade_update_event(), asyncio.CancelledError()] + self.data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_trades(self.ev_loop, msg_queue)) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(str(345726198), msg.trade_id) + + def test_listen_for_order_book_diffs_cancelled(self): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_order_book_diffs_logs_exception(self): + incomplete_resp = { + "data": {}, + "channel": "diff_order_book_COINALPHAHBOT", + "event": "data" + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + 
self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue) + ) + + try: + self.async_run_with_timeout(self.listening_task) + except asyncio.CancelledError: + pass + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error when processing public order book updates from exchange")) + + def test_listen_for_order_book_diffs_successful(self): + mock_queue = AsyncMock() + diff_event = self._order_diff_event() + mock_queue.get.side_effect = [diff_event, asyncio.CancelledError()] + self.data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue)) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(float(diff_event["data"]["timestamp"]), msg.update_id) + + @aioresponses() + def test_listen_for_order_book_snapshots_cancelled_when_fetching_snapshot(self, mock_api): + url = web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(self.ex_trading_pair), domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, exception=asyncio.CancelledError, repeat=True) + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout( + self.data_source.listen_for_order_book_snapshots(self.ev_loop, asyncio.Queue()) + ) + + @aioresponses() + @patch("hummingbot.connector.exchange.bitstamp.bitstamp_api_order_book_data_source" + ".BitstampAPIOrderBookDataSource._sleep") + def test_listen_for_order_book_snapshots_log_exception(self, mock_api, sleep_mock): + msg_queue: asyncio.Queue = asyncio.Queue() + sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event(asyncio.CancelledError()) + + url = web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(self.ex_trading_pair), domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, exception=Exception, repeat=True) + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged( + "ERROR", + f"Unexpected error fetching order book snapshot for {self.trading_pair}.")) + + @aioresponses() + def test_listen_for_order_book_snapshots_successful(self, mock_api): + msg_queue: asyncio.Queue = asyncio.Queue() + url = web_utils.public_rest_url(path_url=CONSTANTS.ORDER_BOOK_URL.format(self.ex_trading_pair), domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, body=json.dumps(self._snapshot_response()), repeat=True) + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(1643643584, msg.update_id) diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_user_stream_data_source.py new file mode 100644 index 0000000000..69f9127e84 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_api_user_stream_data_source.py @@ -0,0 +1,306 @@ +import asyncio +import json +import re +from typing import Awaitable, Optional +from unittest 
import TestCase +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.bitstamp import bitstamp_constants as CONSTANTS, bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_api_user_stream_data_source import BitstampAPIUserStreamDataSource +from hummingbot.connector.exchange.bitstamp.bitstamp_exchange import BitstampExchange +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant + + +class BitstampUserStreamDataSourceTests(TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "COINALPHA" + cls.quote_asset = "HBOT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.domain = "" + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.listening_task: Optional[asyncio.Task] = None + self.mocking_assistant = NetworkMockingAssistant() + self.mock_time_provider = MagicMock() + self.mock_time_provider.time.return_value = 1000 + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = BitstampExchange( + client_config_map=client_config_map, + bitstamp_api_key="TEST_API_KEY", + bitstamp_api_secret="TEST_SECRET", + trading_pairs=[], + trading_required=False, + domain=self.domain, + time_provider=self.mock_time_provider + ) + + self.data_source = BitstampAPIUserStreamDataSource( + auth=self.connector.authenticator, + trading_pairs=[self.trading_pair], + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + domain=self.domain + ) + + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self.resume_test_event = asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.listening_task and self.listening_task.cancel() + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _raise_exception(self, exception_class): + raise exception_class + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def _create_return_value_and_unlock_test_with_event(self, value): + self.resume_test_event.set() + return value + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def _authentication_response(self, user_id: int) -> str: + message = { + "token": "some-token", + "user_id": user_id, + "valid_sec": 60 + } + + return json.dumps(message) + + def _subscription_response(self, channel: str, user_id: int) -> str: + private_channel = f"{channel}-{user_id}" + message = { + "event": "bts:subscribe", + "data": { + "channel": private_channel + } + } + + return json.dumps(message) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def 
test_listening_process_authenticates_and_subscribes_to_events(self, mock_ws, mock_api): + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id)) + + self.listening_task = self.ev_loop.create_task( + self.data_source._subscribe_channels(mock_ws)) + self.ev_loop.run_until_complete(self.listening_task) + + self.assertTrue( + self._is_logged("INFO", "Subscribed to private account and orders channels...") + ) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def test_subscribe_channels_raises_cancel_exception(self, mock_ws, mock_api): + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id)) + + mock_ws.send.side_effect = asyncio.CancelledError + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.data_source._subscribe_channels(mock_ws)) + self.ev_loop.run_until_complete(self.listening_task) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def test_subscribe_channels_raises_exception_and_logs_error(self, mock_ws, mock_api): + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id), repeat=True) + + mock_ws.send.side_effect = ConnectionError("Test Error") + + with self.assertRaises(ConnectionError, msg="Test Error"): + self.listening_task = self.ev_loop.create_task( + self.data_source._subscribe_channels(mock_ws)) + self.ev_loop.run_until_complete(self.listening_task) + + self.assertTrue( + self._is_logged("ERROR", "Unexpected error occurred subscribing to order book trading...") + ) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def test_listen_for_user_stream_logs_subscribed_message(self, mock_ws, mock_api): + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.configure_http_request_mock(mock_api) + + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id)) + + message_event_subscription_success = { + "event": "bts:subscription_succeeded", + "channel": CONSTANTS.WS_PRIVATE_MY_TRADES.format(self.ex_trading_pair, user_id), + "data": {} + } + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=mock_ws.return_value, + message=json.dumps(message_event_subscription_success)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=mock_ws.return_value) + + self.assertEqual(0, msg_queue.qsize()) + + self.assertTrue(self._is_logged("INFO", f"Successfully subscribed to '{message_event_subscription_success['channel']}'...")) + + @patch("aiohttp.ClientSession.ws_connect", 
new_callable=AsyncMock) + @aioresponses() + def test_listen_for_user_stream_does_queue_valid_payload(self, mock_ws, mock_api): + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.configure_http_request_mock(mock_api) + + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id)) + + valid_message = { + 'data': { + 'id': 1, + 'amount': '3600.00000000', + 'price': '0.12200', + 'microtimestamp': '1000', + 'fee': '1.3176', + 'order_id': 12345, + 'trade_account_id': 0, + 'side': 'buy' + }, + 'channel': 'private-my_trades_coinalphahbot-1', + 'event': 'trade' + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=mock_ws.return_value, + message=json.dumps(valid_message)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=mock_ws.return_value) + + self.assertEqual(1, msg_queue.qsize()) + self.assertEqual(valid_message, msg_queue.get_nowait()) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def test_listen_for_user_stream_does_not_queue_invalid_payload(self, mock_ws, mock_api): + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.configure_http_request_mock(mock_api) + + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id)) + + message_with_unknown_event_type = { + "event": "unknown-event" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=mock_ws.return_value, + message=json.dumps(message_with_unknown_event_type)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=mock_ws.return_value) + + self.assertEqual(0, msg_queue.qsize()) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @aioresponses() + def test_listen_for_user_stream_reconnects_on_request(self, mock_ws, mock_api): + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.configure_http_request_mock(mock_api) + + user_id = 1 + url = web_utils.private_rest_url(CONSTANTS.WEBSOCKET_TOKEN_URL, self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, body=self._authentication_response(user_id), repeat=True) + + reconnect_event = { + "event": "bts:request_reconnect", + "channel": "", + "data": "" + } + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=mock_ws.return_value, + message=json.dumps(reconnect_event)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + 
self.assertEqual(0, msg_queue.qsize()) + self.assertTrue(self._is_logged("WARNING", "The websocket connection was closed (Received request to reconnect. Reconnecting...)")) diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_auth.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_auth.py new file mode 100644 index 0000000000..1e6177fd26 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_auth.py @@ -0,0 +1,61 @@ +import asyncio +from unittest import TestCase +from unittest.mock import MagicMock +from urllib.parse import urlencode + +from typing_extensions import Awaitable + +from hummingbot.connector.exchange.bitstamp.bitstamp_auth import BitstampAuth +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest + + +class BitstampAuthTests(TestCase): + + def setUp(self) -> None: + self._api_key = "testApiKey" + self._secret_key = "testApiKey" + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def test_rest_authenticate(self): + now = 1234567890.000 + mock_time_provider = MagicMock() + mock_time_provider.time.return_value = now + + auth = BitstampAuth(api_key=self._api_key, secret_key=self._secret_key, time_provider=mock_time_provider) + request = RESTRequest(url="https://www.test.com/url", method=RESTMethod.GET, is_auth_required=True, headers={}) + configured_request = self.async_run_with_timeout(auth.rest_authenticate(request)) + + self.assertEqual(f"BITSTAMP {self._api_key}", configured_request.headers["X-Auth"]) + self.assertEqual(auth.AUTH_VERSION, configured_request.headers["X-Auth-Version"]) + self.assertEqual(str(int(now * 1e3)), configured_request.headers["X-Auth-Timestamp"]) + self.assertIn("X-Auth-Nonce", configured_request.headers) + self.assertIn("X-Auth-Signature", configured_request.headers) + + def test_generate_message(self): + now = "1640000000000000" + mock_time_provider = MagicMock() + mock_time_provider.time.return_value = float(now) + + nonce = "nonce" + auth = BitstampAuth(self._api_key, self._secret_key, mock_time_provider) + + msg = auth._generate_message(RESTMethod.POST, "https://www.test.com/url", None, None, nonce, now) + + self.assertEqual(f"BITSTAMP {self._api_key}POSTwww.test.com/url{nonce}{now}{auth.AUTH_VERSION}", msg) + + def test_generate_message_with_payload(self): + now = "1640000000000000" + mock_time_provider = MagicMock() + mock_time_provider.time.return_value = float(now) + + nonce = "nonce" + content_type = "application/x-www-form-urlencoded" + payload = {"key": "value", "key2": "value2"} + auth = BitstampAuth(self._api_key, self._secret_key, mock_time_provider) + + msg = auth._generate_message(RESTMethod.POST, "https://www.test.com/url", content_type, payload, nonce, now) + + self.assertEqual(f"BITSTAMP {self._api_key}POSTwww.test.com/url{content_type}{nonce}{now}{auth.AUTH_VERSION}{urlencode(payload)}", msg) diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_exchange.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_exchange.py new file mode 100644 index 0000000000..565251d03b --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_exchange.py @@ -0,0 +1,982 @@ +import asyncio +import json +import re +from decimal import Decimal +from typing import Any, Callable, Dict, List, Optional, Tuple +from unittest.mock import AsyncMock + +from aioresponses import aioresponses +from 
aioresponses.core import RequestCall + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.bitstamp import bitstamp_constants as CONSTANTS, bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_exchange import BitstampExchange +from hummingbot.connector.exchange.bitstamp.bitstamp_utils import DEFAULT_FEES +from hummingbot.connector.test_support.exchange_connector_test import AbstractExchangeConnectorTests +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase, TradeFeeSchema +from hummingbot.core.event.events import ( + BuyOrderCreatedEvent, + MarketOrderFailureEvent, + OrderFilledEvent, + SellOrderCreatedEvent, +) + + +class BitstampExchangeTests(AbstractExchangeConnectorTests.ExchangeConnectorTests): + + maxDiff = None + + @property + def all_symbols_url(self): + return web_utils.public_rest_url(path_url=CONSTANTS.EXCHANGE_INFO_PATH_URL, domain=self.exchange._domain) + + @property + def latest_prices_url(self): + symbol = self.exchange_trading_pair + url = web_utils.public_rest_url(path_url=CONSTANTS.TICKER_URL.format(symbol), domain=self.exchange._domain) + return url + + @property + def network_status_url(self): + url = web_utils.public_rest_url(CONSTANTS.STATUS_URL, domain=self.exchange._domain) + return url + + @property + def trading_rules_url(self): + url = web_utils.private_rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL, domain=self.exchange._domain) + return url + + @property + def order_creation_url(self): + url = web_utils.private_rest_url("/", domain=self.exchange._domain) + return url + + def order_creation_url_for_trade_type(self, trade_type: TradeType, trading_pair: str): + type = "buy" if trade_type == TradeType.BUY else "sell" + url = web_utils.private_rest_url(f"/{type}/{trading_pair}/", domain=self.exchange._domain) + return url + + @property + def balance_url(self): + url = web_utils.private_rest_url(CONSTANTS.ACCOUNT_BALANCES_URL, domain=self.exchange._domain) + return url + + @property + def all_symbols_request_mock_response(self): + return [ + { + "name": f"{self.base_asset}/{self.quote_asset}", + "url_symbol": f"{self.base_asset.lower()}{self.quote_asset.lower()}", + "base_decimals": 8, + "counter_decimals": 2, + "instant_order_counter_decimals": 2, + "minimum_order": "20.0 USD", + "trading": "Enabled", + "instant_and_market_orders": "Enabled", + "description": f"{self.base_asset} / {self.quote_asset}" + } + ] + + @property + def latest_prices_request_mock_response(self): + return { + "ask": "2211.00", + "bid": "2188.97", + "high": "2811.00", + "last": "2211.00", + "low": "2188.97", + "open": "2211.00", + "open_24": "2211.00", + "percent_change_24": "13.57", + "side": "0", + "timestamp": "1643640186", + "volume": "213.26801100", + "vwap": "2189.80" + } + + @property + def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: + response = [ + { + "name": f"{self.base_asset}/{self.quote_asset}", + "url_symbol": f"{self.base_asset.lower()}{self.quote_asset.lower()}", + "base_decimals": 8, + "counter_decimals": 2, + "instant_order_counter_decimals": 2, + "minimum_order": "20.0 USD", + "trading": "Enabled", + "instant_and_market_orders": "Enabled", + 
"description": f"{self.base_asset} / {self.quote_asset}" + }, + { + "name": "INVALID/PAIR", + "url_symbol": self.exchange_symbol_for_tokens("INVALID", "PAIR"), + "base_decimals": 8, + "counter_decimals": 2, + "instant_order_counter_decimals": 2, + "minimum_order": "20.0 PAIR", + "trading": "Disabled", + "instant_and_market_orders": "Enabled", + "description": f"{self.base_asset} / {self.quote_asset}" + } + ] + + return "INVALID-PAIR", response + + @property + def network_status_request_successful_mock_response(self): + return { + "server_time": 1719654227271 + } + + @property + def trading_rules_request_mock_response(self): + return [ + { + "name": f"{self.base_asset}/{self.quote_asset}", + "url_symbol": f"{self.base_asset.lower()}{self.quote_asset.lower()}", + "base_decimals": 8, + "counter_decimals": 2, + "instant_order_counter_decimals": 2, + "minimum_order": "20.0 USD", + "trading": "Enabled", + "instant_and_market_orders": "Enabled", + "description": f"{self.base_asset} / {self.quote_asset}" + } + ] + + @property + def trading_rules_request_erroneous_mock_response(self): + return [ + { + "url_symbol": f"{self.base_asset.lower()}{self.quote_asset.lower()}", + "trading": "Enabled", + } + ] + + @property + def order_creation_request_successful_mock_response(self): + return { + "id": self.expected_exchange_order_id, + "market": f"{self.base_asset}/{self.quote_asset}", + "datetime": "2022-01-31 14:43:15.796000", + "type": "0", + "price": "10000", + "amount": "100", + "client_order_id": "" + } + + @property + def trading_fees_mock_response(self): + return [ + { + "currency_pair": self.exchange_trading_pair, + "market": self.exchange_trading_pair, + "fees": { + "maker": "1.0000", + "taker": "2.0000" + } + }, + { + "currency_pair": "btcusd", + "market": "btcusd", + "fees": { + "maker": "0.3000", + "taker": "0.4000" + } + }, + ] + + @property + def balance_request_mock_response_for_base_and_quote(self): + return [ + { + "available": "10.00", + "currency": self.base_asset, + "reserved": "5.00", + "total": "15.00" + }, + { + "available": "2000.00", + "currency": self.quote_asset, + "reserved": "0.00", + "total": "2000.00" + } + ] + + @property + def balance_request_mock_response_only_base(self): + return [ + { + "available": "10.00", + "currency": self.base_asset, + "reserved": "5.00", + "total": "15.00" + } + ] + + @property + def balance_event_websocket_update(self): + raise NotImplementedError + + @property + def expected_latest_price(self): + return 2211.00 + + @property + def expected_supported_order_types(self): + return [OrderType.LIMIT, OrderType.LIMIT_MAKER, OrderType.MARKET] + + @property + def expected_trading_rule(self): + return TradingRule( + trading_pair=self.trading_pair, + min_price_increment=Decimal("1e-2"), + min_base_amount_increment=Decimal("1e-8"), + min_quote_amount_increment=Decimal("1e-2"), + min_notional_size=Decimal("20.0"), + ) + + @property + def expected_logged_error_for_erroneous_trading_rule(self): + erroneous_rule = self.trading_rules_request_erroneous_mock_response[0] + return f"Error parsing the trading pair rule {erroneous_rule}. Skipping." 
+ + @property + def expected_exchange_order_id(self): + return 28 + + @property + def is_order_fill_http_update_included_in_status_update(self) -> bool: + return True + + @property + def is_order_fill_http_update_executed_during_websocket_order_event_processing(self) -> bool: + return False + + @property + def expected_partial_fill_price(self) -> Decimal: + return Decimal(10500) + + @property + def expected_partial_fill_amount(self) -> Decimal: + return Decimal("0.5") + + @property + def expected_fill_fee(self) -> TradeFeeBase: + return AddedToCostTradeFee( + flat_fees=[TokenAmount(token=self.quote_asset, amount=Decimal("30"))] + ) + + @property + def expected_fill_trade_id(self) -> str: + return str(30000) + + def exchange_symbol_for_tokens(self, base_token: str, quote_token: str) -> str: + return f"{base_token.lower()}{quote_token.lower()}" + + def create_exchange_instance(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + return BitstampExchange( + client_config_map=client_config_map, + bitstamp_api_key="testAPIKey", + bitstamp_api_secret="testSecret", + trading_pairs=[self.trading_pair], + ) + + def validate_auth_credentials_present(self, request_call: RequestCall): + request_headers = request_call.kwargs["headers"] + expected_headers = [ + "X-Auth", + "X-Auth-Signature", + "X-Auth-Nonce", + "X-Auth-Timestamp", + "X-Auth-Version" + ] + self.assertEqual("BITSTAMP testAPIKey", request_headers["X-Auth"]) + for header in expected_headers: + self.assertIn(header, request_headers) + + def validate_order_creation_request(self, order: InFlightOrder, request_call: RequestCall): + request_data = dict(request_call.kwargs["data"]) + self.assertEqual(Decimal("100"), Decimal(request_data["amount"])) + self.assertEqual(Decimal("10000"), Decimal(request_data["price"])) + self.assertEqual(order.client_order_id, request_data["client_order_id"]) + + def validate_order_cancelation_request(self, order: InFlightOrder, request_call: RequestCall): + request_data = request_call.kwargs["data"] + self.assertEqual(order.exchange_order_id, str(request_data["id"])) + + def validate_order_status_request(self, order: InFlightOrder, request_call: RequestCall): + request_data = request_call.kwargs["data"] + self.assertEqual(order.client_order_id, str(request_data["client_order_id"])) + + def validate_trades_request(self, order: InFlightOrder, request_call: RequestCall): + request_data = request_call.kwargs["data"] + self.assertEqual(order.client_order_id, str(request_data["client_order_id"])) + + def configure_successful_cancelation_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_CANCEL_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_cancelation_request_successful_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_erroneous_cancelation_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_CANCEL_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.post(regex_url, status=400, callback=callback) + return url + + def configure_order_not_found_error_cancelation_response( + self, order: InFlightOrder, mock_api: aioresponses, callback: Optional[Callable] = 
lambda *args, **kwargs: None + ) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_CANCEL_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._get_error_response(CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE, CONSTANTS.ORDER_NOT_EXIST_MESSAGE) + mock_api.post(regex_url, status=200, body=json.dumps(response), callback=callback) + return url + + def configure_one_successful_one_erroneous_cancel_all_response( + self, + successful_order: InFlightOrder, + erroneous_order: InFlightOrder, + mock_api: aioresponses) -> List[str]: + """ + :return: a list of all configured URLs for the cancelations + """ + all_urls = [] + url = self.configure_successful_cancelation_response(order=successful_order, mock_api=mock_api) + all_urls.append(url) + url = self.configure_erroneous_cancelation_response(order=erroneous_order, mock_api=mock_api) + all_urls.append(url) + return all_urls + + def configure_completely_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_status_request_completely_filled_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_canceled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_status_request_canceled_mock_response(order=order) + + # It's called twice, once during the _request_order_status call and once during _all_trade_updates_for_order + # TODO: Refactor the code to avoid calling the same endpoint twice + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_erroneous_http_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.post(regex_url, status=400, callback=callback) + return url + + def configure_open_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + """ + :return: the URL configured + """ + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_status_request_open_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_http_error_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.post(regex_url, status=401, callback=callback) + return url + + def configure_partially_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: 
aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_status_request_partially_filled_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_order_not_found_error_order_status_response( + self, order: InFlightOrder, mock_api: aioresponses, callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._get_error_response(CONSTANTS.ORDER_NOT_EXIST_ERROR_CODE, CONSTANTS.ORDER_NOT_EXIST_MESSAGE) + mock_api.post(regex_url, status=200, body=json.dumps(response), callback=callback) + return url + + def configure_partial_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_fills_request_partial_fill_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_full_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.ORDER_STATUS_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self._order_fills_request_full_fill_mock_response(order=order) + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def configure_trading_fees_response( + self, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + url = web_utils.private_rest_url(CONSTANTS.TRADING_FEES_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + response = self.trading_fees_mock_response + mock_api.post(regex_url, body=json.dumps(response), callback=callback) + return url + + def _configure_balance_response( + self, + response: Dict[str, Any], + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + + url = self.balance_url + mock_api.post( + re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")), + body=json.dumps(response), + callback=callback) + return url + + def order_event_for_new_order_websocket_update(self, order: InFlightOrder): + return { + 'data': { + 'id': order.exchange_order_id, + 'id_str': str(order.exchange_order_id), + 'order_type': 1, + 'datetime': '1719221608', + 'microtimestamp': '1719221607521000', + 'amount': 300.00000000, + 'amount_str': '300.00000000', + 'amount_traded': '0', + 'amount_at_create': '300.00000000', + 'price': 0.12619, + 'price_str': '0.12619', + 'trade_account_id': 0, + 'client_order_id': order.client_order_id, + }, + 'channel': CONSTANTS.WS_PRIVATE_MY_ORDERS.format(self.exchange_trading_pair, 1), + 'event': 'order_created' + } + + def order_event_for_canceled_order_websocket_update(self, order: InFlightOrder): + return { + 'data': { + 'id': order.exchange_order_id, + 'id_str': str(order.exchange_order_id), + 'order_type': 1, + 'datetime': '1719221608', + 'microtimestamp': '1719221607521000', + 'amount': 
300.00000000, + 'amount_str': '300.00000000', + 'amount_traded': '0', + 'amount_at_create': '300.00000000', + 'price': 0.12619, + 'price_str': '0.12619', + 'trade_account_id': 0, + 'client_order_id': order.client_order_id, + }, + 'channel': CONSTANTS.WS_PRIVATE_MY_ORDERS.format(self.exchange_trading_pair, 1), + 'event': 'order_deleted' + } + + def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + 'data': { + 'id': order.exchange_order_id, + 'id_str': str(order.exchange_order_id), + 'order_type': 1, + 'datetime': '1719221608', + 'microtimestamp': '1719221607521000', + 'amount': 0, + 'amount_str': '0', + 'amount_traded': '300.00000000', + 'amount_at_create': '300.00000000', + 'price': 0.12619, + 'price_str': '0.12619', + 'trade_account_id': 0, + 'client_order_id': order.client_order_id, + }, + 'channel': CONSTANTS.WS_PRIVATE_MY_ORDERS.format(self.exchange_trading_pair, 1), + 'event': 'order_deleted' + } + + def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + 'data': { + 'id': int(order.exchange_order_id), + 'amount': str(order.amount), + 'price': str(order.price), + 'microtimestamp': '1719221608330000', + 'fee': str(self.expected_fill_fee.flat_fees[0].amount), + 'order_id': 1762863651524616, + 'client_order_id': order.client_order_id, + 'trade_account_id': 0, + 'side': order.trade_type.name.lower(), + }, + 'channel': CONSTANTS.WS_PRIVATE_MY_TRADES.format(self.exchange_trading_pair, 1), + 'event': 'trade' + } + + def trade_event_for_self_trade_websocket_update(self, buy_order: InFlightOrder, sell_order: InFlightOrder): + return { + 'data': { + 'timestamp': 1720288033, + 'amount': buy_order.amount, + 'amount_str': str(buy_order.amount), + 'price': buy_order.price, + 'price_str': str(buy_order.price), + 'type': 0, + 'microtimestamp': '1720288033933000', + 'buy_order_id': buy_order.exchange_order_id, + 'sell_order_id': sell_order.exchange_order_id, + 'sellers_trade_account_id': 0, + 'buyers_trade_account_id': 0 + }, + 'channel': CONSTANTS.WS_PRIVATE_MY_SELF_TRADES.format(self.exchange_trading_pair, 1), + 'event': 'self_trade' + } + + def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Any: + return { + "id": int(order.exchange_order_id), + "amount": str(order.amount), + "price": str(order.price), + "type": 0 if order.trade_type == TradeType.BUY else 1, + "market": f"{order.base_asset}/{order.quote_asset}", + } + + def _order_status_request_completely_filled_mock_response(self, order: InFlightOrder) -> Any: + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Finished", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [], + "amount_remaining": "0", + "client_order_id": order.client_order_id, + } + + def _order_status_request_canceled_mock_response(self, order: InFlightOrder) -> Any: + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Canceled", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [], + "amount_remaining": str(order.amount), + "client_order_id": order.client_order_id, + } + + def _order_status_request_open_mock_response(self, order: InFlightOrder) -> Any: + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Open", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [], + "amount_remaining": str(order.amount), + "client_order_id": order.client_order_id, 
+ } + + def _order_status_request_partially_filled_mock_response(self, order: InFlightOrder) -> Any: + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Open", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [], + "amount_remaining": str(order.amount - self.expected_partial_fill_amount), + "client_order_id": order.client_order_id, + } + + def _order_fills_request_partial_fill_mock_response(self, order: InFlightOrder): + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Open", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [ + { + "tid": self.expected_fill_trade_id, + "price": str(self.expected_partial_fill_price), + order.base_asset.lower(): str(self.expected_partial_fill_amount), + order.quote_asset.lower(): str(self.expected_partial_fill_price * self.expected_partial_fill_amount), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "datetime": "2022-01-31 14:43:16.000", + "type": 0 + } + ], + "amount_remaining": str(order.amount - self.expected_partial_fill_amount), + "client_order_id": order.client_order_id, + } + + def _order_fills_request_full_fill_mock_response(self, order: InFlightOrder): + return { + "id": order.exchange_order_id, + "datetime": "2022-01-31 14:43:15", + "type": "0", + "status": "Finished", + "market": f"{self.base_asset}/{self.quote_asset}", + "transactions": [ + { + "tid": self.expected_fill_trade_id, + "price": str(order.price), + order.base_asset.lower(): str(order.amount), + order.quote_asset.lower(): str(order.price * order.amount), + "fee": str(self.expected_fill_fee.flat_fees[0].amount), + "datetime": "2022-01-31 14:43:16.000", + "type": 0 + } + ], + "amount_remaining": "0", + "client_order_id": order.client_order_id, + } + + def test_user_stream_balance_update(self): + """ + The balance update event is not supported by the Bitstamp exchange + """ + pass + + @aioresponses() + def test_create_buy_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url_for_trade_type(TradeType.BUY, self.exchange_trading_pair) + + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), create_event.exchange_order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " + 
f"{Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." + ) + ) + + @aioresponses() + def test_create_sell_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url_for_trade_type(TradeType.SELL, self.exchange_trading_pair) + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_id = self.place_sell_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: SellOrderCreatedEvent = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), create_event.exchange_order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." + ) + ) + + @aioresponses() + def test_create_order_fails_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + url = self.order_creation_url_for_trade_type(TradeType.BUY, self.exchange_trading_pair) + mock_api.post(url, + status=400, + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertNotIn(order_id, self.exchange.in_flight_orders) + order_to_validate_request = InFlightOrder( + client_order_id=order_id, + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("100"), + creation_timestamp=self.exchange.current_timestamp, + price=Decimal("10000") + ) + self.validate_order_creation_request( + order=order_to_validate_request, + request_call=order_request) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. 
Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + @aioresponses() + def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url_for_trade_type(TradeType.BUY, self.exchange_trading_pair) + mock_api.post(url, + status=400, + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_id_for_invalid_order = self.place_buy_order( + amount=Decimal("0.0001"), price=Decimal("0.0001") + ) + # The second order is used only to have the event triggered and avoid using timeouts for tests + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait(), timeout=3) + + self.assertNotIn(order_id_for_invalid_order, self.exchange.in_flight_orders) + self.assertNotIn(order_id, self.exchange.in_flight_orders) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id_for_invalid_order, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "WARNING", + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." + ) + ) + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. 
Order Update: OrderUpdate(trading_pair='{self.trading_pair}', "
+                f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, "
+                f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)"
+            )
+        )
+
+    @aioresponses()
+    def test_update_trading_fees(self, mock_api):
+        self.configure_trading_fees_response(mock_api=mock_api)
+        resp = self.trading_fees_mock_response
+
+        self.async_run_with_timeout(self.exchange._update_trading_fees())
+
+        expected_trading_fees = TradeFeeSchema(
+            maker_percent_fee_decimal=Decimal(resp[0]["fees"]["maker"]),
+            taker_percent_fee_decimal=Decimal(resp[0]["fees"]["taker"]),
+        )
+
+        self.assertEqual(expected_trading_fees, self.exchange._trading_fees[self.trading_pair])
+        self.assertEqual(1, len(self.exchange._trading_fees))
+
+    def test_get_fee_default(self):
+        expected_maker_fee = AddedToCostTradeFee(percent=DEFAULT_FEES.maker_percent_fee_decimal)
+        maker_fee = self.exchange._get_fee(self.base_asset, self.quote_asset, OrderType.LIMIT, TradeType.BUY, 1, 2, is_maker=True)
+
+        expected_taker_fee = AddedToCostTradeFee(percent=DEFAULT_FEES.taker_percent_fee_decimal)
+        taker_fee = self.exchange._get_fee(self.base_asset, self.quote_asset, OrderType.MARKET, TradeType.BUY, 1, 2, is_maker=False)
+
+        self.assertEqual(expected_maker_fee, maker_fee)
+        self.assertEqual(expected_taker_fee, taker_fee)
+
+    @aioresponses()
+    def test_get_fee(self, mock_api):
+        self.configure_trading_fees_response(mock_api=mock_api)
+        resp = self.trading_fees_mock_response
+
+        self.async_run_with_timeout(self.exchange._update_trading_fees())
+
+        expected_maker_fee = AddedToCostTradeFee(percent=Decimal(resp[0]["fees"]["maker"]))
+        maker_fee = self.exchange._get_fee(self.base_asset, self.quote_asset, OrderType.LIMIT, TradeType.BUY, 1, 2, is_maker=True)
+
+        expected_taker_fee = AddedToCostTradeFee(percent=Decimal(resp[0]["fees"]["taker"]))
+        taker_fee = self.exchange._get_fee(self.base_asset, self.quote_asset, OrderType.MARKET, TradeType.BUY, 1, 2, is_maker=False)
+
+        self.assertEqual(expected_maker_fee, maker_fee)
+        self.assertEqual(expected_taker_fee, taker_fee)
+
+    def test_time_synchronizer_related_request_error_detection(self):
+        response = self._get_error_response(CONSTANTS.TIMESTAMP_ERROR_CODE, CONSTANTS.TIMESTAMP_ERROR_MESSAGE)
+        exception = IOError(f"'Error executing request POST {self.balance_url}. HTTP status is 403. 
Error: {json.dumps(response)}'") + self.assertEqual(True, self.exchange._is_request_exception_related_to_time_synchronizer(exception)) + + def test_user_stream_update_for_self_trade_fill(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.MARKET, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + buy_order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "2", + exchange_order_id=str(self.expected_exchange_order_id) + "1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.SELL, + price=Decimal("10000"), + amount=Decimal("10"), + ) + sell_order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "2"] + + trade_event = self.trade_event_for_self_trade_websocket_update(buy_order=buy_order, sell_order=sell_order) + + mock_queue = AsyncMock() + event_messages = [] + if trade_event: + event_messages.append(trade_event) + + event_messages.append(asyncio.CancelledError) + mock_queue.get.side_effect = event_messages + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(buy_order.client_order_id, fill_event.order_id) + self.assertEqual(buy_order.trading_pair, fill_event.trading_pair) + self.assertEqual(buy_order.trade_type, fill_event.trade_type) + self.assertEqual(buy_order.order_type, fill_event.order_type) + self.assertEqual(buy_order.price, fill_event.price) + self.assertEqual(buy_order.amount, fill_event.amount) + + self.assertTrue( + self.is_logged( + "INFO", + f"The BUY order {buy_order.client_order_id} amounting to {buy_order.executed_amount_base}/{buy_order.amount} COINALPHA has been filled at {Decimal('10000')} HBOT." + ) + ) + + self.assertTrue( + self.is_logged( + "INFO", + f"The SELL order {sell_order.client_order_id} amounting to {sell_order.executed_amount_base}/{sell_order.amount} COINALPHA has been filled at {Decimal('10000')} HBOT." 
+ ) + ) + + def _get_error_response(self, error_code, error_reason): + return { + "status": "error", + "reason": error_reason, + "code": error_code + } diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_order_book.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_order_book.py new file mode 100644 index 0000000000..788532b4a9 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_order_book.py @@ -0,0 +1,107 @@ +from unittest import TestCase + +from hummingbot.connector.exchange.bitstamp.bitstamp_order_book import BitstampOrderBook +from hummingbot.core.data_type.order_book_message import OrderBookMessageType + + +class BitstampOrderBookTests(TestCase): + + def test_snapshot_message_from_exchange(self): + snapshot_message = BitstampOrderBook.snapshot_message_from_exchange( + msg={ + "microtimestamp": "1643643584684047", + "timestamp": "1643643584", + "bids": [ + ["4.00000000", "431.00000000"] + ], + "asks": [ + ["4.00000200", "12.00000000"] + ] + }, + timestamp=1643643584, + metadata={"trading_pair": "COINALPHA-HBOT"} + ) + + self.assertEqual("COINALPHA-HBOT", snapshot_message.trading_pair) + self.assertEqual(OrderBookMessageType.SNAPSHOT, snapshot_message.type) + self.assertEqual(1643643584, snapshot_message.timestamp) + self.assertEqual(1643643584, snapshot_message.update_id) + self.assertEqual(-1, snapshot_message.trade_id) + self.assertEqual(1, len(snapshot_message.bids)) + self.assertEqual(4.0, snapshot_message.bids[0].price) + self.assertEqual(431.0, snapshot_message.bids[0].amount) + self.assertEqual(1643643584, snapshot_message.bids[0].update_id) + self.assertEqual(1, len(snapshot_message.asks)) + self.assertEqual(4.000002, snapshot_message.asks[0].price) + self.assertEqual(12.0, snapshot_message.asks[0].amount) + self.assertEqual(1643643584, snapshot_message.asks[0].update_id) + + def test_diff_message_from_exchange(self): + diff_msg = BitstampOrderBook.diff_message_from_exchange( + msg={ + "data": { + "bids": [ + [ + "0.0024", + "10" + ] + ], + "asks": [ + [ + "0.0026", + "100" + ] + ], + "microtimestamp": "1640000000000000", + "timestamp": "1640000000" + }, + "channel": "diff_order_book_coinalphahbot", + "event": "data" + }, + timestamp=1640000000.0, + metadata={"trading_pair": "COINALPHA-HBOT"} + ) + + self.assertEqual("COINALPHA-HBOT", diff_msg.trading_pair) + self.assertEqual(OrderBookMessageType.DIFF, diff_msg.type) + self.assertEqual(1640000000.0, diff_msg.timestamp) + self.assertEqual(1640000000, diff_msg.update_id) + self.assertEqual(1640000000, diff_msg.first_update_id) + self.assertEqual(-1, diff_msg.trade_id) + self.assertEqual(1, len(diff_msg.bids)) + self.assertEqual(0.0024, diff_msg.bids[0].price) + self.assertEqual(10.0, diff_msg.bids[0].amount) + self.assertEqual(1640000000.0, diff_msg.bids[0].update_id) + self.assertEqual(1, len(diff_msg.asks)) + self.assertEqual(0.0026, diff_msg.asks[0].price) + self.assertEqual(100.0, diff_msg.asks[0].amount) + self.assertEqual(1640000000.0, diff_msg.asks[0].update_id) + + def test_trade_message_from_exchange(self): + trade_update = { + "data": { + "amount": 170473.0, + "amount_str": "0.00170473", + "buy_order_id": 1762645594693633, + "id": 12345, + "microtimestamp": "1719168372720000", + "price": 64075, + "price_str": "64075", + "sell_order_id": 1762645598466049, + "timestamp": "1719168372", + "type": 1 + }, + "event": "trade", + "channel": "live_trades_coinalphahbot", + } + + trade_message = BitstampOrderBook.trade_message_from_exchange( + msg=trade_update, + 
metadata={"trading_pair": "COINALPHA-HBOT"} + ) + + self.assertEqual("COINALPHA-HBOT", trade_message.trading_pair) + self.assertEqual(OrderBookMessageType.TRADE, trade_message.type) + self.assertEqual(-1, trade_message.update_id) + self.assertEqual(-1, trade_message.first_update_id) + self.assertEqual("12345", trade_message.trade_id) diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_utils.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_utils.py new file mode 100644 index 0000000000..2b629b16e6 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_utils.py @@ -0,0 +1,34 @@ +from decimal import Decimal +from unittest import TestCase + +from pydantic import SecretStr + +from hummingbot.connector.exchange.bitstamp.bitstamp_utils import DEFAULT_FEES, BitstampConfigMap + + +class BitstampUtilsTests(TestCase): + + quote_asset = None + base_asset = None + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.base_asset = "COINALPHA" + cls.quote_asset = "HBOT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.hb_trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + + def test_default_fees(self): + self.assertEqual(DEFAULT_FEES.maker_percent_fee_decimal, Decimal("0.1")) + self.assertEqual(DEFAULT_FEES.taker_percent_fee_decimal, Decimal("0.2")) + + def test_bitstamp_config_map(self): + config_map = BitstampConfigMap( + bitstamp_api_key="test_key", + bitstamp_api_secret="test_secret" + ) + self.assertEqual(config_map.connector, "bitstamp") + self.assertEqual(config_map.bitstamp_api_key, SecretStr("test_key")) + self.assertEqual(config_map.bitstamp_api_secret, SecretStr("test_secret")) diff --git a/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_web_utils.py b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_web_utils.py new file mode 100644 index 0000000000..7697e82d93 --- /dev/null +++ b/test/hummingbot/connector/exchange/bitstamp/test_bitstamp_web_utils.py @@ -0,0 +1,70 @@ +import asyncio +import json +from typing import Awaitable +from unittest import TestCase +from unittest.mock import AsyncMock, Mock, patch + +import hummingbot.connector.exchange.bitstamp.bitstamp_constants as CONSTANTS +from hummingbot.connector.exchange.bitstamp import bitstamp_web_utils as web_utils +from hummingbot.connector.exchange.bitstamp.bitstamp_web_utils import BitstampRESTPreProcessor +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest + + +class BitstampWebUtilsTests(TestCase): + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def test_public_rest_url(self): + path_url = "/TEST_PATH" + domain = "" + expected_url = CONSTANTS.REST_URL + CONSTANTS.API_VERSION + path_url + self.assertEqual(expected_url, web_utils.public_rest_url(path_url, domain)) + + def test_private_rest_url(self): + path_url = "/TEST_PATH" + domain = "" + expected_url = CONSTANTS.REST_URL + CONSTANTS.API_VERSION + path_url + self.assertEqual(expected_url, web_utils.private_rest_url(path_url, domain)) + + @patch('hummingbot.connector.exchange.bitstamp.bitstamp_web_utils' + '.build_api_factory_without_time_synchronizer_pre_processor', + new_callable=Mock) + def test_get_current_server_time(self, 
mock_api_factory: Mock): + response = {"server_time": 1719431075066} + mock_rest_assistant = AsyncMock() + mock_rest_assistant.execute_request.return_value = {"server_time": 1719431075066} + + async def get_rest_assistant(): + return mock_rest_assistant + + mock_api_factory.return_value.get_rest_assistant = get_rest_assistant + + time = self.async_run_with_timeout(web_utils.get_current_server_time()) + + self.assertEqual(response["server_time"], time) + + def test_bitstamp_rest_pre_processor_with_data(self): + payload = {"test": "data"} + request = RESTRequest(method=RESTMethod.POST, data=json.dumps({"test": "data"}), headers={"Content-Type": "application/json"}) + pre_processor = BitstampRESTPreProcessor() + + request = self.async_run_with_timeout(pre_processor.pre_process(request)) + + self.assertEqual(request.headers["Content-Type"], "application/x-www-form-urlencoded") + self.assertEqual(payload, request.data) + + def test_bitstamp_rest_pre_processor_without_data(self): + request = RESTRequest(method=RESTMethod.POST, data=None, headers={"Content-Type": "application/json"}) + pre_processor = BitstampRESTPreProcessor() + + request = self.async_run_with_timeout(pre_processor.pre_process(request)) + + self.assertEqual(request.headers["Content-Type"], "") + self.assertIsNone(request.data) diff --git a/test/hummingbot/connector/exchange/btc_markets/test_btc_markets_exchange.py b/test/hummingbot/connector/exchange/btc_markets/test_btc_markets_exchange.py index 3b0bda7f29..2c253cfb32 100644 --- a/test/hummingbot/connector/exchange/btc_markets/test_btc_markets_exchange.py +++ b/test/hummingbot/connector/exchange/btc_markets/test_btc_markets_exchange.py @@ -478,7 +478,7 @@ def configure_partial_fill_trade_response( response = self._order_fills_request_partial_fill_mock_response(order=order) mock_api.get(self.trade_url, body=json.dumps(response), callback=callback) - self.configure_open_order_status_response(order, mock_api, callback) + self.configure_partially_filled_order_status_response(order, mock_api, callback) return self.trade_url @@ -499,10 +499,11 @@ def configure_full_fill_trade_response( order: InFlightOrder, mock_api: aioresponses, callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + response = self._order_fills_request_full_fill_mock_response(order=order) mock_api.get(self.trade_url, body=json.dumps(response), callback=callback) - self.configure_open_order_status_response(order, mock_api, callback) + self.configure_completely_filled_order_status_response(order, mock_api, callback) return self.trade_url diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_api_order_book_data_source.py b/test/hummingbot/connector/exchange/bybit/test_bybit_api_order_book_data_source.py index 50e765398b..4ae620804c 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_api_order_book_data_source.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_api_order_book_data_source.py @@ -116,44 +116,40 @@ def get_exchange_rules_mock(self) -> Dict: @staticmethod def _snapshot_response() -> Dict: snapshot = { - "ret_code": 0, - "ret_msg": None, + "retCode": 0, + "retMsg": "OK", "result": { - "time": 1620886105740, - "bids": [ + "ts": 1716863719031, + "u": 230704, + "seq": 1432604333, + "cts": 1716863718905, + "b": [ [ "50005.12", "403.0416" ] ], - "asks": [ + "a": [ [ "50006.34", "0.2297" ] ] }, - "ext_code": None, - "ext_info": None + "time": 1716863719382, + "retExtInfo": {} } return snapshot @staticmethod def _snapshot_response_processed() -> Dict: 
snapshot_processed = { - "time": 1620886105740, - "bids": [ - [ - "50005.12", - "403.0416" - ] - ], - "asks": [ - [ - "50006.34", - "0.2297" - ] - ] + 'ts': 1716863719031, + 'u': 230704, + 'seq': 1432604333, + 'cts': 1716863718905, + 'b': [['50005.12', '403.0416']], + 'a': [['50006.34', '0.2297']] } return snapshot_processed @@ -200,11 +196,11 @@ def test_get_new_order_book(self, mock_api): self.assertEqual(1, len(bid_entries)) self.assertEqual(50005.12, bid_entries[0].price) self.assertEqual(403.0416, bid_entries[0].amount) - self.assertEqual(int(resp["result"]["time"]), bid_entries[0].update_id) self.assertEqual(1, len(ask_entries)) self.assertEqual(50006.34, ask_entries[0].price) + self.assertEqual(int(resp["result"]["u"]), bid_entries[0].update_id) + self.assertEqual(int(resp["result"]["u"]), ask_entries[0].update_id) self.assertEqual(0.2297, ask_entries[0].amount) - self.assertEqual(int(resp["result"]["time"]), ask_entries[0].update_id) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connect_mock): @@ -218,7 +214,8 @@ def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connec 'binary': 'false', 'symbolName': self.ex_trading_pair}, 'code': '0', - 'msg': 'Success'} + 'msg': 'Success' + } result_subscribe_depth = { 'topic': 'depth', @@ -228,7 +225,8 @@ def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connec 'binary': 'false', 'symbolName': self.ex_trading_pair}, 'code': '0', - 'msg': 'Success'} + 'msg': 'Success' + } self.mocking_assistant.add_websocket_aiohttp_message( websocket_mock=ws_connect_mock.return_value, @@ -246,29 +244,16 @@ def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connec self.assertEqual(2, len(sent_subscription_messages)) expected_trade_subscription = { - "topic": "trade", - "event": "sub", - "symbol": self.ex_trading_pair, - "params": { - "binary": False - } + 'op': 'subscribe', + 'args': ['publicTrade.COINALPHAHBOT'] } self.assertEqual(expected_trade_subscription, sent_subscription_messages[0]) expected_diff_subscription = { - "topic": "diffDepth", - "event": "sub", - "symbol": self.ex_trading_pair, - "params": { - "binary": False - } + 'op': 'subscribe', + 'args': ['orderbook.50.COINALPHAHBOT'] } self.assertEqual(expected_diff_subscription, sent_subscription_messages[1]) - self.assertTrue(self._is_logged( - "INFO", - f"Subscribed to public order book and trade channels of {self.trading_pair}..." 
- )) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) @patch("hummingbot.connector.exchange.bybit.bybit_api_order_book_data_source.BybitAPIOrderBookDataSource._time") def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finishes( @@ -288,7 +273,8 @@ def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finish 'binary': 'false', 'symbolName': self.ex_trading_pair}, 'code': '0', - 'msg': 'Success'} + 'msg': 'Success' + } result_subscribe_depth = { 'topic': 'depth', @@ -298,7 +284,8 @@ def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finish 'binary': 'false', 'symbolName': self.ex_trading_pair}, 'code': '0', - 'msg': 'Success'} + 'msg': 'Success' + } self.mocking_assistant.add_websocket_aiohttp_message( websocket_mock=ws_connect_mock.return_value, @@ -313,9 +300,7 @@ def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finish sent_messages = self.mocking_assistant.json_messages_sent_through_websocket( websocket_mock=ws_connect_mock.return_value) - expected_ping_message = { - "ping": int(1101 * 1e3) - } + expected_ping_message = {'op': 'ping'} self.assertEqual(expected_ping_message, sent_messages[-1]) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) @@ -344,7 +329,7 @@ def test_listen_for_subscriptions_logs_exception_details(self, sleep_mock, ws_co def test_listen_for_trades_cancelled_when_listening(self): mock_queue = MagicMock() mock_queue.get.side_effect = asyncio.CancelledError() - self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["trade"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -356,24 +341,24 @@ def test_listen_for_trades_cancelled_when_listening(self): def test_listen_for_trades_logs_exception(self): incomplete_resp = { - "topic": "trade", - "params": { - "symbol": self.ex_trading_pair, - "binary": "false", - "symbolName": self.ex_trading_pair - }, - "data": { - "v": "564265886622695424", - # "t": 1582001735462, - "p": "9787.5", - "q": "0.195009", - "m": True - } + "topic": f"publicTrade.{self.ex_trading_pair}", + "type": "trade", + "data": [ + { + "s": f"{self.ex_trading_pair}", + "S": "Buy", + "v": "0.001", + "p": "16578.50", + "L": "PlusTick", + "i": "20f43950-d8dd-5b31-9112-a178eb6023af", + "BT": False + } + ] } mock_queue = AsyncMock() mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] - self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["trade"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -392,28 +377,24 @@ def test_listen_for_trades_logs_exception(self): def test_listen_for_trades_successful(self): mock_queue = AsyncMock() trade_event = { - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "topic": "trade", - "params": { - "realtimeInterval": "24h", - "binary": "false" - }, + "topic": f"publicTrade.{self.ex_trading_pair}", + "type": "trade", + "ts": 1672304486868, "data": [ { - "v": "929681067596857345", - "t": 1625562619577, - "p": "34924.15", - "q": "0.00027", - "m": True + "T": 1672304486865, + "s": f"{self.ex_trading_pair}", + "S": "Buy", + "v": "0.001", + "p": "16578.50", + "L": "PlusTick", + "i": "20f43950-d8dd-5b31-9112-a178eb6023af", + "BT": False } - ], - "f": True, - "sendTime": 1626249138535, - "shared": False + ] } mock_queue.get.side_effect = [trade_event, asyncio.CancelledError()] - self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] 
= mock_queue + self.ob_data_source._message_queue["trade"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -425,13 +406,14 @@ def test_listen_for_trades_successful(self): pass msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + print(msg) - self.assertTrue(trade_event["data"][0]["t"], msg.trade_id) + self.assertTrue(trade_event["data"][0]["i"], msg.trade_id) def test_listen_for_order_book_diffs_cancelled(self): mock_queue = AsyncMock() mock_queue.get.side_effect = asyncio.CancelledError() - self.ob_data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["order_book_diff"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -443,51 +425,70 @@ def test_listen_for_order_book_diffs_cancelled(self): def test_listen_for_order_book_diffs_logs_exception(self): incomplete_resp = { - # "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "topic": "diffDepth", - "params": { - "realtimeInterval": "24h", - "binary": "false" - }, - "data": [{ - "e": 301, - "s": self.ex_trading_pair, - "t": 1565600357643, - "v": "112801745_18", + "type": "order_book_diff", + "data": { + "s": f"{self.ex_trading_pair}", "b": [ - ["11371.49", "0.0014"], - ["11371.12", "0.2"], - ["11369.97", "0.3523"], - ["11369.96", "0.5"], - ["11369.95", "0.0934"], - ["11369.94", "1.6809"], - ["11369.6", "0.0047"], - ["11369.17", "0.3"], - ["11369.16", "0.2"], - ["11369.04", "1.3203"]], + [ + "30247.20", + "30.028" + ], + [ + "30245.40", + "0.224" + ], + [ + "30242.10", + "1.593" + ], + [ + "30240.30", + "1.305" + ], + [ + "30240.00", + "0" + ] + ], "a": [ - ["11375.41", "0.0053"], - ["11375.42", "0.0043"], - ["11375.48", "0.0052"], - ["11375.58", "0.0541"], - ["11375.7", "0.0386"], - ["11375.71", "2"], - ["11377", "2.0691"], - ["11377.01", "0.0167"], - ["11377.12", "1.5"], - ["11377.61", "0.3"] + [ + "30248.70", + "0" + ], + [ + "30249.30", + "0.892" + ], + [ + "30249.50", + "1.778" + ], + [ + "30249.60", + "0" + ], + [ + "30251.90", + "2.947" + ], + [ + "30252.20", + "0.659" + ], + [ + "30252.50", + "4.591" + ] ], - "o": 0 - }], - "f": False, - "sendTime": 1626253839401, - "shared": False + "u": 177400507, + "seq": 66544703342 + }, + "cts": 1687940967464 } mock_queue = AsyncMock() mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] - self.ob_data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["order_book_diff"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -499,56 +500,76 @@ def test_listen_for_order_book_diffs_logs_exception(self): self.async_run_with_timeout(self.listening_task) except asyncio.CancelledError: pass - self.assertTrue( self._is_logged("ERROR", "Unexpected error when processing public order book updates from exchange")) def test_listen_for_order_book_diffs_successful(self): mock_queue = AsyncMock() diff_event = { - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "topic": "diffDepth", - "params": { - "realtimeInterval": "24h", - "binary": "false" - }, - "data": [{ - "e": 301, - "s": self.ex_trading_pair, - "t": 1565600357643, - "v": "112801745_18", + "topic": f"orderbook.50.{self.ex_trading_pair}", + "type": "order_book_diff", + "ts": 1687940967466, + "data": { + "s": f"{self.ex_trading_pair}", "b": [ - ["11371.49", "0.0014"], - ["11371.12", "0.2"], - ["11369.97", "0.3523"], - ["11369.96", "0.5"], - ["11369.95", "0.0934"], - ["11369.94", "1.6809"], - ["11369.6", "0.0047"], - ["11369.17", "0.3"], - ["11369.16", 
"0.2"], - ["11369.04", "1.3203"]], + [ + "30247.20", + "30.028" + ], + [ + "30245.40", + "0.224" + ], + [ + "30242.10", + "1.593" + ], + [ + "30240.30", + "1.305" + ], + [ + "30240.00", + "0" + ] + ], "a": [ - ["11375.41", "0.0053"], - ["11375.42", "0.0043"], - ["11375.48", "0.0052"], - ["11375.58", "0.0541"], - ["11375.7", "0.0386"], - ["11375.71", "2"], - ["11377", "2.0691"], - ["11377.01", "0.0167"], - ["11377.12", "1.5"], - ["11377.61", "0.3"] + [ + "30248.70", + "0" + ], + [ + "30249.30", + "0.892" + ], + [ + "30249.50", + "1.778" + ], + [ + "30249.60", + "0" + ], + [ + "30251.90", + "2.947" + ], + [ + "30252.20", + "0.659" + ], + [ + "30252.50", + "4.591" + ] ], - "o": 0 - }], - "f": False, - "sendTime": 1626253839401, - "shared": False + "u": 177400507, + "seq": 66544703342 + }, + "cts": 1687940967464 } mock_queue.get.side_effect = [diff_event, asyncio.CancelledError()] - self.ob_data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["order_book_diff"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -560,48 +581,15 @@ def test_listen_for_order_book_diffs_successful(self): pass msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) - - self.assertTrue(diff_event["data"][0]["t"], msg.update_id) - - def test_listen_for_order_book_snapshots_cancelled_when_fetching_snapshot(self): - mock_queue = AsyncMock() - mock_queue.get.side_effect = asyncio.CancelledError() - self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue - - msg_queue: asyncio.Queue = asyncio.Queue() - - with self.assertRaises(asyncio.CancelledError): - self.async_run_with_timeout( - self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) - ) - - @aioresponses() - @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") - def test_listen_for_order_book_snapshots_log_exception(self, mock_api, sleep_mock): - mock_queue = AsyncMock() - mock_queue.get.side_effect = ['ERROR', asyncio.CancelledError] - self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue - - msg_queue: asyncio.Queue = asyncio.Queue() - sleep_mock.side_effect = [asyncio.CancelledError] - url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - mock_api.get(regex_url, exception=Exception) - - try: - self.async_run_with_timeout(self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)) - except asyncio.CancelledError: - pass - - self.assertTrue( - self._is_logged("ERROR", "Unexpected error when processing public order book updates from exchange")) + self.assertTrue(diff_event["data"]["u"], msg.update_id) @aioresponses() @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") def test_listen_for_order_book_snapshots_successful_rest(self, mock_api, _): mock_queue = AsyncMock() mock_queue.get.side_effect = asyncio.TimeoutError - self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + self.ob_data_source._message_queue["order_book_snapshot"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) @@ -613,56 +601,47 @@ def test_listen_for_order_book_snapshots_successful_rest(self, mock_api, _): self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) ) - msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + msg: 
OrderBookMessage = self.async_run_with_timeout(msg_queue.get(), 5) - self.assertEqual(int(snapshot_data["result"]["time"]), msg.update_id) + self.assertEqual(int(snapshot_data["result"]["u"]), msg.update_id) def test_listen_for_order_book_snapshots_successful_ws(self): mock_queue = AsyncMock() snapshot_event = { - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "topic": "diffDepth", - "params": { - "realtimeInterval": "24h", - "binary": "false" - }, - "data": [{ - "e": 301, - "s": self.ex_trading_pair, - "t": 1565600357643, - "v": "112801745_18", + "topic": f"orderbook.50.{self.ex_trading_pair}", + "type": "snapshot", + "ts": 1672304484978, + "data": { + "s": f"{self.ex_trading_pair}", "b": [ - ["11371.49", "0.0014"], - ["11371.12", "0.2"], - ["11369.97", "0.3523"], - ["11369.96", "0.5"], - ["11369.95", "0.0934"], - ["11369.94", "1.6809"], - ["11369.6", "0.0047"], - ["11369.17", "0.3"], - ["11369.16", "0.2"], - ["11369.04", "1.3203"]], + ..., + [ + "16493.50", + "0.006" + ], + [ + "16493.00", + "0.100" + ] + ], "a": [ - ["11375.41", "0.0053"], - ["11375.42", "0.0043"], - ["11375.48", "0.0052"], - ["11375.58", "0.0541"], - ["11375.7", "0.0386"], - ["11375.71", "2"], - ["11377", "2.0691"], - ["11377.01", "0.0167"], - ["11377.12", "1.5"], - ["11377.61", "0.3"] + [ + "16611.00", + "0.029" + ], + [ + "16612.00", + "0.213" + ], + ..., ], - "o": 0 - }], - "f": True, - "sendTime": 1626253839401, - "shared": False + "u": 18521288, + "seq": 7961638724 + }, + "cts": 1672304484976 } mock_queue.get.side_effect = [snapshot_event, asyncio.CancelledError()] - self.ob_data_source._message_queue[CONSTANTS.DIFF_EVENT_TYPE] = mock_queue + self.ob_data_source._message_queue["order_book_diff"] = mock_queue msg_queue: asyncio.Queue = asyncio.Queue() @@ -676,4 +655,4 @@ def test_listen_for_order_book_snapshots_successful_ws(self): msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get(), timeout=6) - self.assertTrue(snapshot_event["data"][0]["t"], msg.update_id) + self.assertTrue(snapshot_event["data"]["u"], msg.update_id) diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/bybit/test_bybit_api_user_stream_data_source.py index 2e2922dbe8..a9803c2f10 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_api_user_stream_data_source.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_api_user_stream_data_source.py @@ -104,9 +104,9 @@ def test_listen_for_user_stream_auth(self, ws_connect_mock, auth_time_mock): sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( websocket_mock=ws_connect_mock.return_value) - self.assertEqual(1, len(sent_subscription_messages)) + self.assertEqual(4, len(sent_subscription_messages)) - expires = int((1000 + 10) * 1000) + expires = 11000000 _val = f'GET/realtime{expires}' signature = hmac.new(self.api_secret_key.encode("utf8"), _val.encode("utf8"), hashlib.sha256).hexdigest() @@ -117,6 +117,14 @@ def test_listen_for_user_stream_auth(self, ws_connect_mock, auth_time_mock): self.assertEqual(auth_subscription, sent_subscription_messages[0]) + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_connected_ws_assistant(self, mock_ws): + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + ws_assistant = self.async_run_with_timeout(self.data_source._get_ws_assistant()) + 
self.assertEqual(self.mocking_assistant.json_messages_sent_through_websocket(ws_assistant), []) + conn_ws_assistant = self.async_run_with_timeout(self.data_source._connected_websocket_assistant()) + self.assertEqual(self.mocking_assistant.json_messages_sent_through_websocket(conn_ws_assistant), []) + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_listen_for_user_stream_does_not_queue_pong_payload(self, mock_ws): @@ -133,7 +141,7 @@ def test_listen_for_user_stream_does_not_queue_pong_payload(self, mock_ws): self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) - self.assertEqual(0, msg_queue.qsize()) + self.assertEqual(1, msg_queue.qsize()) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_listen_for_user_stream_does_not_queue_ticket_info(self, mock_ws): @@ -173,10 +181,15 @@ def test_listen_for_user_stream_auth_failed_throws_exception(self, ws_connect_mo auth_time_mock.side_effect = [100] ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - result_auth = {'auth': 'fail', 'userId': 24068148} + result = { + "success": False, + "ret_msg": "Failed to authenticate", + "op": "auth", + "conn_id": "24068148" + } self.mocking_assistant.add_websocket_aiohttp_message( websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_auth)) + message=json.dumps(result)) output_queue = asyncio.Queue() @@ -187,7 +200,8 @@ def test_listen_for_user_stream_auth_failed_throws_exception(self, ws_connect_mo sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( websocket_mock=ws_connect_mock.return_value) - self.assertEqual(1, len(sent_subscription_messages)) + # 4 channels: auth, orderbook, trades and wallet + self.assertEqual(4, len(sent_subscription_messages)) self.assertTrue( self._is_logged("ERROR", "Unexpected error while listening to user stream. 
Retrying after 5 seconds...")) @@ -237,7 +251,5 @@ def test_listen_for_user_stream_sends_ping_message_before_ping_interval_finishes sent_messages = self.mocking_assistant.json_messages_sent_through_websocket( websocket_mock=ws_connect_mock.return_value) - expected_ping_message = { - "ping": 1101 * 1e3, - } + expected_ping_message = {'op': 'ping', 'args': 1101000} self.assertEqual(expected_ping_message, sent_messages[-1]) diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_auth.py b/test/hummingbot/connector/exchange/bybit/test_bybit_auth.py index 42800a2d4a..0754dce296 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_auth.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_auth.py @@ -1,11 +1,8 @@ import asyncio -import hashlib -import hmac from collections import OrderedDict -from typing import Any, Awaitable, Dict, Mapping, Optional +from typing import Awaitable, Dict, Mapping, Optional from unittest import TestCase from unittest.mock import MagicMock -from urllib.parse import urlencode from hummingbot.connector.exchange.bybit.bybit_auth import BybitAuth from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, WSJSONRequest @@ -16,7 +13,6 @@ class BybitAuthTests(TestCase): def setUp(self) -> None: super().setUp() self.api_key = "testApiKey" - self.passphrase = "testPassphrase" self.secret_key = "testSecretKey" self.mock_time_provider = MagicMock() @@ -32,20 +28,32 @@ def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) return ret - def test_add_auth_params_to_get_request_without_params(self): + def test_rest_auth_signature(self): + params = {"param_z": "value_param_z", "param_a": "value_param_a"} request = RESTRequest( method=RESTMethod.GET, url="https://test.url/api/endpoint", is_auth_required=True, + params=params, throttler_limit_id="/api/endpoint" ) - params_expected = self._params_expected(request.params) - self.async_run_with_timeout(self.auth.rest_authenticate(request)) + self.assertEqual(request.headers["X-BAPI-API-KEY"], self.api_key) + self.assertIsNotNone(request.headers["X-BAPI-TIMESTAMP"]) + sign_expected = self.auth._generate_rest_signature(request.headers["X-BAPI-TIMESTAMP"], request.method, request.params) + self.assertEqual(request.headers["X-BAPI-SIGN"], sign_expected) - self.assertEqual(params_expected['api_key'], request.params["api_key"]) - self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) - self.assertEqual(params_expected['sign'], request.params["sign"]) + def test_add_auth_params_to_get_request_without_params(self): + request = RESTRequest( + method=RESTMethod.GET, + url="https://test.url/api/endpoint", + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + self.assertEqual(request.headers["X-BAPI-API-KEY"], self.api_key) + self.assertIsNone(request.params) + self.assertIsNone(request.data) def test_add_auth_params_to_get_request_with_params(self): params = { @@ -61,12 +69,9 @@ def test_add_auth_params_to_get_request_with_params(self): ) params_expected = self._params_expected(request.params) - self.async_run_with_timeout(self.auth.rest_authenticate(request)) - self.assertEqual(params_expected['api_key'], request.params["api_key"]) - self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) - self.assertEqual(params_expected['sign'], request.params["sign"]) + 
self.assertEqual(len(request.params), 2) self.assertEqual(params_expected['param_z'], request.params["param_z"]) self.assertEqual(params_expected['param_a'], request.params["param_a"]) @@ -74,39 +79,30 @@ def test_add_auth_params_to_post_request(self): params = {"param_z": "value_param_z", "param_a": "value_param_a"} request = RESTRequest( method=RESTMethod.POST, - url="https://test.url/api/endpoint", + url="https://bybit-mock/api/endpoint", data=params, is_auth_required=True, throttler_limit_id="/api/endpoint" ) - params_auth = self._params_expected(request.params) params_request = self._params_expected(request.data) self.async_run_with_timeout(self.auth.rest_authenticate(request)) - self.assertEqual(params_auth['api_key'], request.params["api_key"]) - self.assertEqual(params_auth['timestamp'], request.params["timestamp"]) - self.assertEqual(params_auth['sign'], request.params["sign"]) + self.assertEqual(params_request['param_z'], request.data["param_z"]) self.assertEqual(params_request['param_a'], request.data["param_a"]) - def test_no_auth_added_to_wsrequest(self): - payload = {"param1": "value_param_1"} - request = WSJSONRequest(payload=payload, is_auth_required=True) - self.async_run_with_timeout(self.auth.ws_authenticate(request)) - self.assertEqual(payload, request.payload) + def test_ws_auth(self): + request = WSJSONRequest(payload={}, is_auth_required=True) + ws_auth_msg = self.async_run_with_timeout(self.auth.ws_authenticate(request)) + + api_key = ws_auth_msg["args"][0] + expires = ws_auth_msg["args"][1] + signature = ws_auth_msg["args"][2] - def _generate_signature(self, params: Dict[str, Any]) -> str: - encoded_params_str = urlencode(params) - digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() - return digest + self.assertEqual(ws_auth_msg["op"], "auth") + self.assertEqual(api_key, self.api_key) + self.assertEqual(signature, self.auth._generate_ws_signature(expires)) def _params_expected(self, request_params: Optional[Mapping[str, str]]) -> Dict: request_params = request_params if request_params else {} - params = { - 'timestamp': 1000000, - 'api_key': self.api_key, - } - params.update(request_params) - params = OrderedDict(sorted(params.items(), key=lambda t: t[0])) - params['sign'] = self._generate_signature(params=params) - return params + return OrderedDict(sorted(request_params.items(), key=lambda t: t[0])) diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py b/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py index 4f07d32279..4399153a83 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py @@ -16,10 +16,8 @@ from hummingbot.connector.exchange.bybit.bybit_exchange import BybitExchange from hummingbot.connector.trading_rule import TradingRule from hummingbot.connector.utils import get_new_client_order_id -from hummingbot.core.data_type.cancellation_result import CancellationResult from hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState -from hummingbot.core.data_type.trade_fee import TokenAmount from hummingbot.core.event.event_logger import EventLogger from hummingbot.core.event.events import ( BuyOrderCompletedEvent, @@ -116,27 +114,38 @@ def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): def get_exchange_rules_mock(self) -> Dict: exchange_rules = { - "ret_code": 0, - 
"ret_msg": "", - "ext_code": None, - "ext_info": None, - "result": [ - { - "name": self.ex_trading_pair, - "alias": self.ex_trading_pair, - "baseCurrency": "COINALPHA", - "quoteCurrency": "USDT", - "basePrecision": "0.000001", - "quotePrecision": "0.01", - "minTradeQuantity": "0.0001", - "minTradeAmount": "10", - "minPricePrecision": "0.01", - "maxTradeQuantity": "2", - "maxTradeAmount": "200", - "category": 1, - "showStatus": True - }, - ] + "retCode": 0, + "retMsg": "OK", + "result": { + "category": "spot", + "list": [ + { + "symbol": self.ex_trading_pair, + "baseCoin": self.base_asset, + "quoteCoin": self.quote_asset, + "innovation": "0", + "status": "Trading", + "marginTrading": "both", + "lotSizeFilter": { + "basePrecision": "0.000001", + "quotePrecision": "0.01", + "minOrderQty": "0.0001", + "maxOrderQty": "2", + "minOrderAmt": "10", + "maxOrderAmt": "200" + }, + "priceFilter": { + "tickSize": "0.01" + }, + "riskParameters": { + "limitParameter": "0.05", + "marketParameter": "0.05" + } + } + ] + }, + "retExtInfo": {}, + "time": 1000 } return exchange_rules @@ -149,14 +158,22 @@ def _simulate_trading_rules_initialized(self): min_base_amount_increment=Decimal(str(0.000001)), ) } + self.exchange._initialize_trading_pair_symbols_from_exchange_info(self.get_exchange_rules_mock()) + + def _simulate_trading_fees_initialized(self): + fee_rates = { + "symbol": self.ex_trading_pair, + "takerFeeRate": "0.0002", + "makerFeeRate": "0.0001" + } + self.exchange._trading_fees[self.trading_pair] = fee_rates def _validate_auth_credentials_present(self, request_call_tuple: NamedTuple): request_headers = request_call_tuple.kwargs["headers"] - request_params = request_call_tuple.kwargs["params"] self.assertIn("Content-Type", request_headers) - self.assertEqual("application/x-www-form-urlencoded", request_headers["Content-Type"]) - self.assertIn("api_key", request_params) - self.assertIn("sign", request_params) + self.assertIn("X-BAPI-API-KEY", request_headers) + self.assertIn("X-BAPI-TIMESTAMP", request_headers) + self.assertIn("X-BAPI-SIGN", request_headers) def test_supported_order_types(self): supported_types = self.exchange.supported_order_types() @@ -168,13 +185,14 @@ def test_supported_order_types(self): def test_check_network_success(self, mock_api): url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) resp = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "serverTime": 1625799317787 - } + "timeSecond": "1688639403", + "timeNano": "1688639403423213947" + }, + "retExtInfo": {}, + "time": 1688639403423 } mock_api.get(url, body=json.dumps(resp)) @@ -204,44 +222,48 @@ def test_update_trading_rules(self, mock_api): self.exchange._set_current_timestamp(1000) url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - resp = self.get_exchange_rules_mock() - mock_api.get(url, body=json.dumps(resp)) - mock_api.get(url, body=json.dumps(resp)) - - self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) - - self.assertTrue(self.trading_pair in self.exchange._trading_rules) - - @aioresponses() - def test_update_trading_rules_ignores_rule_with_error(self, mock_api): - self.exchange._set_current_timestamp(1000) - - url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) exchange_rules = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, - "result": [ - { - "name": self.ex_trading_pair, - "alias": 
self.ex_trading_pair, - "baseCurrency": "COINALPHA", - "quoteCurrency": "USDT", - "maxTradeAmount": "200", - "category": 1 - }, - ] + "retCode": 0, + "retMsg": "OK", + "result": { + "category": "spot", + "list": [ + { + "symbol": self.ex_trading_pair, + "baseCoin": self.base_asset, + "quoteCoin": self.quote_asset, + "innovation": "0", + "status": "Trading", + "marginTrading": "both", + "lotSizeFilter": { + "basePrecision": "0.000001", + "quotePrecision": "0.00000001", + "minOrderQty": "0.000048", + "maxOrderQty": "71.73956243", + "minOrderAmt": "1", + "maxOrderAmt": "200" + }, + "priceFilter": { + "tickSize": "0.01" + }, + "riskParameters": { + "limitParameter": "0.05", + "marketParameter": "0.05" + } + } + ] + }, + "retExtInfo": {}, + "time": 1001 } - mock_api.get(url, body=json.dumps(exchange_rules)) + self.exchange._initialize_trading_pair_symbols_from_exchange_info(exchange_rules) + mock_api.get(regex_url, body=json.dumps(exchange_rules)) self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) - self.assertEqual(0, len(self.exchange._trading_rules)) - self.assertTrue( - self._is_logged("ERROR", f"Error parsing the trading pair rule {self.ex_trading_pair}. Skipping.") - ) + self.assertTrue(self.trading_pair in self.exchange._trading_rules) def test_initial_status_dict(self): BybitAPIOrderBookDataSource._trading_pair_symbol_map = {} @@ -361,147 +383,113 @@ def test_restore_tracking_states_only_registers_open_orders(self): @aioresponses() def test_create_limit_order_successfully(self, mock_api): self._simulate_trading_rules_initialized() - request_sent_event = asyncio.Event() self.exchange._set_current_timestamp(1640780000) - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - creation_response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + get_orders_url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) + get_orders_regex_url = re.compile(f"^{get_orders_url}".replace(".", r"\.").replace("?", r"\?")) + + get_orders_resp = { + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1", - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "orderLinkId": "162073788655749", - "orderId": "889208273689997824", - "transactTime": "1620737886573", - "price": "20000", - "origQty": "10", - "executedQty": "0", - "status": "NEW", - "timeInForce": "GTC", - "type": "LIMIT", - "side": "BUY" - } + "list": [ + { + "orderId": "", + "orderLinkId": "OID1", + "blockTradeId": "", + "symbol": self.ex_trading_pair, + "price": "10", + "qty": "100", + "side": "Sell", + "isLeverage": "", + "positionIdx": 1, + "orderStatus": "New", + "cancelType": "UNKNOWN", + "rejectReason": "EC_NoError", + "avgPrice": "0", + "leavesQty": "0.10", + "leavesValue": "160", + "cumExecQty": "1", + "cumExecValue": "0", + "cumExecFee": "0", + "timeInForce": "GTC", + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "orderIv": "", + "triggerPrice": "0.00", + "takeProfit": "2500.00", + "stopLoss": "1500.00", + "tpTriggerBy": "LastPrice", + "slTriggerBy": "LastPrice", + "triggerDirection": 0, + "triggerBy": "UNKNOWN", + "lastPriceOnCreated": "", + "reduceOnly": False, + "closeOnTrigger": False, + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "tpslMode": "Full", + "tpLimitPrice": "", + "slLimitPrice": "", + "placeType": "", + "createdTime": "1640790000", + "updatedTime": "1640790000" + } + ], + "category": "spot" + }, + "retExtInfo": {}, + "time": 1640790000 } - 
tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) - resp = self.get_exchange_rules_mock() - mock_api.get(tradingrule_url, body=json.dumps(resp)) - mock_api.post(regex_url, - body=json.dumps(creation_response), - callback=lambda *args, **kwargs: request_sent_event.set()) - - self.test_task = asyncio.get_event_loop().create_task( - self.exchange._create_order(trade_type=TradeType.BUY, - order_id="OID1", - trading_pair=self.trading_pair, - amount=Decimal("100"), - order_type=OrderType.LIMIT, - price=Decimal("10000"))) - self.async_run_with_timeout(request_sent_event.wait()) - - order_request = next(((key, value) for key, value in mock_api.requests.items() - if key[1].human_repr().startswith(url))) - self._validate_auth_credentials_present(order_request[1][0]) - request_params = order_request[1][0].kwargs["params"] - self.assertEqual(self.ex_trading_pair, request_params["symbol"]) - self.assertEqual("BUY", request_params["side"]) - self.assertEqual("LIMIT", request_params["type"]) - self.assertEqual(Decimal("100"), Decimal(request_params["qty"])) - self.assertEqual(Decimal("10000"), Decimal(request_params["price"])) - self.assertEqual("OID1", request_params["orderLinkId"]) - - self.assertIn("OID1", self.exchange.in_flight_orders) - create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] - self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) - self.assertEqual(self.trading_pair, create_event.trading_pair) - self.assertEqual(OrderType.LIMIT, create_event.type) - self.assertEqual(Decimal("100"), create_event.amount) - self.assertEqual(Decimal("10000"), create_event.price) - self.assertEqual("OID1", create_event.order_id) - self.assertEqual(creation_response["result"]["orderId"], create_event.exchange_order_id) - - self.assertTrue( - self._is_logged( - "INFO", - f"Created LIMIT BUY order OID1 for {Decimal('100.000000')} {self.trading_pair}." 
- ) - ) - - @aioresponses() - def test_create_limit_maker_order_successfully(self, mock_api): - self._simulate_trading_rules_initialized() - request_sent_event = asyncio.Event() - self.exchange._set_current_timestamp(1640780000) - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - creation_response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + place_order_resp = { + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1", - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "orderLinkId": "162073788655749", - "orderId": "889208273689997824", - "transactTime": "1620737886573", - "price": "20000", - "origQty": "10", - "executedQty": "0", - "status": "NEW", - "timeInForce": "GTC", - "type": "LIMIT_MAKER", - "side": "BUY" - } + "orderId": "", + "orderLinkId": "OID1" + }, + "retExtInfo": {}, + "time": 1640780000 } - tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) - resp = self.get_exchange_rules_mock() - mock_api.get(tradingrule_url, body=json.dumps(resp)) - mock_api.post(regex_url, - body=json.dumps(creation_response), - callback=lambda *args, **kwargs: request_sent_event.set()) + place_order_url = web_utils.rest_url(CONSTANTS.ORDER_PLACE_PATH_URL) + place_order_regex_url = re.compile(f"^{place_order_url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.post(place_order_regex_url, + body=json.dumps(place_order_resp)) + + mock_api.get(get_orders_regex_url, + body=json.dumps(get_orders_resp)) + self.async_run_with_timeout(self.exchange._update_order_status()) self.test_task = asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.BUY, order_id="OID1", trading_pair=self.trading_pair, amount=Decimal("100"), - order_type=OrderType.LIMIT_MAKER, + order_type=OrderType.LIMIT, price=Decimal("10000"))) - self.async_run_with_timeout(request_sent_event.wait()) + self.async_run_with_timeout(self.exchange._update_order_status()) order_request = next(((key, value) for key, value in mock_api.requests.items() - if key[1].human_repr().startswith(url))) + if key[1].human_repr().startswith(place_order_url))) self._validate_auth_credentials_present(order_request[1][0]) - request_data = order_request[1][0].kwargs["params"] - self.assertEqual(self.ex_trading_pair, request_data["symbol"]) - self.assertEqual(TradeType.BUY.name, request_data["side"]) - self.assertEqual("LIMIT_MAKER", request_data["type"]) - self.assertEqual(Decimal("100"), Decimal(request_data["qty"])) - self.assertEqual(Decimal("10000"), Decimal(request_data["price"])) - self.assertEqual("OID1", request_data["orderLinkId"]) self.assertIn("OID1", self.exchange.in_flight_orders) create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) self.assertEqual(self.trading_pair, create_event.trading_pair) - self.assertEqual(OrderType.LIMIT_MAKER, create_event.type) + self.assertEqual(OrderType.LIMIT, create_event.type) self.assertEqual(Decimal("100"), create_event.amount) self.assertEqual(Decimal("10000"), create_event.price) self.assertEqual("OID1", create_event.order_id) - self.assertEqual(creation_response["result"]["orderId"], create_event.exchange_order_id) + self.assertEqual(place_order_resp["result"]["orderId"], create_event.exchange_order_id) self.assertTrue( self._is_logged( "INFO", - f"Created LIMIT_MAKER BUY order OID1 for {Decimal('100.000000')} 
{self.trading_pair}." + f"Created LIMIT BUY order OID1 for {Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." ) ) @@ -510,56 +498,99 @@ def test_create_limit_maker_order_successfully(self, mock_api): def test_create_market_order_successfully(self, mock_api, get_price_mock): get_price_mock.return_value = Decimal(1000) self._simulate_trading_rules_initialized() - request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - creation_response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + + get_orders_url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) + get_orders_regex_url = re.compile(f"^{get_orders_url}".replace(".", r"\.").replace("?", r"\?")) + + get_orders_resp = { + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1", - "symbol": self.ex_trading_pair, - "symbolName": self.ex_trading_pair, - "orderLinkId": "162073788655749", - "orderId": "889208273689997824", - "transactTime": "1620737886573", - "price": "20000", - "origQty": "10", - "executedQty": "0", - "status": "NEW", - "timeInForce": "GTC", - "type": "MARKET", - "side": "SELL" - } + "list": [ + { + "orderId": "", + "orderLinkId": "OID1", + "blockTradeId": "", + "symbol": self.ex_trading_pair, + "price": "10", + "qty": "100", + "side": "Sell", + "isLeverage": "", + "positionIdx": 1, + "orderStatus": "New", + "cancelType": "UNKNOWN", + "rejectReason": "EC_NoError", + "avgPrice": "0", + "leavesQty": "0.10", + "leavesValue": "160", + "cumExecQty": "1", + "cumExecValue": "0", + "cumExecFee": "0", + "timeInForce": "GTC", + "orderType": "Market", + "stopOrderType": "UNKNOWN", + "orderIv": "", + "triggerPrice": "0.00", + "takeProfit": "2500.00", + "stopLoss": "1500.00", + "tpTriggerBy": "LastPrice", + "slTriggerBy": "LastPrice", + "triggerDirection": 0, + "triggerBy": "UNKNOWN", + "lastPriceOnCreated": "", + "reduceOnly": False, + "closeOnTrigger": False, + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "tpslMode": "Full", + "tpLimitPrice": "", + "slLimitPrice": "", + "placeType": "", + "createdTime": "1640790000", + "updatedTime": "1640790000" + } + ], + "category": "spot" + }, + "retExtInfo": {}, + "time": 1640790000 } - tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) - resp = self.get_exchange_rules_mock() - mock_api.get(tradingrule_url, body=json.dumps(resp)) - mock_api.post(regex_url, - body=json.dumps(creation_response), - callback=lambda *args, **kwargs: request_sent_event.set()) + + place_order_resp = { + "retCode": 0, + "retMsg": "OK", + "result": { + "orderId": "", + "orderLinkId": "OID1" + }, + "retExtInfo": {}, + "time": 1640780000 + } + + place_order_url = web_utils.rest_url(CONSTANTS.ORDER_PLACE_PATH_URL) + place_order_regex_url = re.compile(f"^{place_order_url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.post(place_order_regex_url, + body=json.dumps(place_order_resp)) + + mock_api.get(get_orders_regex_url, + body=json.dumps(get_orders_resp)) + self.async_run_with_timeout(self.exchange._update_order_status()) self.test_task = asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.SELL, order_id="OID1", trading_pair=self.trading_pair, amount=Decimal("100"), + price=Decimal("10"), order_type=OrderType.MARKET)) - self.async_run_with_timeout(request_sent_event.wait()) + 
self.async_run_with_timeout(self.exchange._update_order_status()) order_request = next(((key, value) for key, value in mock_api.requests.items() - if key[1].human_repr().startswith(url))) + if key[1].human_repr().startswith(place_order_url))) self._validate_auth_credentials_present(order_request[1][0]) - request_data = order_request[1][0].kwargs["params"] - self.assertEqual(self.ex_trading_pair, request_data["symbol"]) - self.assertEqual(TradeType.SELL.name, request_data["side"]) - self.assertEqual("MARKET", request_data["type"]) - self.assertEqual(Decimal("100"), Decimal(request_data["qty"])) - self.assertEqual("OID1", request_data["orderLinkId"]) - self.assertNotIn("price", request_data) self.assertIn("OID1", self.exchange.in_flight_orders) create_event: SellOrderCreatedEvent = self.sell_order_created_logger.event_log[0] @@ -568,28 +599,23 @@ def test_create_market_order_successfully(self, mock_api, get_price_mock): self.assertEqual(OrderType.MARKET, create_event.type) self.assertEqual(Decimal("100"), create_event.amount) self.assertEqual("OID1", create_event.order_id) - self.assertEqual(creation_response["result"]["orderId"], create_event.exchange_order_id) + self.assertEqual(place_order_resp["result"]["orderId"], create_event.exchange_order_id) self.assertTrue( self._is_logged( "INFO", - f"Created MARKET SELL order OID1 for {Decimal('100.000000')} {self.trading_pair}." + f"Created MARKET SELL order OID1 for {Decimal('100.000000')} {self.trading_pair} at {Decimal('10')}." ) ) @aioresponses() def test_create_order_fails_and_raises_failure_event(self, mock_api): self._simulate_trading_rules_initialized() - request_sent_event = asyncio.Event() self.exchange._set_current_timestamp(1640780000) - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.ORDER_PLACE_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) - resp = self.get_exchange_rules_mock() - mock_api.get(tradingrule_url, body=json.dumps(resp)) - mock_api.post(regex_url, - status=400, - callback=lambda *args, **kwargs: request_sent_event.set()) + + mock_api.get(regex_url, status=400) self.test_task = asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.BUY, @@ -598,18 +624,10 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): amount=Decimal("100"), order_type=OrderType.LIMIT, price=Decimal("10000"))) - self.async_run_with_timeout(request_sent_event.wait()) - - order_request = next(((key, value) for key, value in mock_api.requests.items() - if key[1].human_repr().startswith(url))) - self._validate_auth_credentials_present(order_request[1][0]) + self.async_run_with_timeout(self.exchange._update_order_status()) self.assertNotIn("OID1", self.exchange.in_flight_orders) self.assertEquals(0, len(self.buy_order_created_logger.event_log)) - failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] - self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) - self.assertEqual(OrderType.LIMIT, failure_event.order_type) - self.assertEqual("OID1", failure_event.order_id) self.assertTrue( self._is_logged( @@ -623,17 +641,12 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): @aioresponses() def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(self, mock_api): self._simulate_trading_rules_initialized() - request_sent_event = asyncio.Event() 
self.exchange._set_current_timestamp(1640780000) - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) - resp = self.get_exchange_rules_mock() - mock_api.get(tradingrule_url, body=json.dumps(resp)) - mock_api.post(regex_url, - status=400, - callback=lambda *args, **kwargs: request_sent_event.set()) + + mock_api.get(regex_url, status=400) self.test_task = asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.BUY, @@ -651,7 +664,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel order_type=OrderType.LIMIT, price=Decimal("10000"))) - self.async_run_with_timeout(request_sent_event.wait()) + self.async_run_with_timeout(self.exchange._update_order_status()) self.assertNotIn("OID1", self.exchange.in_flight_orders) self.assertEquals(0, len(self.buy_order_created_logger.event_log)) @@ -695,33 +708,23 @@ def test_cancel_order_successfully(self, mock_api): self.assertIn("OID1", self.exchange.in_flight_orders) order = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.ORDER_CANCEL_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "10054", - "symbol": self.ex_trading_pair, - "orderLinkId": "OID1", - "orderId": "4", - "transactTime": "1620811601728", - "price": "10000", - "origQty": "100", - "executedQty": "0", - "status": "CANCELED", - "timeInForce": "GTC", - "type": "LIMIT", - "side": "BUY" - } + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id + }, + "retExtInfo": {}, + "time": 1640780000 } - mock_api.delete(regex_url, - body=json.dumps(response), - callback=lambda *args, **kwargs: request_sent_event.set()) + mock_api.post(regex_url, + body=json.dumps(response), + callback=lambda *args, **kwargs: request_sent_event.set()) self.exchange.cancel(client_order_id="OID1", trading_pair=self.trading_pair) self.async_run_with_timeout(request_sent_event.wait()) @@ -759,20 +762,15 @@ def test_cancel_order_raises_failure_event_when_request_fails(self, mock_api): self.assertIn("OID1", self.exchange.in_flight_orders) order = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - mock_api.delete(regex_url, - status=400, - callback=lambda *args, **kwargs: request_sent_event.set()) + mock_api.get(regex_url, status=400) + request_sent_event.set() self.exchange.cancel(client_order_id="OID1", trading_pair=self.trading_pair) self.async_run_with_timeout(request_sent_event.wait()) - cancel_request = next(((key, value) for key, value in mock_api.requests.items() - if key[1].human_repr().startswith(url))) - self._validate_auth_credentials_present(cancel_request[1][0]) - self.assertEquals(0, len(self.order_cancelled_logger.event_log)) self.assertTrue( @@ -783,7 +781,7 @@ def test_cancel_order_raises_failure_event_when_request_fails(self, mock_api): ) @aioresponses() - def test_cancel_two_orders_with_cancel_all_and_one_fails(self, mock_api): + def test_cancel_orders_with_cancel_all(self, mock_api): 
self.exchange._set_current_timestamp(1640780000) self.exchange.start_tracking_order( @@ -797,67 +795,37 @@ def test_cancel_two_orders_with_cancel_all_and_one_fails(self, mock_api): ) self.assertIn("OID1", self.exchange.in_flight_orders) - order1 = self.exchange.in_flight_orders["OID1"] - - self.exchange.start_tracking_order( - order_id="OID2", - exchange_order_id="5", - trading_pair=self.trading_pair, - trade_type=TradeType.SELL, - price=Decimal("11000"), - amount=Decimal("90"), - order_type=OrderType.LIMIT, - ) - - self.assertIn("OID2", self.exchange.in_flight_orders) - order2 = self.exchange.in_flight_orders["OID2"] + order = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.ORDER_CANCEL_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "10054", - "symbol": self.ex_trading_pair, - "orderLinkId": order1.client_order_id, - "orderId": order1.exchange_order_id, - "transactTime": "1620811601728", - "price": float(order1.price), - "origQty": float(order1.amount), - "executedQty": "0", - "status": "CANCELED", - "timeInForce": "GTC", - "type": "LIMIT", - "side": "BUY" - } + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id + }, + "retExtInfo": {}, + "time": 1640780000 } - mock_api.delete(regex_url, body=json.dumps(response)) - - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - - mock_api.delete(regex_url, status=400) + mock_api.post(regex_url, body=json.dumps(response)) cancellation_results = self.async_run_with_timeout(self.exchange.cancel_all(10)) - self.assertEqual(2, len(cancellation_results)) - self.assertEqual(CancellationResult(order1.client_order_id, True), cancellation_results[0]) - self.assertEqual(CancellationResult(order2.client_order_id, False), cancellation_results[1]) + self.assertEqual(1, len(cancellation_results)) self.assertEqual(1, len(self.order_cancelled_logger.event_log)) cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) - self.assertEqual(order1.client_order_id, cancel_event.order_id) + self.assertEqual(order.client_order_id, cancel_event.order_id) self.assertTrue( self._is_logged( "INFO", - f"Successfully canceled order {order1.client_order_id}." + f"Successfully canceled order {order.client_order_id}." 
) ) @@ -871,19 +839,20 @@ def test_update_time_synchronizer_successfully(self, mock_api, seconds_counter_m regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "serverTime": 1625799317787 - } + "timeSecond": "1688639403", + "timeNano": "1688639403423213947" + }, + "retExtInfo": {}, + "time": 1688639403423 } mock_api.get(regex_url, body=json.dumps(response)) self.async_run_with_timeout(self.exchange._update_time_synchronizer()) - self.assertEqual(response["result"]['serverTime'] * 1e-3, self.exchange._time_synchronizer.time()) + self.assertEqual(int(response["result"]['timeNano']) * 1e-3, self.exchange._time_synchronizer.time() * 1e9) @aioresponses() def test_update_time_synchronizer_failure_is_logged(self, mock_api): @@ -914,36 +883,55 @@ def test_update_time_synchronizer_raises_cancelled_error(self, mock_api): @aioresponses() def test_update_balances(self, mock_api): - url = web_utils.rest_url(CONSTANTS.ACCOUNTS_PATH_URL) + url = web_utils.rest_url(CONSTANTS.BALANCE_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "balances": [ - { - "coin": "COINALPHA", - "coinId": "COINALPHA", - "coinName": "COINALPHA", - "total": "15", - "free": "10", - "locked": "0" - }, + "list": [ { - "coin": "USDT", - "coinId": "USDT", - "coinName": "USDT", - "total": "2000", - "free": "2000", - "locked": "0" + "totalEquity": "3.31216591", + "accountIMRate": "0", + "totalMarginBalance": "3.00326056", + "totalInitialMargin": "0", + "accountType": "UNIFIED", + "totalAvailableBalance": "3.00326056", + "accountMMRate": "0", + "totalPerpUPL": "0", + "totalWalletBalance": "3.00326056", + "accountLTV": "0", + "totalMaintenanceMargin": "0", + "coin": [ + { + "availableToBorrow": "10", + "bonus": "0", + "accruedInterest": "0", + "availableToWithdraw": "10", + "totalOrderIM": "0", + "equity": "0", + "totalPositionMM": "0", + "usdValue": "0", + "spotHedgingQty": "0.01592413", + "unrealisedPnl": "0", + "collateralSwitch": True, + "borrowAmount": "0.0", + "totalPositionIM": "0", + "walletBalance": "15", + "cumRealisedPnl": "0", + "locked": "0", + "marginCollateral": True, + "coin": self.base_asset + } + ] } ] - } + }, + "retExtInfo": {}, + "time": 1690872862481 } - + self.exchange._account_type = "UNIFIED" mock_api.get(regex_url, body=json.dumps(response)) self.async_run_with_timeout(self.exchange._update_balances()) @@ -951,39 +939,41 @@ def test_update_balances(self, mock_api): total_balances = self.exchange.get_all_balances() self.assertEqual(Decimal("10"), available_balances["COINALPHA"]) - self.assertEqual(Decimal("2000"), available_balances["USDT"]) + # self.assertEqual(Decimal("2000"), available_balances["USDT"]) self.assertEqual(Decimal("15"), total_balances["COINALPHA"]) - self.assertEqual(Decimal("2000"), total_balances["USDT"]) + # self.assertEqual(Decimal("2000"), total_balances["USDT"]) + + @aioresponses() + def test_update_trading_fees(self, mock_api): + self._simulate_trading_rules_initialized() + url = web_utils.rest_url(CONSTANTS.EXCHANGE_FEE_RATE_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + self._simulate_trading_fees_initialized() + self.assertEqual(Decimal("0.0001"), Decimal(self.exchange._trading_fees[self.trading_pair]["makerFeeRate"])) + 
self.assertEqual(Decimal("0.0002"), Decimal(self.exchange._trading_fees[self.trading_pair]["takerFeeRate"])) response = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "balances": [ + "list": [ { - "coin": "COINALPHA", - "coinId": "COINALPHA", - "coinName": "COINALPHA", - "total": "15", - "free": "10", - "locked": "0" - }, + "symbol": self.ex_trading_pair, + "takerFeeRate": "0.0006", + "makerFeeRate": "0.0005" + } ] - } + }, + "retExtInfo": {}, + "time": 1676360412576 } mock_api.get(regex_url, body=json.dumps(response)) - self.async_run_with_timeout(self.exchange._update_balances()) + self.async_run_with_timeout(self.exchange._update_trading_fees()) - available_balances = self.exchange.available_balances - total_balances = self.exchange.get_all_balances() - - self.assertNotIn("USDT", available_balances) - self.assertNotIn("USDT", total_balances) - self.assertEqual(Decimal("10"), available_balances["COINALPHA"]) - self.assertEqual(Decimal("15"), total_balances["COINALPHA"]) + self.assertEqual(Decimal("0.0005"), Decimal(self.exchange._trading_fees[self.trading_pair]["makerFeeRate"])) + self.assertEqual(Decimal("0.0006"), Decimal(self.exchange._trading_fees[self.trading_pair]["takerFeeRate"])) @aioresponses() def test_update_order_status_when_filled(self, mock_api): @@ -1002,36 +992,63 @@ def test_update_order_status_when_filled(self, mock_api): ) order: InFlightOrder = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) order_status = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1054", - "exchangeId": "301", - "symbol": self.trading_pair, - "symbolName": "ETHUSDT", - "orderLinkId": order.client_order_id, - "orderId": order.exchange_order_id, - "price": "20000", - "origQty": "1", - "executedQty": "1", - "cummulativeQuoteQty": "1", - "avgPrice": "1000", - "status": "FILLED", - "timeInForce": "GTC", - "type": "LIMIT", - "side": order.trade_type.name, - "stopPrice": "0.0", - "icebergQty": "0.0", - "time": "1620811601728", - "updateTime": "1620811601743", - "isWorking": True - } + "list": [ + { + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "blockTradeId": "", + "symbol": self.ex_trading_pair, + "price": "10000", + "qty": "1", + "side": order.trade_type.name, + "isLeverage": "", + "positionIdx": 1, + "orderStatus": "Filled", + "cancelType": "UNKNOWN", + "rejectReason": "EC_NoError", + "avgPrice": "0", + "leavesQty": "0.10", + "leavesValue": "160", + "cumExecQty": "1", + "cumExecValue": "0", + "cumExecFee": "0", + "timeInForce": "GTC", + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "orderIv": "", + "triggerPrice": "0.00", + "takeProfit": "2500.00", + "stopLoss": "1500.00", + "tpTriggerBy": "LastPrice", + "slTriggerBy": "LastPrice", + "triggerDirection": 0, + "triggerBy": "UNKNOWN", + "lastPriceOnCreated": "", + "reduceOnly": False, + "closeOnTrigger": False, + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "tpslMode": "Full", + "tpLimitPrice": "", + "slLimitPrice": "", + "placeType": "", + "createdTime": "1684738540559", + "updatedTime": "1684738540561" + } + ], + "nextPageCursor": "page_args%3Dfd4300ae-7847-404e-b947-b46980a4d140%26symbol%3D6%26", + "category": "spot" + }, + "retExtInfo": {}, + "time": 
1684765770483 } mock_api.get(regex_url, body=json.dumps(order_status)) @@ -1082,36 +1099,63 @@ def test_update_order_status_when_cancelled(self, mock_api): ) order = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) order_status = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1054", - "exchangeId": "301", - "symbol": self.trading_pair, - "symbolName": "ETHUSDT", - "orderLinkId": order.client_order_id, - "orderId": order.exchange_order_id, - "price": "10000", - "origQty": "1", - "executedQty": "1", - "cummulativeQuoteQty": "1", - "avgPrice": "1000", - "status": "CANCELED", - "timeInForce": "GTC", - "type": "LIMIT", - "side": order.trade_type.name, - "stopPrice": "0.0", - "icebergQty": "0.0", - "time": "1620811601728", - "updateTime": "1620811601743", - "isWorking": True - } + "list": [ + { + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "blockTradeId": "", + "symbol": self.ex_trading_pair, + "price": f"{order.price}", + "qty": f"{order.amount}", + "side": order.trade_type.name, + "isLeverage": "", + "positionIdx": 1, + "orderStatus": "Cancelled", + "cancelType": "UNKNOWN", + "rejectReason": "EC_NoError", + "avgPrice": "0", + "leavesQty": "0.10", + "leavesValue": "160", + "cumExecQty": "1", + "cumExecValue": "0", + "cumExecFee": "0", + "timeInForce": "GTC", + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "orderIv": "", + "triggerPrice": "0.00", + "takeProfit": "2500.00", + "stopLoss": "1500.00", + "tpTriggerBy": "LastPrice", + "slTriggerBy": "LastPrice", + "triggerDirection": 0, + "triggerBy": "UNKNOWN", + "lastPriceOnCreated": "", + "reduceOnly": False, + "closeOnTrigger": False, + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "tpslMode": "Full", + "tpLimitPrice": "", + "slLimitPrice": "", + "placeType": "", + "createdTime": "1684738540559", + "updatedTime": "1684738540561" + } + ], + "nextPageCursor": "page_args%3Dfd4300ae-7847-404e-b947-b46980a4d140%26symbol%3D6%26", + "category": "spot" + }, + "retExtInfo": {}, + "time": 1684765770483 } mock_api.get(regex_url, body=json.dumps(order_status)) @@ -1148,40 +1192,66 @@ def test_update_order_status_when_order_has_not_changed(self, mock_api): ) order: InFlightOrder = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) order_status = { - "ret_code": 0, - "ret_msg": "", - "ext_code": None, - "ext_info": None, + "retCode": 0, + "retMsg": "OK", "result": { - "accountId": "1054", - "exchangeId": "301", - "symbol": self.trading_pair, - "symbolName": "ETHUSDT", - "orderLinkId": order.client_order_id, - "orderId": order.exchange_order_id, - "price": "10000", - "origQty": "1", - "executedQty": "1", - "cummulativeQuoteQty": "1", - "avgPrice": "1000", - "status": "NEW", - "timeInForce": "GTC", - "type": "LIMIT", - "side": order.trade_type.name, - "stopPrice": "0.0", - "icebergQty": "0.0", - "time": "1620811601728", - "updateTime": "1620811601743", - "isWorking": True - } + "list": [ + { + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "blockTradeId": "", + "symbol": self.trading_pair, + "price": "10000", + "qty": "1", + "side": 
order.trade_type.name, + "isLeverage": "", + "positionIdx": 1, + "orderStatus": "New", + "cancelType": "UNKNOWN", + "rejectReason": "EC_NoError", + "avgPrice": "0", + "leavesQty": "0.10", + "leavesValue": "160", + "cumExecQty": "1", + "cumExecValue": "0", + "cumExecFee": "0", + "timeInForce": "GTC", + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "orderIv": "", + "triggerPrice": "0.00", + "takeProfit": "2500.00", + "stopLoss": "1500.00", + "tpTriggerBy": "LastPrice", + "slTriggerBy": "LastPrice", + "triggerDirection": 0, + "triggerBy": "UNKNOWN", + "lastPriceOnCreated": "", + "reduceOnly": False, + "closeOnTrigger": False, + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "tpslMode": "Full", + "tpLimitPrice": "", + "slLimitPrice": "", + "placeType": "", + "createdTime": "1684738540559", + "updatedTime": "1684738540561" + } + ], + "nextPageCursor": "page_args%3Dfd4300ae-7847-404e-b947-b46980a4d140%26symbol%3D6%26", + "category": "spot" + }, + "retExtInfo": {}, + "time": 1684765770483 } - mock_response = order_status - mock_api.get(regex_url, body=json.dumps(mock_response)) + mock_api.get(regex_url, body=json.dumps(order_status)) self.assertTrue(order.is_open) @@ -1212,7 +1282,7 @@ def test_update_order_status_when_request_fails_marks_order_as_not_found(self, m ) order: InFlightOrder = self.exchange.in_flight_orders["OID1"] - url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + url = web_utils.rest_url(CONSTANTS.GET_ORDERS_PATH_URL) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) mock_api.get(regex_url, status=404) @@ -1243,31 +1313,56 @@ def test_user_stream_update_for_new_order_does_not_update_status(self): order = self.exchange.in_flight_orders["OID1"] event_message = { - "e": "executionReport", - "E": "1499405658658", - "s": order.trading_pair, - "c": order.client_order_id, - "S": order.trade_type.name, - "o": "LIMIT", - "f": "GTC", - "q": "1.00000000", - "p": "0.10264410", - "X": "NEW", - "i": order.exchange_order_id, - "M": "0", - "l": "0.00000000", - "z": "0.00000000", - "L": "0.00000000", - "n": "0", - "N": "COINALPHA", - "u": True, - "w": True, - "m": False, - "O": "1499405658657", - "Z": "473.199", - "A": "0", - "C": False, - "v": "0" + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1499405658658, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": "72.5", + "qty": "1", + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "New", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1499405658658", + "updatedTime": "1499405658657", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "feeCurrency": "" + } + ] } mock_queue = AsyncMock() @@ -1293,7 +1388,7 @@ def test_user_stream_update_for_new_order_does_not_update_status(self): self._is_logged( "INFO", f"Created 
{order.order_type.name.upper()} {order.trade_type.name.upper()} order " - f"{order.client_order_id} for {order.amount} {order.trading_pair}." + f"{order.client_order_id} for {order.amount} {order.trading_pair} at {Decimal('10000')}." ) ) @@ -1311,31 +1406,56 @@ def test_user_stream_update_for_cancelled_order(self): order = self.exchange.in_flight_orders["OID1"] event_message = { - "e": "executionReport", - "E": "1499405658658", - "s": order.trading_pair, - "c": order.client_order_id, - "S": order.trade_type.name, - "o": "LIMIT", - "f": "GTC", - "q": "1.00000000", - "p": "0.10264410", - "X": "CANCELED", - "i": order.exchange_order_id, - "M": "0", - "l": "0.00000000", - "z": "0.00000000", - "L": "0.00000000", - "n": "0", - "N": "COINALPHA", - "u": True, - "w": True, - "m": False, - "O": "1499405658657", - "Z": "473.199", - "A": "0", - "C": False, - "v": "0" + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1672364262474, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": order.price, + "qty": order.amount, + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "Cancelled", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1672364262444", + "updatedTime": "1672364262457", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "feeCurrency": "" + } + ] } mock_queue = AsyncMock() @@ -1373,36 +1493,100 @@ def test_user_stream_update_for_order_partial_fill(self): order = self.exchange.in_flight_orders["OID1"] event_message = { - "e": "executionReport", - "t": "1499405658658", - "E": "1499405658658", - "s": order.trading_pair, - "c": order.client_order_id, - "S": order.trade_type.name, - "o": "LIMIT", - "f": "GTC", - "q": order.amount, - "p": order.price, - "X": "PARTIALLY_FILLED", - "i": order.exchange_order_id, - "M": "0", - "l": "0.50000000", - "z": "0.50000000", - "L": "0.10250000", - "n": "0.003", - "N": self.base_asset, - "u": True, - "w": True, - "m": False, - "O": "1499405658657", - "Z": "473.199", - "A": "0", - "C": False, - "v": "0" + "id": "592324803b2785-26fa-4214-9963-bdd4727f07be", + "channel": "trade", + "topic": "execution", + "creationTime": 1640790000, + "data": [ + { + "category": "spot", + # "symbol": order.trading_pair, + "symbol": self.ex_trading_pair, + "execFee": "0.005061", + "execId": "7e2ae69c-4edf-5800-a352-893d52b446aa", + "execPrice": order.price + Decimal("0.5"), + "execQty": order.amount + Decimal("0.5"), + "execType": "Trade", + "execValue": "8.435", + "isMaker": False, + "feeRate": "0.0006", + "tradeIv": "", + "markIv": "", + "blockTradeId": "", + "markPrice": "0.3391", + "indexPrice": "", + "underlyingPrice": "", + "leavesQty": "0", + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "orderPrice": "0.3207", + "orderQty": "25", + "orderType": "Limit", + "stopOrderType": 
"UNKNOWN", + "side": order.trade_type.name, + "execTime": "1640790000", + "isLeverage": "0", + "closedSize": "", + "seq": 4688002127 + } + ] + } + + order_status_event = { + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1672364262474, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": order.price, + "qty": order.amount, + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "PartiallyFilled", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1672364262444", + "updatedTime": "1672364262457", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "feeCurrency": "" + } + ] } mock_queue = AsyncMock() - mock_queue.get.side_effect = [event_message, asyncio.CancelledError] + mock_queue.get.side_effect = [event_message, order_status_event, asyncio.CancelledError] self.exchange._user_stream_tracker._user_stream = mock_queue try: @@ -1419,17 +1603,223 @@ def test_user_stream_update_for_order_partial_fill(self): self.assertEqual(order.trading_pair, fill_event.trading_pair) self.assertEqual(order.trade_type, fill_event.trade_type) self.assertEqual(order.order_type, fill_event.order_type) - self.assertEqual(Decimal(event_message["L"]), fill_event.price) - self.assertEqual(Decimal(event_message["l"]), fill_event.amount) + self.assertEqual(Decimal(event_message["data"][0]["execPrice"]), fill_event.price) + self.assertEqual(Decimal(event_message["data"][0]["execQty"]), fill_event.amount) + self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + + def test_user_stream_update_for_order_partial_fill_completed(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] - self.assertEqual([TokenAmount(amount=Decimal(event_message["n"]), token=(event_message["N"]))], - fill_event.trade_fee.flat_fees) + event_message_1 = { + "id": "592324803b2785-26fa-4214-9963-bdd4727f07be", + "channel": "trade", + "topic": "execution", + "creationTime": 1640790000, + "data": [ + { + "category": "spot", + "symbol": self.ex_trading_pair, + "execFee": "0.005061", + "execId": "7e2ae69c-4edf-5800-a352-893d52b446aa", + "execPrice": order.price * Decimal("0.5"), + "execQty": order.amount * Decimal("0.5"), + "execType": "Trade", + "execValue": "8.435", + "isMaker": False, + "feeRate": "0.0006", + "tradeIv": "", + "markIv": "", + "blockTradeId": "", + "markPrice": "0.3391", + "indexPrice": "", + "underlyingPrice": "", + "leavesQty": "0", + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "orderPrice": "0.3207", + "orderQty": "25", + "orderType": "Limit", + 
"stopOrderType": "UNKNOWN", + "side": order.trade_type.name, + "execTime": "1640790000", + "isLeverage": "0", + "closedSize": "", + "seq": 4688002127 + } + ] + } - self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + event_message_2 = { + "id": "592324803b2785-26fa-4214-9963-bdd4727f17be", + "channel": "trade", + "topic": "execution", + "creationTime": 1640790000, + "data": [ + { + "category": "spot", + "symbol": self.ex_trading_pair, + "execFee": "0.005061", + "execId": "7e2ae69c-4edf-5800-a352-893d52b446ab", + "execPrice": order.price * Decimal("0.5"), + "execQty": order.amount * Decimal("0.5"), + "execType": "Trade", + "execValue": "8.435", + "isMaker": False, + "feeRate": "0.0006", + "tradeIv": "", + "markIv": "", + "blockTradeId": "", + "markPrice": "0.3391", + "indexPrice": "", + "underlyingPrice": "", + "leavesQty": "0", + "orderId": order.exchange_order_id, + "orderLinkId": order.client_order_id, + "orderPrice": "0.3207", + "orderQty": "25", + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "side": order.trade_type.name, + "execTime": "1640790000", + "isLeverage": "0", + "closedSize": "", + "seq": 4688002127 + } + ] + } + order_status_event_1 = { + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1672364262474, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": order.price, + "qty": order.amount, + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "PartiallyFilled", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1672364262444", + "updatedTime": "1672364262457", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "feeCurrency": "" + } + ] + } + + order_status_event_2 = { + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1672364263474, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": order.price, + "qty": order.amount, + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "Filled", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1672364262444", + "updatedTime": "1672364263457", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": 
"", + "feeCurrency": "" + } + ] + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [event_message_1, event_message_2, order_status_event_1, order_status_event_2, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertTrue(order.is_filled) + self.assertTrue(order.is_done) self.assertTrue( - self._is_logged("INFO", f"The {order.trade_type.name} order {order.client_order_id} amounting to " - f"{fill_event.amount}/{order.amount} {order.base_asset} has been filled.") + self._is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." + ) ) def test_user_stream_update_for_order_fill(self): @@ -1446,54 +1836,100 @@ def test_user_stream_update_for_order_fill(self): order = self.exchange.in_flight_orders["OID1"] event_message = { - "e": "executionReport", - "t": "1499405658658", - "E": "1499405658658", - "s": order.trading_pair, - "c": order.client_order_id, - "S": order.trade_type.name, - "o": "LIMIT", - "f": "GTC", - "q": order.amount, - "p": order.price, - "X": "FILLED", - "i": order.exchange_order_id, - "M": "0", - "l": order.amount, - "z": "0.50000000", - "L": order.price, - "n": "0.003", - "N": self.base_asset, - "u": True, - "w": True, - "m": False, - "O": "1499405658657", - "Z": "473.199", - "A": "0", - "C": False, - "v": "0" + "id": "5923240c6880ab-c59f-420b-9adb-3639adc9dd90", + "topic": "order", + "channel": "order", + "creationTime": 1672364262474, + "data": [ + { + "symbol": self.ex_trading_pair, + "orderId": order.exchange_order_id, + "side": order.trade_type.name, + "orderType": "Limit", + "cancelType": "UNKNOWN", + "price": order.price, + "qty": order.amount, + "orderIv": "", + "timeInForce": "IOC", + "orderStatus": "Filled", + "orderLinkId": order.client_order_id, + "lastPriceOnCreated": "", + "reduceOnly": False, + "leavesQty": "", + "leavesValue": "", + "cumExecQty": "1", + "cumExecValue": "75", + "avgPrice": "75", + "blockTradeId": "", + "positionIdx": 0, + "cumExecFee": "0.358635", + "createdTime": "1672364262444", + "updatedTime": "1672364262457", + "rejectReason": "EC_NoError", + "stopOrderType": "", + "tpslMode": "", + "triggerPrice": "", + "takeProfit": "", + "stopLoss": "", + "tpTriggerBy": "", + "slTriggerBy": "", + "tpLimitPrice": "", + "slLimitPrice": "", + "triggerDirection": 0, + "triggerBy": "", + "closeOnTrigger": False, + "category": "option", + "placeType": "price", + "smpType": "None", + "smpGroup": 0, + "smpOrderId": "", + "feeCurrency": "" + } + ] } filled_event = { - "e": "ticketInfo", - "E": "1621912542359", - "s": self.ex_trading_pair, - "q": "0.001639", - "t": "1621912542314", - "p": "61000.0", - "T": "899062000267837441", - "o": "899048013515737344", - "c": "1621910874883", - "O": "899062000118679808", - "a": "10043", - "A": "10024", - "m": True + "id": "592324803b2785-26fa-4214-9963-bdd4727f07be", + "channel": "trade", + "topic": "execution", + "creationTime": 1499405658658, + "data": [ + { + "category": "spot", + "symbol": self.ex_trading_pair, + "execFee": "0.005061", + "execId": "7e2ae69c-4edf-5800-a352-893d52b446aa", + "execPrice": order.price, + "execQty": order.amount, + "execType": "Trade", + "execValue": "8.435", + "isMaker": False, + "feeRate": "0.0006", + "tradeIv": "", + "markIv": "", + "blockTradeId": "", + "markPrice": "0.3391", + "indexPrice": "", + "underlyingPrice": "", + "leavesQty": "0", + "orderId": order.exchange_order_id, + 
"orderLinkId": order.client_order_id, + "orderPrice": order.price, + "orderQty": order.amount, + "orderType": "Limit", + "stopOrderType": "UNKNOWN", + "side": order.trade_type.name, + "execTime": "1499405658658", + "isLeverage": "0", + "closedSize": "", + "seq": 4688002127 + } + ] } mock_queue = AsyncMock() mock_queue.get.side_effect = [event_message, filled_event, asyncio.CancelledError] self.exchange._user_stream_tracker._user_stream = mock_queue - try: self.async_run_with_timeout(self.exchange._user_stream_event_listener()) except asyncio.CancelledError: @@ -1505,12 +1941,10 @@ def test_user_stream_update_for_order_fill(self): self.assertEqual(order.trading_pair, fill_event.trading_pair) self.assertEqual(order.trade_type, fill_event.trade_type) self.assertEqual(order.order_type, fill_event.order_type) - match_price = Decimal(event_message["L"]) - match_size = Decimal(event_message["l"]) + match_price = Decimal(event_message["data"][0]["price"]) + match_size = Decimal(event_message["data"][0]["qty"]) self.assertEqual(match_price, fill_event.price) self.assertEqual(match_size, fill_event.amount) - self.assertEqual([TokenAmount(amount=Decimal(event_message["n"]), token=(event_message["N"]))], - fill_event.trade_fee.flat_fees) buy_event: BuyOrderCompletedEvent = self.buy_order_completed_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) @@ -1534,18 +1968,46 @@ def test_user_stream_update_for_order_fill(self): def test_user_stream_balance_update(self): self.exchange._set_current_timestamp(1640780000) - event_message = { - "e": "outboundAccountInfo", - "E": "1629969654753", - "T": True, - "W": True, - "D": True, - "B": [ + "id": "592324d2bce751-ad38-48eb-8f42-4671d1fb4d4e", + "channel": CONSTANTS.PRIVATE_WALLET_CHANNEL, + "topic": "wallet", + "creationTime": 1700034722104, + "data": [ { - "a": self.base_asset, - "f": "10000", - "l": "500" + "accountIMRate": "0", + "accountMMRate": "0", + "totalEquity": "10262.91335023", + "totalWalletBalance": "9684.46297164", + "totalMarginBalance": "9684.46297164", + "totalAvailableBalance": "9556.6056555", + "totalPerpUPL": "0", + "totalInitialMargin": "0", + "totalMaintenanceMargin": "0", + "coin": [ + { + "coin": self.base_asset, + "equity": "10000", + "usdValue": "10", + "walletBalance": "10000", + "availableToWithdraw": "10000", + "availableToBorrow": "", + "borrowAmount": "0", + "accruedInterest": "0", + "totalOrderIM": "", + "totalPositionIM": "", + "totalPositionMM": "", + "unrealisedPnl": "0", + "cumRealisedPnl": "-0.00000973", + "bonus": "0", + "collateralSwitch": True, + "marginCollateral": True, + "locked": "0", + "spotHedgingQty": "0.01592413" + } + ], + "accountLTV": "0", + "accountType": "SPOT" } ] } @@ -1553,14 +2015,14 @@ def test_user_stream_balance_update(self): mock_queue = AsyncMock() mock_queue.get.side_effect = [event_message, asyncio.CancelledError] self.exchange._user_stream_tracker._user_stream = mock_queue + self.exchange._account_type = "SPOT" try: self.async_run_with_timeout(self.exchange._user_stream_event_listener()) except asyncio.CancelledError: pass - self.assertEqual(Decimal("10000"), self.exchange.available_balances["COINALPHA"]) - self.assertEqual(Decimal("10500"), self.exchange.get_balance("COINALPHA")) + self.assertEqual(Decimal("10000"), self.exchange.available_balances[self.base_asset]) def test_user_stream_raises_cancel_exception(self): self.exchange._set_current_timestamp(1640780000) @@ -1573,31 +2035,3 @@ def test_user_stream_raises_cancel_exception(self): 
asyncio.CancelledError, self.async_run_with_timeout, self.exchange._user_stream_event_listener()) - - @patch("hummingbot.connector.exchange.bybit.bybit_exchange.BybitExchange._sleep") - def test_user_stream_logs_errors(self, _): - self.exchange._set_current_timestamp(1640780000) - - incomplete_event = { - "e": "outboundAccountInfo", - "E": "1629969654753", - "T": True, - "W": True, - "D": True, - } - - mock_queue = AsyncMock() - mock_queue.get.side_effect = [incomplete_event, asyncio.CancelledError] - self.exchange._user_stream_tracker._user_stream = mock_queue - - try: - self.async_run_with_timeout(self.exchange._user_stream_event_listener()) - except asyncio.CancelledError: - pass - - self.assertTrue( - self._is_logged( - "ERROR", - "Unexpected error in user stream listener loop." - ) - ) diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_web_utils.py b/test/hummingbot/connector/exchange/bybit/test_bybit_web_utils.py index f4759e2433..b412db31bc 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_web_utils.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_web_utils.py @@ -6,6 +6,6 @@ class WebUtilsTests(TestCase): def test_rest_url(self): url = web_utils.rest_url(path_url=CONSTANTS.LAST_TRADED_PRICE_PATH, domain=CONSTANTS.DEFAULT_DOMAIN) - self.assertEqual('https://api.bybit.com/spot/quote/v1/ticker/price', url) + self.assertEqual('https://api.bybit.com/v5/market/tickers', url) url = web_utils.rest_url(path_url=CONSTANTS.LAST_TRADED_PRICE_PATH, domain='bybit_testnet') - self.assertEqual('https://api-testnet.bybit.com/spot/quote/v1/ticker/price', url) + self.assertEqual('https://api-testnet.bybit.com/v5/market/tickers', url) diff --git a/test/hummingbot/connector/exchange/coinbase_advanced_trade/test_coinbase_advanced_trade_exchange.py b/test/hummingbot/connector/exchange/coinbase_advanced_trade/test_coinbase_advanced_trade_exchange.py index 9d0c4752ba..6770017866 100644 --- a/test/hummingbot/connector/exchange/coinbase_advanced_trade/test_coinbase_advanced_trade_exchange.py +++ b/test/hummingbot/connector/exchange/coinbase_advanced_trade/test_coinbase_advanced_trade_exchange.py @@ -690,7 +690,7 @@ def test_create_buy_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}." + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." ) ) @@ -743,7 +743,7 @@ def test_create_sell_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}." + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}." 
) ) diff --git a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_order_book_data_source.py b/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_order_book_data_source.py deleted file mode 100644 index 1ac4d86ef0..0000000000 --- a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_order_book_data_source.py +++ /dev/null @@ -1,480 +0,0 @@ -import asyncio -import json -import unittest -from decimal import Decimal -from typing import Awaitable, Dict, List, Optional -from unittest.mock import AsyncMock, patch - -from aioresponses import aioresponses - -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_order_book_data_source import ( - CoinbaseProAPIOrderBookDataSource, -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_auth import CoinbaseProAuth -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_order_book_tracker_entry import ( - CoinbaseProOrderBookTrackerEntry, -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_utils import build_coinbase_pro_web_assistant_factory -from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.core.data_type.order_book import OrderBook - - -class CoinbaseProAPIOrderBookDataSourceTests(unittest.TestCase): - # logging.Level required to receive logs from the data source logger - level = 0 - - @classmethod - def setUpClass(cls) -> None: - super().setUpClass() - cls.ev_loop = asyncio.get_event_loop() - cls.base_asset = "COINALPHA" - cls.quote_asset = "HBOT" - cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - - def setUp(self) -> None: - super().setUp() - self.mocking_assistant = NetworkMockingAssistant() - auth = CoinbaseProAuth(api_key="SomeAPIKey", secret_key="SomeSecretKey", passphrase="SomePassPhrase") - web_assistants_factory = build_coinbase_pro_web_assistant_factory(auth) - self.data_source = CoinbaseProAPIOrderBookDataSource( - trading_pairs=[self.trading_pair], web_assistants_factory=web_assistants_factory - ) - self.data_source.logger().setLevel(1) - self.data_source.logger().addHandler(self) - - self.log_records = [] - self.async_tasks: List[asyncio.Task] = [] - - def tearDown(self) -> None: - for task in self.async_tasks: - task.cancel() - super().tearDown() - - def handle(self, record): - self.log_records.append(record) - - def _is_logged(self, log_level: str, message: str) -> bool: - return any(record.levelname == log_level and record.getMessage() == message - for record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - @staticmethod - def get_products_ticker_response_mock(price: float) -> Dict: - products_ticker_mock = { - "trade_id": 86326522, - "price": str(price), - "size": "0.00698254", - "time": "2020-03-20T00:22:57.833897Z", - "bid": "6265.15", - "ask": "6267.71", - "volume": "53602.03940154" - } - return products_ticker_mock - - def get_products_response_mock(self, other_pair: str) -> List: - products_mock = [ - { - "id": self.trading_pair, - "base_currency": self.base_asset, - "quote_currency": self.quote_asset, - "base_min_size": "0.00100000", - "base_max_size": "280.00000000", - "quote_increment": "0.01000000", - "base_increment": "0.00000001", - "display_name": f"{self.base_asset}/{self.quote_asset}", - "min_market_funds": "10", - 
"max_market_funds": "1000000", - "margin_enabled": False, - "post_only": False, - "limit_only": False, - "cancel_only": False, - "status": "online", - "status_message": "", - "auction_mode": True, - }, - { - "id": other_pair, - "base_currency": other_pair.split("-")[0], - "quote_currency": other_pair.split("-")[1], - "base_min_size": "0.00100000", - "base_max_size": "280.00000000", - "quote_increment": "0.01000000", - "base_increment": "0.00000001", - "display_name": other_pair.replace("-", "/"), - "min_market_funds": "10", - "max_market_funds": "1000000", - "margin_enabled": False, - "post_only": False, - "limit_only": False, - "cancel_only": False, - "status": "online", - "status_message": "", - "auction_mode": True, - } - ] - return products_mock - - @staticmethod - def get_products_book_response_mock( - bids: Optional[List[List[str]]] = None, asks: Optional[List[List[str]]] = None - ) -> Dict: - bids = bids or [["1", "2", "3"]] - asks = asks or [["4", "5", "6"]] - products_book_mock = { - "sequence": 13051505638, - "bids": bids, - "asks": asks, - } - return products_book_mock - - def get_ws_open_message_mock(self) -> Dict: - message = { - "type": "open", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "sequence": 10, - "order_id": "d50ec984-77a8-460a-b958-66f114b0de9b", - "price": "200.2", - "remaining_size": "1.00", - "side": "sell" - } - return message - - def get_ws_match_message_mock(self) -> Dict: - message = { - "type": "match", - "trade_id": 10, - "sequence": 50, - "maker_order_id": "ac928c66-ca53-498f-9c13-a110027a60e8", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "size": "5.23512", - "price": "400.23", - "side": "sell" - } - return message - - def get_ws_change_message_mock(self) -> Dict: - message = { - "type": "change", - "time": "2014-11-07T08:19:27.028459Z", - "sequence": 80, - "order_id": "ac928c66-ca53-498f-9c13-a110027a60e8", - "product_id": self.trading_pair, - "new_size": "5.23512", - "old_size": "12.234412", - "price": "400.23", - "side": "sell" - } - return message - - def get_ws_done_message_mock(self) -> Dict: - message = { - "type": "done", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "sequence": 10, - "price": "200.2", - "order_id": "d50ec984-77a8-460a-b958-66f114b0de9b", - "reason": "filled", - "side": "sell", - "remaining_size": "0" - } - return message - - @aioresponses() - def test_get_last_traded_prices(self, mock_api): - alt_pair = "BTC-USDT" - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/ticker" - alt_url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{alt_pair}/ticker" - price = 10.0 - alt_price = 15.0 - resp = self.get_products_ticker_response_mock(price=price) - alt_resp = self.get_products_ticker_response_mock(price=alt_price) - mock_api.get(url, body=json.dumps(resp)) - mock_api.get(alt_url, body=json.dumps(alt_resp)) - - trading_pairs = [self.trading_pair, alt_pair] - ret = self.async_run_with_timeout( - coroutine=CoinbaseProAPIOrderBookDataSource.get_last_traded_prices(trading_pairs) - ) - - self.assertEqual(ret[self.trading_pair], Decimal(resp["price"])) - self.assertEqual(ret[alt_pair], Decimal(alt_resp["price"])) - - # @aioresponses() - # def test_fetch_trading_pairs(self, mock_api): - # url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}" - # alt_pair = "BTC-USDT" - # resp = self.get_products_response_mock(alt_pair) - # mock_api.get(url, 
body=json.dumps(resp)) - # - # ret = self.async_run_with_timeout(coroutine=CoinbaseProAPIOrderBookDataSource.fetch_trading_pairs()) - # - # self.assertIn(self.trading_pair, ret) - # self.assertIn(alt_pair, ret) - - @aioresponses() - def test_get_snapshot(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - resp = self.get_products_book_response_mock() - mock_api.get(url, body=json.dumps(resp)) - - rest_assistant = self.ev_loop.run_until_complete( - build_coinbase_pro_web_assistant_factory().get_rest_assistant() - ) - ret = self.async_run_with_timeout( - coroutine=CoinbaseProAPIOrderBookDataSource.get_snapshot(rest_assistant, self.trading_pair) - ) - - self.assertEqual(resp, ret) # shallow comparison ok - - @aioresponses() - def test_get_snapshot_raises_on_status_code(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - resp = self.get_products_book_response_mock() - mock_api.get(url, body=json.dumps(resp), status=401) - - rest_assistant = self.ev_loop.run_until_complete( - build_coinbase_pro_web_assistant_factory().get_rest_assistant() - ) - with self.assertRaises(IOError): - self.async_run_with_timeout( - coroutine=CoinbaseProAPIOrderBookDataSource.get_snapshot(rest_assistant, self.trading_pair) - ) - - @aioresponses() - def test_get_new_order_book(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - resp = self.get_products_book_response_mock(bids=[["1", "2", "3"]], asks=[["4", "5", "6"]]) - mock_api.get(url, body=json.dumps(resp)) - - ret = self.async_run_with_timeout(self.data_source.get_new_order_book(self.trading_pair)) - - self.assertIsInstance(ret, OrderBook) - - bid_entries = list(ret.bid_entries()) - ask_entries = list(ret.ask_entries()) - - self.assertEqual(1, len(bid_entries)) - self.assertEqual(1, len(ask_entries)) - - bid_entry = bid_entries[0] - ask_entry = ask_entries[0] - - self.assertEqual(1, bid_entry.price) - self.assertEqual(4, ask_entry.price) - - @aioresponses() - def test_get_tracking_pairs(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - resp = self.get_products_book_response_mock(bids=[["1", "2", "3"]]) - mock_api.get(url, body=json.dumps(resp)) - - ret = self.async_run_with_timeout(self.data_source.get_tracking_pairs()) - - self.assertEqual(1, len(ret)) - - tracker_entry = ret[self.trading_pair] - - self.assertIsInstance(tracker_entry, CoinbaseProOrderBookTrackerEntry) - self.assertEqual(1, list(tracker_entry.order_book.bid_entries())[0].price) - - @aioresponses() - def test_get_tracking_pairs_logs_io_error(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - mock_api.get(url, exception=IOError) - - ret = self.async_run_with_timeout(self.data_source.get_tracking_pairs()) - - self.assertEqual(0, len(ret)) - self.assertTrue(self._is_logged( - log_level="NETWORK", message=f"Error getting snapshot for {self.trading_pair}.") - ) - - @aioresponses() - def test_get_tracking_pairs_logs_other_exceptions(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}/{self.trading_pair}/book?level=3" - mock_api.get(url, exception=RuntimeError) - - ret = self.async_run_with_timeout(self.data_source.get_tracking_pairs()) - - self.assertEqual(0, len(ret)) - self.assertTrue(self._is_logged( - log_level="ERROR", message=f"Error initializing order book for 
{self.trading_pair}. ") - ) - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_processes_open_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_open_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - ob_message = output_queue.get_nowait() - - self.assertEqual(resp, ob_message.content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_processes_match_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_match_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - ob_message = output_queue.get_nowait() - - self.assertEqual(resp, ob_message.content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_processes_change_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_change_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - ob_message = output_queue.get_nowait() - - self.assertEqual(resp, ob_message.content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_processes_done_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_done_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - ob_message = output_queue.get_nowait() - - self.assertEqual(resp, ob_message.content) # shallow comparison is ok - - @patch( - "hummingbot.connector.exchange.coinbase_pro" - ".coinbase_pro_api_order_book_data_source.CoinbaseProAPIOrderBookDataSource._sleep" - ) - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def 
test_listen_for_order_book_diffs_raises_on_no_type(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = {} - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue( - self._is_logged(log_level="NETWORK", message="Unexpected error with WebSocket connection.") - ) - self.assertTrue(output_queue.empty()) - - @patch( - "hummingbot.connector.exchange.coinbase_pro" - ".coinbase_pro_api_order_book_data_source.CoinbaseProAPIOrderBookDataSource._sleep" - ) - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_raises_on_error_msg(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = {"type": "error", "message": "some error"} - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue( - self._is_logged(log_level="NETWORK", message="Unexpected error with WebSocket connection.") - ) - self.assertTrue(output_queue.empty()) - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_ignores_irrelevant_messages(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "received"}) - ) - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "activate"}) - ) - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "subscriptions"}) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue(output_queue.empty()) - - @patch( - "hummingbot.connector.exchange.coinbase_pro" - ".coinbase_pro_api_order_book_data_source.CoinbaseProAPIOrderBookDataSource._sleep" - ) - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_order_book_diffs_raises_on_unrecognized_message(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = {"type": "some-new-message-type"} - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_order_book_diffs(self.ev_loop, output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue( - self._is_logged(log_level="NETWORK", message="Unexpected error with WebSocket 
connection.") - ) - self.assertTrue(output_queue.empty()) diff --git a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_user_stream_data_source.py deleted file mode 100644 index 07312e1e9f..0000000000 --- a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_api_user_stream_data_source.py +++ /dev/null @@ -1,258 +0,0 @@ -import asyncio -import json -import unittest -from typing import Awaitable, Dict, List -from unittest.mock import AsyncMock, patch - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_api_user_stream_data_source import ( - CoinbaseProAPIUserStreamDataSource, -) -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_auth import CoinbaseProAuth -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_utils import build_coinbase_pro_web_assistant_factory -from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant - - -class TestCoinbaseProAPIUserStreamDataSource(unittest.TestCase): - # logging.Level required to receive logs from the data source logger - level = 0 - - @classmethod - def setUpClass(cls) -> None: - super().setUpClass() - cls.ev_loop = asyncio.get_event_loop() - cls.base_asset = "COINALPHA" - cls.quote_asset = "HBOT" - cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - - def setUp(self) -> None: - super().setUp() - auth = CoinbaseProAuth(api_key="SomeAPIKey", secret_key="shht", passphrase="SomePassPhrase") - self.mocking_assistant = NetworkMockingAssistant() - web_assistants_factory = build_coinbase_pro_web_assistant_factory(auth) - self.data_source = CoinbaseProAPIUserStreamDataSource( - trading_pairs=[self.trading_pair], web_assistants_factory=web_assistants_factory - ) - self.data_source.logger().setLevel(1) - self.data_source.logger().addHandler(self) - - self.log_records = [] - self.async_tasks: List[asyncio.Task] = [] - - def tearDown(self) -> None: - for task in self.async_tasks: - task.cancel() - super().tearDown() - - def handle(self, record): - self.log_records.append(record) - - def _is_logged(self, log_level: str, message: str) -> bool: - return any(record.levelname == log_level and record.getMessage() == message - for record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - def get_ws_open_message_mock(self) -> Dict: - message = { - "type": "open", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "sequence": 10, - "order_id": "d50ec984-77a8-460a-b958-66f114b0de9b", - "price": "200.2", - "remaining_size": "1.00", - "side": "sell" - } - return message - - def get_ws_match_message_mock(self) -> Dict: - message = { - "type": "match", - "trade_id": 10, - "sequence": 50, - "maker_order_id": "ac928c66-ca53-498f-9c13-a110027a60e8", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "size": "5.23512", - "price": "400.23", - "side": "sell" - } - return message - - def get_ws_change_message_mock(self) -> Dict: - message = { - "type": "change", - "time": "2014-11-07T08:19:27.028459Z", - "sequence": 80, - "order_id": "ac928c66-ca53-498f-9c13-a110027a60e8", - "product_id": self.trading_pair, - "new_size": "5.23512", - "old_size": "12.234412", - "price": "400.23", - "side": "sell" - } - return message - - def 
get_ws_done_message_mock(self) -> Dict: - message = { - "type": "done", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": self.trading_pair, - "sequence": 10, - "price": "200.2", - "order_id": "d50ec984-77a8-460a-b958-66f114b0de9b", - "reason": "filled", - "side": "sell", - "remaining_size": "0" - } - return message - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_processes_open_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_open_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - content = output_queue.get_nowait() - - self.assertEqual(resp, content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_processes_match_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_match_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - content = output_queue.get_nowait() - - self.assertEqual(resp, content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_processes_change_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_change_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - content = output_queue.get_nowait() - - self.assertEqual(resp, content) # shallow comparison is ok - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_processes_done_message(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = self.get_ws_done_message_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertFalse(output_queue.empty()) - - content = output_queue.get_nowait() - - self.assertEqual(resp, content) # shallow comparison is ok - - @patch( - "hummingbot.connector.exchange.coinbase_pro" - 
".coinbase_pro_api_user_stream_data_source.CoinbaseProAPIUserStreamDataSource._sleep" - ) - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_raises_on_no_type(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = {} - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue( - self._is_logged(log_level="NETWORK", message="Unexpected error with WebSocket connection.") - ) - self.assertTrue(output_queue.empty()) - - @patch( - "hummingbot.connector.exchange.coinbase_pro" - ".coinbase_pro_api_user_stream_data_source.CoinbaseProAPIUserStreamDataSource._sleep" - ) - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_raises_on_error_message(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - resp = {"type": "error", "message": "some error"} - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps(resp) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue( - self._is_logged(log_level="NETWORK", message="Unexpected error with WebSocket connection.") - ) - self.assertTrue(output_queue.empty()) - - @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_user_stream_ignores_irrelevant_messages(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "received"}) - ) - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "activate"}) - ) - self.mocking_assistant.add_websocket_aiohttp_message( - ws_connect_mock.return_value, json.dumps({"type": "subscriptions"}) - ) - output_queue = asyncio.Queue() - - t = self.ev_loop.create_task(self.data_source.listen_for_user_stream(output_queue)) - self.async_tasks.append(t) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertTrue(output_queue.empty()) diff --git a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_exchange.py b/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_exchange.py deleted file mode 100644 index 7d58949c45..0000000000 --- a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_exchange.py +++ /dev/null @@ -1,427 +0,0 @@ -import asyncio -import json -import re -import unittest -from decimal import Decimal -from typing import Awaitable, Dict, List - -from aioresponses import aioresponses - -from hummingbot.client.config.client_config_map import ClientConfigMap -from hummingbot.client.config.config_helpers import ClientConfigAdapter -from hummingbot.connector.exchange.coinbase_pro import coinbase_pro_constants as CONSTANTS -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange import 
CoinbaseProExchange -from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.connector.trading_rule import TradingRule -from hummingbot.core.data_type.common import OrderType -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import ( - BuyOrderCreatedEvent, - MarketEvent, - MarketOrderFailureEvent, - OrderCancelledEvent, - SellOrderCreatedEvent, -) -from hummingbot.core.network_iterator import NetworkStatus - - -class TestCoinbaseProExchange(unittest.TestCase): - # logging.Level required to receive logs from the exchange - level = 0 - - @classmethod - def setUpClass(cls) -> None: - super().setUpClass() - cls.ev_loop = asyncio.get_event_loop() - cls.base_asset = "COINALPHA" - cls.quote_asset = "HBOT" - cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - cls.ex_trading_pair = f"{cls.base_asset}_{cls.quote_asset}" - cls.api_key = "someKey" - cls.api_secret = "shht" - cls.api_passphrase = "somePhrase" - - def setUp(self) -> None: - super().setUp() - self.log_records = [] - self.mocking_assistant = NetworkMockingAssistant() - self.async_tasks: List[asyncio.Task] = [] - self.client_config_map = ClientConfigAdapter(ClientConfigMap()) - - self.exchange = CoinbaseProExchange( - client_config_map=self.client_config_map, - coinbase_pro_api_key=self.api_key, - coinbase_pro_secret_key=self.api_secret, - coinbase_pro_passphrase=self.api_passphrase, - trading_pairs=[self.trading_pair] - ) - self.event_listener = EventLogger() - - self.exchange.logger().setLevel(1) - self.exchange.logger().addHandler(self) - - def tearDown(self) -> None: - for task in self.async_tasks: - task.cancel() - super().tearDown() - - def handle(self, record): - self.log_records.append(record) - - def _is_logged(self, log_level: str, message: str) -> bool: - return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - def simulate_trading_rules_initialization(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}" - alt_pair = "BTC-USDT" - resp = self.get_products_response_mock(alt_pair) - mock_api.get(url, body=json.dumps(resp)) - - self.async_run_with_timeout(self.exchange._update_trading_rules()) - - def simulate_execute_buy_order(self, mock_api, order_id): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - resp = self.get_orders_response_mock(order_id) - mock_api.post(regex_url, body=json.dumps(resp)) - - self.async_run_with_timeout( - self.exchange.execute_sell( - order_id=order_id, - trading_pair=self.trading_pair, - amount=Decimal("1"), - order_type=OrderType.LIMIT, - price=Decimal("2"), - ) - ) - - def get_account_mock( - self, base_balance: float, base_available: float, quote_balance: float, quote_available: float - ) -> List: - account_mock = [ - { - "id": "7fd0abc0-e5ad-4cbb-8d54-f2b3f43364da", - "currency": self.base_asset, - "balance": str(base_balance), - "available": str(base_available), - "hold": "0.0000000000000000", - "profile_id": "8058d771-2d88-4f0f-ab6e-299c153d4308", - "trading_enabled": True - }, - { - "id": "7fd0abc0-e5ad-4cbb-8d54-f2b3f43364da", - "currency": self.quote_asset, - "balance": str(quote_balance), - "available": str(quote_available), - "hold": "0.0000000000000000", 
- "profile_id": "8058d771-2d88-4f0f-ab6e-299c153d4308", - "trading_enabled": True - } - ] - return account_mock - - def get_products_response_mock(self, other_pair: str) -> List: - products_mock = [ - { - "id": self.trading_pair, - "base_currency": self.base_asset, - "quote_currency": self.quote_asset, - "quote_increment": "0.01000000", - "base_increment": "0.00000001", - "display_name": f"{self.base_asset}/{self.quote_asset}", - "min_market_funds": "10", - "margin_enabled": False, - "post_only": False, - "limit_only": False, - "cancel_only": False, - "status": "online", - "status_message": "", - "auction_mode": True, - }, - { - "id": other_pair, - "base_currency": other_pair.split("-")[0], - "quote_currency": other_pair.split("-")[1], - "quote_increment": "0.01000000", - "base_increment": "0.00000001", - "display_name": other_pair.replace("-", "/"), - "min_market_funds": "10", - "margin_enabled": False, - "post_only": False, - "limit_only": False, - "cancel_only": False, - "status": "online", - "status_message": "", - "auction_mode": True, - } - ] - return products_mock - - def get_orders_response_mock(self, order_id: str) -> Dict: - orders_mock = { - "id": order_id, - "price": "10.00000000", - "size": "1.00000000", - "product_id": self.trading_pair, - "profile_id": "8058d771-2d88-4f0f-ab6e-299c153d4308", - "side": "buy", - "type": "limit", - "time_in_force": "GTC", - "post_only": True, - "created_at": "2020-03-11T20:48:46.622052Z", - "fill_fees": "0.0000000000000000", - "filled_size": "0.00000000", - "executed_value": "0.0000000000000000", - "status": "open", - "settled": False - } - return orders_mock - - @aioresponses() - def test_check_network_not_connected(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.TIME_PATH_URL}" - resp = "" - mock_api.get(url, status=500, body=json.dumps(resp)) - - ret = self.async_run_with_timeout(coroutine=self.exchange.check_network()) - - self.assertEqual(ret, NetworkStatus.NOT_CONNECTED) - - @aioresponses() - def test_check_network(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.TIME_PATH_URL}" - resp = {} - mock_api.get(url, body=json.dumps(resp)) - - ret = self.async_run_with_timeout(coroutine=self.exchange.check_network()) - - self.assertEqual(ret, NetworkStatus.CONNECTED) - - @aioresponses() - def test_update_fee_percentage(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.FEES_PATH_URL}" - resp = { - "maker_fee_rate": "0.0050", - "taker_fee_rate": "0.0050", - "usd_volume": "43806.92" - } - mock_api.get(url, body=json.dumps(resp)) - - self.async_run_with_timeout(self.exchange._update_fee_percentage()) - - self.assertEqual(Decimal(resp["maker_fee_rate"]), self.exchange.maker_fee_percentage) - self.assertEqual(Decimal(resp["taker_fee_rate"]), self.exchange.taker_fee_percentage) - - @aioresponses() - def test_update_balances(self, mock_api): - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ACCOUNTS_PATH_URL}" - resp = self.get_account_mock( - base_balance=2, - base_available=1, - quote_balance=4, - quote_available=3, - ) - mock_api.get(url, body=json.dumps(resp)) - - self.async_run_with_timeout(self.exchange._update_balances()) - - expected_available_balances = {self.base_asset: Decimal("1"), self.quote_asset: Decimal("3")} - self.assertEqual(expected_available_balances, self.exchange.available_balances) - expected_balances = {self.base_asset: Decimal("2"), self.quote_asset: Decimal("4")} - self.assertEqual(expected_balances, self.exchange.get_all_balances()) - - @aioresponses() - def test_update_trading_rules(self, mock_api): - url = 
f"{CONSTANTS.REST_URL}{CONSTANTS.PRODUCTS_PATH_URL}" - alt_pair = "BTC-USDT" - resp = self.get_products_response_mock(alt_pair) - mock_api.get(url, body=json.dumps(resp)) - - self.async_run_with_timeout(self.exchange._update_trading_rules()) - - trading_rules = self.exchange.trading_rules - - self.assertEqual(2, len(trading_rules)) - self.assertIn(self.trading_pair, trading_rules) - self.assertIn(alt_pair, trading_rules) - self.assertIsInstance(trading_rules[self.trading_pair], TradingRule) - self.assertIsInstance(trading_rules[alt_pair], TradingRule) - - @aioresponses() - def test_execute_buy(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - - some_order_id = "someID" - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - resp = self.get_orders_response_mock(some_order_id) - mock_api.post(regex_url, body=json.dumps(resp)) - - self.exchange.add_listener(MarketEvent.BuyOrderCreated, self.event_listener) - self.exchange.add_listener(MarketEvent.OrderFilled, self.event_listener) - - self.async_run_with_timeout( - self.exchange.execute_buy( - order_id=some_order_id, - trading_pair=self.trading_pair, - amount=Decimal("1"), - order_type=OrderType.LIMIT, - price=Decimal("2"), - ) - ) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, BuyOrderCreatedEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertIn(some_order_id, self.exchange.in_flight_orders) - - @aioresponses() - def test_execute_buy_handles_errors(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - mock_api.post(regex_url, exception=RuntimeError) - - self.exchange.add_listener(MarketEvent.BuyOrderCreated, self.event_listener) - self.exchange.add_listener(MarketEvent.OrderFailure, self.event_listener) - - some_order_id = "someID" - self.async_run_with_timeout( - self.exchange.execute_buy( - order_id=some_order_id, - trading_pair=self.trading_pair, - amount=Decimal("1"), - order_type=OrderType.LIMIT, - price=Decimal("2"), - ) - ) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, MarketOrderFailureEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertNotIn(some_order_id, self.exchange.in_flight_orders) - - @aioresponses() - def test_execute_sell(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - - some_order_id = "someID" - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - resp = self.get_orders_response_mock(some_order_id) - mock_api.post(regex_url, body=json.dumps(resp)) - - self.exchange.add_listener(MarketEvent.SellOrderCreated, self.event_listener) - self.exchange.add_listener(MarketEvent.OrderFilled, self.event_listener) - - self.async_run_with_timeout( - self.exchange.execute_sell( - order_id=some_order_id, - trading_pair=self.trading_pair, - amount=Decimal("1"), - order_type=OrderType.LIMIT, - price=Decimal("2"), - ) - ) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, SellOrderCreatedEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertIn(some_order_id, 
self.exchange.in_flight_orders) - - @aioresponses() - def test_execute_sell_handles_errors(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - mock_api.post(regex_url, exception=RuntimeError) - - self.exchange.add_listener(MarketEvent.SellOrderCreated, self.event_listener) - self.exchange.add_listener(MarketEvent.OrderFailure, self.event_listener) - - some_order_id = "someID" - self.async_run_with_timeout( - self.exchange.execute_sell( - order_id=some_order_id, - trading_pair=self.trading_pair, - amount=Decimal("1"), - order_type=OrderType.LIMIT, - price=Decimal("2"), - ) - ) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, MarketOrderFailureEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertNotIn(some_order_id, self.exchange.in_flight_orders) - - @aioresponses() - def test_execute_cancel(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - some_order_id = "someID" - self.simulate_execute_buy_order(mock_api, some_order_id) - - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}/{some_order_id}" - resp = some_order_id - mock_api.delete(url, body=json.dumps(resp)) - - self.exchange.add_listener(MarketEvent.OrderCancelled, self.event_listener) - - self.async_run_with_timeout(self.exchange.execute_cancel(self.trading_pair, some_order_id)) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, OrderCancelledEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertNotIn(some_order_id, self.exchange.in_flight_orders) - - @aioresponses() - def test_execute_cancel_order_does_not_exist(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - some_order_id = "someID" - self.simulate_execute_buy_order(mock_api, some_order_id) - - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}/{some_order_id}" - mock_api.delete(url, exception=IOError("order not found")) - - self.exchange.add_listener(MarketEvent.OrderCancelled, self.event_listener) - - self.async_run_with_timeout(self.exchange.execute_cancel(self.trading_pair, some_order_id)) - - self.assertEqual(1, len(self.event_listener.event_log)) - - event = self.event_listener.event_log[0] - - self.assertIsInstance(event, OrderCancelledEvent) - self.assertEqual(some_order_id, event.order_id) - self.assertNotIn(some_order_id, self.exchange.in_flight_orders) - - @aioresponses() - def test_get_order(self, mock_api): - self.simulate_trading_rules_initialization(mock_api) - some_order_id = "someID" - self.simulate_execute_buy_order(mock_api, some_order_id) - - url = f"{CONSTANTS.REST_URL}{CONSTANTS.ORDERS_PATH_URL}/{some_order_id}" - resp = self.get_orders_response_mock(some_order_id) - mock_api.get(url, body=json.dumps(resp)) diff --git a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py b/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py deleted file mode 100644 index 133d58b0d1..0000000000 --- a/test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py +++ /dev/null @@ -1,194 +0,0 @@ -from decimal import Decimal -from unittest import TestCase - -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_in_flight_order import CoinbaseProInFlightOrder -from 
hummingbot.core.data_type.common import OrderType, TradeType - - -class CoinbaseProInFlightOrderTests(TestCase): - - def setUp(self): - super().setUp() - self.base_token = "BTC" - self.quote_token = "USDT" - self.trading_pair = f"{self.base_token}-{self.quote_token}" - - def test_update_with_partial_trade_event(self): - order = CoinbaseProInFlightOrder( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair=self.trading_pair, - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(10000), - amount=Decimal(1), - creation_timestamp=1640001112.0 - ) - - trade_event_info = { - "type": "match", - "trade_id": 1, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.1", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.005" - } - - update_result = order.update_with_trade_update(trade_event_info) - - self.assertTrue(update_result) - self.assertFalse(order.is_done) - self.assertEqual("open", order.last_state) - self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) - expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(str(trade_event_info["price"])) - self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) - self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid) - self.assertEqual(order.quote_asset, order.fee_asset) - - def test_update_with_full_fill_trade_event(self): - order = CoinbaseProInFlightOrder( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair=self.trading_pair, - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(10000), - amount=Decimal(1), - creation_timestamp=1640001112.0 - ) - - trade_event_info = { - "type": "match", - "trade_id": 1, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.1", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.005" - } - - update_result = order.update_with_trade_update(trade_event_info) - - self.assertTrue(update_result) - self.assertFalse(order.is_done) - self.assertEqual("open", order.last_state) - self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) - expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal( - str(trade_event_info["price"])) - self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) - expected_partial_event_fee = (Decimal(trade_event_info["taker_fee_rate"]) * - expected_executed_quote_amount) - self.assertEqual(expected_partial_event_fee, order.fee_paid) - - complete_event_info = { - "type": "match", - "trade_id": 2, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.9", - "price": "10050.0", - "side": "buy", - 
"taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.001" - } - - update_result = order.update_with_trade_update(complete_event_info) - - self.assertTrue(update_result) - # orders are marked as done with the done event - self.assertFalse(order.is_done) - self.assertEqual("open", order.last_state) - self.assertEqual(order.amount, order.executed_amount_base) - expected_executed_quote_amount += Decimal(str(complete_event_info["size"])) * Decimal( - str(complete_event_info["price"])) - self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) - expected_complete_event_fee = (Decimal(complete_event_info["taker_fee_rate"]) * - Decimal(str(complete_event_info["size"])) * - Decimal(str(complete_event_info["price"]))) - self.assertEqual(expected_partial_event_fee + expected_complete_event_fee, order.fee_paid) - - def test_update_with_repeated_trade_id_is_ignored(self): - order = CoinbaseProInFlightOrder( - client_order_id="OID1", - exchange_order_id="EOID1", - trading_pair=self.trading_pair, - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal(10000), - amount=Decimal(1), - creation_timestamp=1640001112.0 - ) - - trade_event_info = { - "type": "match", - "trade_id": 1, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.1", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.005" - } - - update_result = order.update_with_trade_update(trade_event_info) - - self.assertTrue(update_result) - - complete_event_info = { - "type": "match", - "trade_id": 1, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.9", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.001" - } - - update_result = order.update_with_trade_update(complete_event_info) - - self.assertFalse(update_result) - self.assertFalse(order.is_done) - self.assertEqual("open", order.last_state) - self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) - expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal( - str(trade_event_info["price"])) - self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) - self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid) diff --git a/test/hummingbot/connector/exchange/coinbase_pro/test_coingbase_pro_exchange.py b/test/hummingbot/connector/exchange/coinbase_pro/test_coingbase_pro_exchange.py deleted file mode 100644 index 63c52bedba..0000000000 --- a/test/hummingbot/connector/exchange/coinbase_pro/test_coingbase_pro_exchange.py +++ /dev/null @@ -1,184 +0,0 @@ -import asyncio -import functools -from decimal import Decimal -from typing import Awaitable, Callable, Optional -from 
unittest import TestCase -from unittest.mock import AsyncMock - -from hummingbot.client.config.client_config_map import ClientConfigMap -from hummingbot.client.config.config_helpers import ClientConfigAdapter -from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_exchange import CoinbaseProExchange -from hummingbot.core.data_type.common import OrderType, TradeType -from hummingbot.core.event.event_logger import EventLogger -from hummingbot.core.event.events import MarketEvent, OrderFilledEvent - - -class BitfinexExchangeTests(TestCase): - # the level is required to receive logs from the data source logger - level = 0 - - @classmethod - def setUpClass(cls) -> None: - super().setUpClass() - cls.base_asset = "COINALPHA" - cls.quote_asset = "HBOT" - cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - cls.symbol = f"{cls.base_asset}{cls.quote_asset}" - cls.listen_key = "TEST_LISTEN_KEY" - - def setUp(self) -> None: - super().setUp() - - self.log_records = [] - self.test_task: Optional[asyncio.Task] = None - self.resume_test_event = asyncio.Event() - self.client_config_map = ClientConfigAdapter(ClientConfigMap()) - - self.exchange = CoinbaseProExchange( - client_config_map=self.client_config_map, - coinbase_pro_api_key="testAPIKey", - coinbase_pro_secret_key="testSecret", - coinbase_pro_passphrase="testPassphrase", - trading_pairs=[self.trading_pair] - ) - - self.exchange.logger().setLevel(1) - self.exchange.logger().addHandler(self) - - self._initialize_event_loggers() - - def tearDown(self) -> None: - self.test_task and self.test_task.cancel() - super().tearDown() - - def _initialize_event_loggers(self): - self.buy_order_completed_logger = EventLogger() - self.sell_order_completed_logger = EventLogger() - self.order_filled_logger = EventLogger() - - events_and_loggers = [ - (MarketEvent.BuyOrderCompleted, self.buy_order_completed_logger), - (MarketEvent.SellOrderCompleted, self.sell_order_completed_logger), - (MarketEvent.OrderFilled, self.order_filled_logger)] - - for event, logger in events_and_loggers: - self.exchange.add_listener(event, logger) - - def handle(self, record): - self.log_records.append(record) - - def _is_logged(self, log_level: str, message: str) -> bool: - return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - def _return_calculation_and_set_done_event(self, calculation: Callable, *args, **kwargs): - if self.resume_test_event.is_set(): - raise asyncio.CancelledError - self.resume_test_event.set() - return calculation(*args, **kwargs) - - def test_order_fill_event_takes_fee_from_update_event(self): - self.exchange.start_tracking_order( - order_id="OID1", - trading_pair=self.trading_pair, - order_type=OrderType.LIMIT, - trade_type=TradeType.BUY, - price=Decimal("10000"), - amount=Decimal("1"), - ) - - order = self.exchange.in_flight_orders.get("OID1") - order.update_exchange_order_id("EOID1") - - partial_fill = { - "type": "match", - "trade_id": 1, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.1", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - 
"profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.005" - } - - mock_user_stream = AsyncMock() - mock_user_stream.get.side_effect = functools.partial(self._return_calculation_and_set_done_event, - lambda: partial_fill) - - self.exchange.user_stream_tracker._user_stream = mock_user_stream - - self.test_task = asyncio.get_event_loop().create_task(self.exchange._user_stream_event_listener()) - self.async_run_with_timeout(self.resume_test_event.wait()) - - expected_executed_quote_amount = Decimal(str(partial_fill["size"])) * Decimal(str(partial_fill["price"])) - expected_partial_event_fee = (Decimal(partial_fill["taker_fee_rate"]) * - expected_executed_quote_amount) - - self.assertEqual(expected_partial_event_fee, order.fee_paid) - self.assertEqual(1, len(self.order_filled_logger.event_log)) - fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] - self.assertEqual(Decimal("0.005"), fill_event.trade_fee.percent) - self.assertEqual([], fill_event.trade_fee.flat_fees) - self.assertTrue(self._is_logged( - "INFO", - f"Filled {Decimal(partial_fill['size'])} out of {order.amount} of the " - f"{order.order_type_description} order {order.client_order_id}" - )) - - self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) - - complete_fill = { - "type": "match", - "trade_id": 2, - "sequence": 50, - "maker_order_id": "EOID1", - "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", - "time": "2014-11-07T08:19:27.028459Z", - "product_id": "BTC-USDT", - "size": "0.9", - "price": "10050.0", - "side": "buy", - "taker_user_id": "5844eceecf7e803e259d0365", - "user_id": "5844eceecf7e803e259d0365", - "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", - "taker_fee_rate": "0.001" - } - - self.resume_test_event = asyncio.Event() - mock_user_stream = AsyncMock() - mock_user_stream.get.side_effect = functools.partial(self._return_calculation_and_set_done_event, - lambda: complete_fill) - - self.exchange.user_stream_tracker._user_stream = mock_user_stream - - self.test_task = asyncio.get_event_loop().create_task(self.exchange._user_stream_event_listener()) - self.async_run_with_timeout(self.resume_test_event.wait()) - - expected_executed_quote_amount = Decimal(str(complete_fill["size"])) * Decimal(str(complete_fill["price"])) - expected_partial_event_fee += Decimal(complete_fill["taker_fee_rate"]) * expected_executed_quote_amount - - self.assertEqual(expected_partial_event_fee, order.fee_paid) - - self.assertEqual(2, len(self.order_filled_logger.event_log)) - fill_event: OrderFilledEvent = self.order_filled_logger.event_log[1] - self.assertEqual(Decimal("0.001"), fill_event.trade_fee.percent) - self.assertEqual([], fill_event.trade_fee.flat_fees) - - # The order should be marked as complete only when the "done" event arrives, not with the fill event - self.assertFalse(self._is_logged( - "INFO", - f"The market buy order {order.client_order_id} has completed according to Coinbase Pro user stream." 
- )) - - self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) diff --git a/test/hummingbot/connector/exchange/foxbit/test_foxbit_api_order_book_data_source.py b/test/hummingbot/connector/exchange/foxbit/test_foxbit_api_order_book_data_source.py index 30addcb3ec..5c913c5bb1 100644 --- a/test/hummingbot/connector/exchange/foxbit/test_foxbit_api_order_book_data_source.py +++ b/test/hummingbot/connector/exchange/foxbit/test_foxbit_api_order_book_data_source.py @@ -155,6 +155,7 @@ def _level_1_response(self): } ] + @patch("hummingbot.connector.exchange.foxbit.foxbit_api_order_book_data_source.FoxbitAPIOrderBookDataSource._ORDER_BOOK_INTERVAL", 0.0) @aioresponses() def test_get_new_order_book_successful(self, mock_api): url = web_utils.public_rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL.format(self.trading_pair), domain=self.domain) diff --git a/test/hummingbot/connector/exchange/foxbit/test_foxbit_exchange.py b/test/hummingbot/connector/exchange/foxbit/test_foxbit_exchange.py index d0650d328f..7fcafb53c8 100644 --- a/test/hummingbot/connector/exchange/foxbit/test_foxbit_exchange.py +++ b/test/hummingbot/connector/exchange/foxbit/test_foxbit_exchange.py @@ -922,7 +922,8 @@ def test_client_order_id_on_order(self): self.assertEqual(result[:12], expected_client_order_id[:12]) - def test_create_order(self): + @aioresponses() + def test_create_order(self, mock_api): self._simulate_trading_rules_initialized() _order = self.async_run_with_timeout(self.exchange._create_order(TradeType.BUY, '551100', diff --git a/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py b/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py index 507cb38bcf..4b1f03d310 100644 --- a/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py +++ b/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py @@ -1274,7 +1274,7 @@ def test_update_order_status_processes_trade_fill(self, mock_api): "INFO", f"The {order.trade_type.name.upper()} order {order.client_order_id} " f"amounting to {order.executed_amount_base}/{order.amount} " - f"{order.base_asset} has been filled." + f"{order.base_asset} has been filled at {Decimal('10000')} HBOT." 
) ) @@ -1535,7 +1535,7 @@ def test_user_stream_update_for_order_partial_fill(self): self.assertTrue( self._is_logged("INFO", f"The {order.trade_type.name} order {order.client_order_id} amounting to " - f"0.5/{order.amount} {order.base_asset} has been filled.") + f"0.5/{order.amount} {order.base_asset} has been filled at {Decimal('10000.00000000')} HBOT.") ) def test_user_stream_update_for_order_fill(self): diff --git a/test/connector/exchange/kraken/__init__.py b/test/hummingbot/connector/exchange/hashkey/__init__.py similarity index 100% rename from test/connector/exchange/kraken/__init__.py rename to test/hummingbot/connector/exchange/hashkey/__init__.py diff --git a/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_order_book_data_source.py b/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_order_book_data_source.py new file mode 100644 index 0000000000..6d821cc948 --- /dev/null +++ b/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_order_book_data_source.py @@ -0,0 +1,584 @@ +import asyncio +import json +import re +import unittest +from typing import Awaitable, Dict +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.hashkey import hashkey_constants as CONSTANTS, hashkey_web_utils as web_utils +from hummingbot.connector.exchange.hashkey.hashkey_api_order_book_data_source import HashkeyAPIOrderBookDataSource +from hummingbot.connector.exchange.hashkey.hashkey_exchange import HashkeyExchange +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.data_type.order_book_message import OrderBookMessage + + +class TestHashkeyAPIOrderBookDataSource(unittest.TestCase): + # logging.Level required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "ETH" + cls.quote_asset = "USD" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.domain = CONSTANTS.DEFAULT_DOMAIN + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.async_task = None + self.mocking_assistant = NetworkMockingAssistant() + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = HashkeyExchange( + client_config_map=client_config_map, + hashkey_api_key="", + hashkey_api_secret="", + trading_pairs=[self.trading_pair]) + + self.throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS) + self.time_synchronnizer = TimeSynchronizer() + self.time_synchronnizer.add_time_offset_ms_sample(1000) + self.ob_data_source = HashkeyAPIOrderBookDataSource( + trading_pairs=[self.trading_pair], + throttler=self.throttler, + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + time_synchronizer=self.time_synchronnizer) + + self._original_full_order_book_reset_time = self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS + self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = -1 + + self.ob_data_source.logger().setLevel(1) + self.ob_data_source.logger().addHandler(self) + + self.resume_test_event 
= asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.async_task and self.async_task.cancel() + self.ob_data_source.FULL_ORDER_BOOK_RESET_DELTA_SECONDS = self._original_full_order_book_reset_time + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def get_exchange_rules_mock(self) -> Dict: + exchange_rules = { + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "status": "TRADING", + "baseAsset": "ETH", + "baseAssetName": "ETH", + "baseAssetPrecision": "0.0001", + "quoteAsset": "USD", + "quoteAssetName": "USD", + "quotePrecision": "0.0000001", + "retailAllowed": True, + "piAllowed": True, + "corporateAllowed": True, + "omnibusAllowed": True, + "icebergAllowed": True, + "isAggregate": True, + "allowMargin": True, + "filters": [ + { + "minPrice": "0.01", + "maxPrice": "100000.00000000", + "tickSize": "0.01", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.005", + "maxQty": "53", + "stepSize": "0.0001", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "10", + "filterType": "MIN_NOTIONAL" + }, + { + "minAmount": "10", + "maxAmount": "10000000", + "minBuyPrice": "0", + "filterType": "TRADE_AMOUNT" + }, + { + "maxSellPrice": "0", + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ] + } + return exchange_rules + + # ORDER BOOK SNAPSHOT + @staticmethod + def _snapshot_response() -> Dict: + snapshot = { + "t": 1703613017099, + "b": [ + [ + "2500", + "1" + ] + ], + "a": [ + [ + "25981.04", + "0.69773" + ], + [ + "25981.76", + "0.09316" + ], + ] + } + return snapshot + + @staticmethod + def _snapshot_response_processed() -> Dict: + snapshot_processed = { + "t": 1703613017099, + "b": [ + [ + "2500", + "1" + ] + ], + "a": [ + [ + "25981.04", + "0.69773" + ], + [ + "25981.76", + "0.09316" + ], + ] + } + return snapshot_processed + + @aioresponses() + def test_request_order_book_snapshot(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + snapshot_data = self._snapshot_response() + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + tradingrule_resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(tradingrule_resp)) + mock_api.get(regex_url, body=json.dumps(snapshot_data)) + + ret = self.async_run_with_timeout( + coroutine=self.ob_data_source._request_order_book_snapshot(self.trading_pair) + ) + + self.assertEqual(ret, self._snapshot_response_processed()) # shallow comparison ok + + @aioresponses() + def test_get_snapshot_raises(self, mock_api): + url = 
web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + tradingrule_resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(tradingrule_resp)) + mock_api.get(regex_url, status=500) + + with self.assertRaises(IOError): + self.async_run_with_timeout( + coroutine=self.ob_data_source._order_book_snapshot(self.trading_pair) + ) + + @aioresponses() + def test_get_new_order_book(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + resp = self._snapshot_response() + mock_api.get(regex_url, body=json.dumps(resp)) + + ret = self.async_run_with_timeout(coroutine=self.ob_data_source.get_new_order_book(self.trading_pair)) + bid_entries = list(ret.bid_entries()) + ask_entries = list(ret.ask_entries()) + self.assertEqual(1, len(bid_entries)) + self.assertEqual(2500, bid_entries[0].price) + self.assertEqual(1, bid_entries[0].amount) + self.assertEqual(int(resp["t"]), bid_entries[0].update_id) + self.assertEqual(2, len(ask_entries)) + self.assertEqual(25981.04, ask_entries[0].price) + self.assertEqual(0.69773, ask_entries[0].amount) + self.assertEqual(25981.76, ask_entries[1].price) + self.assertEqual(0.09316, ask_entries[1].amount) + self.assertEqual(int(resp["t"]), ask_entries[0].update_id) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_subscriptions_subscribes_to_trades_and_depth(self, ws_connect_mock): + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + result_subscribe_trades = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + "realtimeInterval": "24h", + }, + "f": True, + "sendTime": 1688198964293, + "shared": False, + "id": "1" + } + + result_subscribe_depth = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "depth", + "event": "sub", + "params": { + "binary": False, + }, + "f": True, + "sendTime": 1688198964293, + "shared": False, + "id": "1" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_trades)) + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_depth)) + + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + + sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=ws_connect_mock.return_value) + + self.assertEqual(2, len(sent_subscription_messages)) + expected_trade_subscription = { + "topic": "trade", + "event": "sub", + "symbol": self.ex_trading_pair, + "params": { + "binary": False + } + } + self.assertEqual(expected_trade_subscription, sent_subscription_messages[0]) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.connector.exchange.hashkey.hashkey_api_order_book_data_source.HashkeyAPIOrderBookDataSource._time") + def test_listen_for_subscriptions_sends_ping_message_before_ping_interval_finishes( + self, + time_mock, + ws_connect_mock): + + time_mock.side_effect = [1000, 1100, 1101, 1102] # 
Simulate first ping interval is already due + + ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() + + result_subscribe_trades = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + "realtimeInterval": "24h", + }, + "id": "1" + } + + result_subscribe_depth = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "depth", + "event": "sub", + "params": { + "binary": False, + }, + "id": "1" + } + + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_trades)) + self.mocking_assistant.add_websocket_aiohttp_message( + websocket_mock=ws_connect_mock.return_value, + message=json.dumps(result_subscribe_depth)) + + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) + sent_messages = self.mocking_assistant.json_messages_sent_through_websocket( + websocket_mock=ws_connect_mock.return_value) + + expected_ping_message = { + "ping": int(1101 * 1e3) + } + self.assertEqual(expected_ping_message, sent_messages[-1]) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_subscriptions_raises_cancel_exception(self, _, ws_connect_mock): + ws_connect_mock.side_effect = asyncio.CancelledError + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + self.async_run_with_timeout(self.listening_task) + + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_subscriptions_logs_exception_details(self, sleep_mock, ws_connect_mock): + sleep_mock.side_effect = asyncio.CancelledError + ws_connect_mock.side_effect = Exception("TEST ERROR.") + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task(self.ob_data_source.listen_for_subscriptions()) + self.async_run_with_timeout(self.listening_task) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error occurred when listening to order book streams. 
Retrying in 5 seconds...")) + + def test_listen_for_trades_cancelled_when_listening(self): + mock_queue = MagicMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_trades_logs_exception(self): + incomplete_resp = { + "symbol": self.trading_pair, + "symbolName": self.trading_pair, + "topic": "trade", + "event": "sub", + "params": { + "binary": False, + }, + "id": "1", + "data": [ + { + "v": "1447335405363150849", + "t": 1687271825415, + "p": "10001", + "q": "0.001", + "m": False, + }, + { + "v": "1447337171483901952", + "t": 1687272035953, + "p": "10001.1", + "q": "0.001", + "m": True + }, + ] + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_resp, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + self.async_run_with_timeout(self.listening_task) + + def test_listen_for_trades_successful(self): + mock_queue = AsyncMock() + trade_event = { + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "topic": "trade", + "params": { + "realtimeInterval": "24h", + "binary": "false" + }, + "data": [ + { + "v": "929681067596857345", + "t": 1625562619577, + "p": "34924.15", + "q": "0.00027", + "m": True + } + ], + "f": True, + "sendTime": 1626249138535, + "shared": False + } + mock_queue.get.side_effect = [trade_event, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.TRADE_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + try: + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_trades(self.ev_loop, msg_queue) + ) + except asyncio.CancelledError: + pass + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertTrue(trade_event["data"][0]["t"], msg.trade_id) + + def test_listen_for_order_book_snapshots_cancelled_when_fetching_snapshot(self): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + + @aioresponses() + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_order_book_snapshots_log_exception(self, mock_api, sleep_mock): + mock_queue = AsyncMock() + mock_queue.get.side_effect = ['ERROR', asyncio.CancelledError] + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + sleep_mock.side_effect = [asyncio.CancelledError] + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.get(regex_url, exception=Exception) + + with 
self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)) + + @aioresponses() + @patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep") + def test_listen_for_order_book_snapshots_successful_rest(self, mock_api, _): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.TimeoutError + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + url = web_utils.rest_url(path_url=CONSTANTS.SNAPSHOT_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + snapshot_data = self._snapshot_response() + mock_api.get(regex_url, body=json.dumps(snapshot_data)) + + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(int(snapshot_data["t"]), msg.update_id) + + def test_listen_for_order_book_snapshots_successful_ws(self): + mock_queue = AsyncMock() + snapshot_event = { + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "topic": "depth", + "params": { + "realtimeInterval": "24h", + "binary": "false" + }, + "data": [{ + "e": 301, + "s": self.ex_trading_pair, + "t": 1565600357643, + "v": "112801745_18", + "b": [ + ["11371.49", "0.0014"], + ["11371.12", "0.2"], + ["11369.97", "0.3523"], + ["11369.96", "0.5"], + ["11369.95", "0.0934"], + ["11369.94", "1.6809"], + ["11369.6", "0.0047"], + ["11369.17", "0.3"], + ["11369.16", "0.2"], + ["11369.04", "1.3203"]], + "a": [ + ["11375.41", "0.0053"], + ["11375.42", "0.0043"], + ["11375.48", "0.0052"], + ["11375.58", "0.0541"], + ["11375.7", "0.0386"], + ["11375.71", "2"], + ["11377", "2.0691"], + ["11377.01", "0.0167"], + ["11377.12", "1.5"], + ["11377.61", "0.3"] + ], + "o": 0 + }], + "f": True, + "sendTime": 1626253839401, + "shared": False + } + mock_queue.get.side_effect = [snapshot_event, asyncio.CancelledError()] + self.ob_data_source._message_queue[CONSTANTS.SNAPSHOT_EVENT_TYPE] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + try: + self.listening_task = self.ev_loop.create_task( + self.ob_data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue) + ) + except asyncio.CancelledError: + pass + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get(), + timeout=6) + + self.assertTrue(snapshot_event["data"][0]["t"], msg.update_id) diff --git a/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_user_stream_data_source.py new file mode 100644 index 0000000000..f8e207e6eb --- /dev/null +++ b/test/hummingbot/connector/exchange/hashkey/test_hashkey_api_user_stream_data_source.py @@ -0,0 +1,346 @@ +import asyncio +import json +import re +import unittest +from typing import Any, Awaitable, Dict, Optional +from unittest.mock import AsyncMock, MagicMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.hashkey import hashkey_constants as CONSTANTS, hashkey_web_utils as web_utils +from hummingbot.connector.exchange.hashkey.hashkey_api_user_stream_data_source import HashkeyAPIUserStreamDataSource +from 
hummingbot.connector.exchange.hashkey.hashkey_auth import HashkeyAuth +from hummingbot.connector.exchange.hashkey.hashkey_exchange import HashkeyExchange +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.connector.time_synchronizer import TimeSynchronizer +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler + + +class HashkeyUserStreamDataSourceUnitTests(unittest.TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "ETH" + cls.quote_asset = "USD" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.domain = CONSTANTS.DEFAULT_DOMAIN + + cls.listen_key = "TEST_LISTEN_KEY" + + def setUp(self) -> None: + super().setUp() + self.log_records = [] + self.listening_task: Optional[asyncio.Task] = None + self.mocking_assistant = NetworkMockingAssistant() + + self.throttler = AsyncThrottler(rate_limits=CONSTANTS.RATE_LIMITS) + self.mock_time_provider = MagicMock() + self.mock_time_provider.time.return_value = 1000 + self.auth = HashkeyAuth(api_key="TEST_API_KEY", secret_key="TEST_SECRET", time_provider=self.mock_time_provider) + self.time_synchronizer = TimeSynchronizer() + self.time_synchronizer.add_time_offset_ms_sample(0) + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + self.connector = HashkeyExchange( + client_config_map=client_config_map, + hashkey_api_key="", + hashkey_api_secret="", + trading_pairs=[], + trading_required=False, + domain=self.domain) + self.connector._web_assistants_factory._auth = self.auth + + self.data_source = HashkeyAPIUserStreamDataSource( + auth=self.auth, + trading_pairs=[self.trading_pair], + connector=self.connector, + api_factory=self.connector._web_assistants_factory, + domain=self.domain + ) + + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + self.resume_test_event = asyncio.Event() + + self.connector._set_trading_pair_symbol_map(bidict({self.ex_trading_pair: self.trading_pair})) + + def tearDown(self) -> None: + self.listening_task and self.listening_task.cancel() + super().tearDown() + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message + for record in self.log_records) + + def _raise_exception(self, exception_class): + raise exception_class + + def _create_exception_and_unlock_test_with_event(self, exception): + self.resume_test_event.set() + raise exception + + def _create_return_value_and_unlock_test_with_event(self, value): + self.resume_test_event.set() + return value + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def _error_response(self) -> Dict[str, Any]: + resp = { + "code": "ERROR CODE", + "msg": "ERROR MESSAGE" + } + + return resp + + def _successfully_subscribed_event(self): + resp = { + "result": None, + "id": 1 + } + return resp + + @aioresponses() + def test_get_listen_key_log_exception(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.post(regex_url, 
status=400, body=json.dumps(self._error_response())) + + with self.assertRaises(IOError): + self.async_run_with_timeout(self.data_source._get_listen_key()) + + @aioresponses() + def test_get_listen_key_successful(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + result: str = self.async_run_with_timeout(self.data_source._get_listen_key()) + + self.assertEqual(self.listen_key, result) + + @aioresponses() + def test_ping_listen_key_log_exception(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.put(regex_url, status=400, body=json.dumps(self._error_response())) + + self.data_source._current_listen_key = self.listen_key + result: bool = self.async_run_with_timeout(self.data_source._ping_listen_key()) + + self.assertTrue(self._is_logged("WARNING", f"Failed to refresh the listen key {self.listen_key}: " + f"{self._error_response()}")) + self.assertFalse(result) + + @aioresponses() + def test_ping_listen_key_successful(self, mock_api): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.put(regex_url, body=json.dumps({})) + + self.data_source._current_listen_key = self.listen_key + result: bool = self.async_run_with_timeout(self.data_source._ping_listen_key()) + self.assertTrue(result) + + @patch("hummingbot.connector.exchange.hashkey.hashkey_api_user_stream_data_source.HashkeyAPIUserStreamDataSource" + "._ping_listen_key", + new_callable=AsyncMock) + def test_manage_listen_key_task_loop_keep_alive_failed(self, mock_ping_listen_key): + mock_ping_listen_key.side_effect = (lambda *args, **kwargs: + self._create_return_value_and_unlock_test_with_event(False)) + + self.data_source._current_listen_key = self.listen_key + + # Simulate LISTEN_KEY_KEEP_ALIVE_INTERVAL reached + self.data_source._last_listen_key_ping_ts = 0 + + self.listening_task = self.ev_loop.create_task(self.data_source._manage_listen_key_task_loop()) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue(self._is_logged("ERROR", "Error occurred renewing listen key ...")) + self.assertIsNone(self.data_source._current_listen_key) + self.assertFalse(self.data_source._listen_key_initialized_event.is_set()) + + @patch("hummingbot.connector.exchange.hashkey.hashkey_api_user_stream_data_source.HashkeyAPIUserStreamDataSource." 
+ "_ping_listen_key", + new_callable=AsyncMock) + def test_manage_listen_key_task_loop_keep_alive_successful(self, mock_ping_listen_key): + mock_ping_listen_key.side_effect = (lambda *args, **kwargs: + self._create_return_value_and_unlock_test_with_event(True)) + + # Simulate LISTEN_KEY_KEEP_ALIVE_INTERVAL reached + self.data_source._current_listen_key = self.listen_key + self.data_source._listen_key_initialized_event.set() + self.data_source._last_listen_key_ping_ts = 0 + + self.listening_task = self.ev_loop.create_task(self.data_source._manage_listen_key_task_loop()) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue(self._is_logged("INFO", f"Refreshed listen key {self.listen_key}.")) + self.assertGreater(self.data_source._last_listen_key_ping_ts, 0) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_empty_payload(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, "") + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(0, msg_queue.qsize()) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_connection_failed(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_ws.side_effect = lambda *arg, **kwars: self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR.")) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged("ERROR", + "Unexpected error while listening to user stream. 
Retrying after 5 seconds...")) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_iter_message_throws_exception(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + msg_queue: asyncio.Queue = asyncio.Queue() + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + mock_ws.return_value.receive.side_effect = (lambda *args, **kwargs: + self._create_exception_and_unlock_test_with_event( + Exception("TEST ERROR"))) + mock_ws.close.return_value = None + + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.async_run_with_timeout(self.resume_test_event.wait()) + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error while listening to user stream. Retrying after 5 seconds...")) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_pong_payload(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + mock_pong = { + "pong": "1545910590801" + } + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(mock_pong)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(1, msg_queue.qsize()) + + @aioresponses() + @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) + def test_listen_for_user_stream_does_not_queue_ticket_info(self, mock_api, mock_ws): + url = web_utils.rest_url(path_url=CONSTANTS.USER_STREAM_PATH_URL, domain=self.domain) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_response = { + "listenKey": self.listen_key + } + mock_api.post(regex_url, body=json.dumps(mock_response)) + + ticket_info = [ + { + "e": "ticketInfo", # Event type + "E": "1668693440976", # Event time + "s": "BTCUSDT", # Symbol + "q": "0.001639", # quantity + "t": "1668693440899", # time + "p": "61000.0", # price + "T": "899062000267837441", # ticketId + "o": "899048013515737344", # orderId + "c": "1621910874883", # clientOrderId + "O": "899062000118679808", # matchOrderId + "a": "10086", # accountId + "A": 0, # ignore + "m": True, # isMaker + "S": "BUY", # side SELL or BUY + } + ] + mock_ws.return_value = self.mocking_assistant.create_websocket_mock() + self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(ticket_info)) + + msg_queue = asyncio.Queue() + self.listening_task = self.ev_loop.create_task( + self.data_source.listen_for_user_stream(msg_queue) + ) + + self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value) + + self.assertEqual(1, msg_queue.qsize()) diff --git a/test/hummingbot/connector/exchange/hashkey/test_hashkey_auth.py 
b/test/hummingbot/connector/exchange/hashkey/test_hashkey_auth.py new file mode 100644 index 0000000000..4ef759e590 --- /dev/null +++ b/test/hummingbot/connector/exchange/hashkey/test_hashkey_auth.py @@ -0,0 +1,111 @@ +import asyncio +import hashlib +import hmac +from collections import OrderedDict +from typing import Any, Awaitable, Dict, Mapping, Optional +from unittest import TestCase +from unittest.mock import MagicMock +from urllib.parse import urlencode + +from hummingbot.connector.exchange.hashkey.hashkey_auth import HashkeyAuth +from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, WSJSONRequest + + +class HashkeyAuthTests(TestCase): + + def setUp(self) -> None: + super().setUp() + self.api_key = "testApiKey" + self.passphrase = "testPassphrase" + self.secret_key = "testSecretKey" + + self.mock_time_provider = MagicMock() + self.mock_time_provider.time.return_value = 1000 + + self.auth = HashkeyAuth( + api_key=self.api_key, + secret_key=self.secret_key, + time_provider=self.mock_time_provider, + ) + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def test_add_auth_params_to_get_request_without_params(self): + request = RESTRequest( + method=RESTMethod.GET, + url="https://test.url/api/endpoint", + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + params_expected = self._params_expected(request.params) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) + self.assertEqual(params_expected['signature'], request.params["signature"]) + + def test_add_auth_params_to_get_request_with_params(self): + params = { + "param_z": "value_param_z", + "param_a": "value_param_a" + } + request = RESTRequest( + method=RESTMethod.GET, + url="https://test.url/api/endpoint", + params=params, + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + + params_expected = self._params_expected(request.params) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_expected['timestamp'], request.params["timestamp"]) + self.assertEqual(params_expected['signature'], request.params["signature"]) + self.assertEqual(params_expected['param_z'], request.params["param_z"]) + self.assertEqual(params_expected['param_a'], request.params["param_a"]) + + def test_add_auth_params_to_post_request(self): + params = {"param_z": "value_param_z", "param_a": "value_param_a"} + request = RESTRequest( + method=RESTMethod.POST, + url="https://test.url/api/endpoint", + data=params, + is_auth_required=True, + throttler_limit_id="/api/endpoint" + ) + params_auth = self._params_expected(request.params) + params_request = self._params_expected(request.data) + + self.async_run_with_timeout(self.auth.rest_authenticate(request)) + self.assertEqual(self.api_key, request.headers["X-HK-APIKEY"]) + self.assertEqual(params_auth['timestamp'], request.params["timestamp"]) + self.assertEqual(params_auth['signature'], request.params["signature"]) + self.assertEqual(params_request['param_z'], request.data["param_z"]) + self.assertEqual(params_request['param_a'], request.data["param_a"]) + + def test_no_auth_added_to_wsrequest(self): + payload = {"param1": "value_param_1"} + request = 
WSJSONRequest(payload=payload, is_auth_required=True) + self.async_run_with_timeout(self.auth.ws_authenticate(request)) + self.assertEqual(payload, request.payload) + + def _generate_signature(self, params: Dict[str, Any]) -> str: + encoded_params_str = urlencode(params) + digest = hmac.new(self.secret_key.encode("utf8"), encoded_params_str.encode("utf8"), hashlib.sha256).hexdigest() + return digest + + def _params_expected(self, request_params: Optional[Mapping[str, str]]) -> Dict: + request_params = request_params if request_params else {} + params = { + 'timestamp': 1000000, + } + params.update(request_params) + params = OrderedDict(sorted(params.items(), key=lambda t: t[0])) + params['signature'] = self._generate_signature(params=params) + return params diff --git a/test/hummingbot/connector/exchange/hashkey/test_hashkey_exchange.py b/test/hummingbot/connector/exchange/hashkey/test_hashkey_exchange.py new file mode 100644 index 0000000000..a9e996864d --- /dev/null +++ b/test/hummingbot/connector/exchange/hashkey/test_hashkey_exchange.py @@ -0,0 +1,1664 @@ +import asyncio +import json +import re +import unittest +from decimal import Decimal +from typing import Awaitable, Dict, NamedTuple, Optional +from unittest.mock import AsyncMock, patch + +from aioresponses import aioresponses +from bidict import bidict + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.hashkey import hashkey_constants as CONSTANTS, hashkey_web_utils as web_utils +from hummingbot.connector.exchange.hashkey.hashkey_api_order_book_data_source import HashkeyAPIOrderBookDataSource +from hummingbot.connector.exchange.hashkey.hashkey_exchange import HashkeyExchange +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import get_new_client_order_id +from hummingbot.core.data_type.cancellation_result import CancellationResult +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.data_type.trade_fee import TokenAmount +from hummingbot.core.event.event_logger import EventLogger +from hummingbot.core.event.events import ( + BuyOrderCompletedEvent, + BuyOrderCreatedEvent, + MarketEvent, + MarketOrderFailureEvent, + OrderCancelledEvent, + OrderFilledEvent, + SellOrderCreatedEvent, +) +from hummingbot.core.network_iterator import NetworkStatus + + +class TestHashkeyExchange(unittest.TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "ETH" + cls.quote_asset = "USD" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.api_key = "someKey" + cls.api_passphrase = "somePassPhrase" + cls.api_secret_key = "someSecretKey" + + def setUp(self) -> None: + super().setUp() + + self.log_records = [] + self.test_task: Optional[asyncio.Task] = None + self.client_config_map = ClientConfigAdapter(ClientConfigMap()) + + self.exchange = HashkeyExchange( + self.client_config_map, + self.api_key, + self.api_secret_key, + trading_pairs=[self.trading_pair] + ) + + self.exchange.logger().setLevel(1) + self.exchange.logger().addHandler(self) + self.exchange._time_synchronizer.add_time_offset_ms_sample(0) + 
self.exchange._time_synchronizer.logger().setLevel(1) + self.exchange._time_synchronizer.logger().addHandler(self) + self.exchange._order_tracker.logger().setLevel(1) + self.exchange._order_tracker.logger().addHandler(self) + + self._initialize_event_loggers() + + HashkeyAPIOrderBookDataSource._trading_pair_symbol_map = { + CONSTANTS.DEFAULT_DOMAIN: bidict( + {self.ex_trading_pair: self.trading_pair}) + } + + def tearDown(self) -> None: + self.test_task and self.test_task.cancel() + HashkeyAPIOrderBookDataSource._trading_pair_symbol_map = {} + super().tearDown() + + def _initialize_event_loggers(self): + self.buy_order_completed_logger = EventLogger() + self.buy_order_created_logger = EventLogger() + self.order_cancelled_logger = EventLogger() + self.order_failure_logger = EventLogger() + self.order_filled_logger = EventLogger() + self.sell_order_completed_logger = EventLogger() + self.sell_order_created_logger = EventLogger() + + events_and_loggers = [ + (MarketEvent.BuyOrderCompleted, self.buy_order_completed_logger), + (MarketEvent.BuyOrderCreated, self.buy_order_created_logger), + (MarketEvent.OrderCancelled, self.order_cancelled_logger), + (MarketEvent.OrderFailure, self.order_failure_logger), + (MarketEvent.OrderFilled, self.order_filled_logger), + (MarketEvent.SellOrderCompleted, self.sell_order_completed_logger), + (MarketEvent.SellOrderCreated, self.sell_order_created_logger)] + + for event, logger in events_and_loggers: + self.exchange.add_listener(event, logger) + + def handle(self, record): + self.log_records.append(record) + + def _is_logged(self, log_level: str, message: str) -> bool: + return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records) + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def get_exchange_rules_mock(self) -> Dict: + exchange_rules = { + "timezone": "UTC", + "serverTime": "1703696385826", + "brokerFilters": [], + "symbols": [ + { + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "status": "TRADING", + "baseAsset": "ETH", + "baseAssetName": "ETH", + "baseAssetPrecision": "0.0001", + "quoteAsset": "USD", + "quoteAssetName": "USD", + "quotePrecision": "0.0000001", + "retailAllowed": True, + "piAllowed": True, + "corporateAllowed": True, + "omnibusAllowed": True, + "icebergAllowed": False, + "isAggregate": False, + "allowMargin": False, + "filters": [ + { + "minPrice": "0.01", + "maxPrice": "100000.00000000", + "tickSize": "0.01", + "filterType": "PRICE_FILTER" + }, + { + "minQty": "0.005", + "maxQty": "53", + "stepSize": "0.0001", + "filterType": "LOT_SIZE" + }, + { + "minNotional": "10", + "filterType": "MIN_NOTIONAL" + }, + { + "minAmount": "10", + "maxAmount": "10000000", + "minBuyPrice": "0", + "filterType": "TRADE_AMOUNT" + }, + { + "maxSellPrice": "0", + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "LIMIT_TRADING" + }, + { + "buyPriceUpRate": "0.2", + "sellPriceDownRate": "0.2", + "filterType": "MARKET_TRADING" + }, + { + "noAllowMarketStartTime": "0", + "noAllowMarketEndTime": "0", + "limitOrderStartTime": "0", + "limitOrderEndTime": "0", + "limitMinPrice": "0", + "limitMaxPrice": "0", + "filterType": "OPEN_QUOTE" + } + ] + } + ], + "options": [], + "contracts": [], + "coins": [ + { + "orgId": "9001", + "coinId": "BTC", + "coinName": "BTC", + "coinFullName": "Bitcoin", + "allowWithdraw": True, + "allowDeposit": True, + "chainTypes": [ + { + "chainType": 
"Bitcoin", + "withdrawFee": "0", + "minWithdrawQuantity": "0.0005", + "maxWithdrawQuantity": "0", + "minDepositQuantity": "0.0001", + "allowDeposit": True, + "allowWithdraw": True + } + ] + }, + { + "orgId": "9001", + "coinId": "ETH", + "coinName": "ETH", + "coinFullName": "Ethereum", + "allowWithdraw": True, + "allowDeposit": True, + "chainTypes": [ + { + "chainType": "ERC20", + "withdrawFee": "0", + "minWithdrawQuantity": "0", + "maxWithdrawQuantity": "0", + "minDepositQuantity": "0.0075", + "allowDeposit": True, + "allowWithdraw": True + } + ] + }, + { + "orgId": "9001", + "coinId": "USD", + "coinName": "USD", + "coinFullName": "USD", + "allowWithdraw": True, + "allowDeposit": True, + "chainTypes": [] + } + ] + } + return exchange_rules + + def _simulate_trading_rules_initialized(self): + self.exchange._trading_rules = { + self.trading_pair: TradingRule( + trading_pair=self.trading_pair, + min_order_size=Decimal(str(0.01)), + min_price_increment=Decimal(str(0.0001)), + min_base_amount_increment=Decimal(str(0.000001)), + ) + } + + def _validate_auth_credentials_present(self, request_call_tuple: NamedTuple): + request_headers = request_call_tuple.kwargs["headers"] + request_params = request_call_tuple.kwargs["params"] + self.assertIn("Content-Type", request_headers) + self.assertIn("X-HK-APIKEY", request_headers) + self.assertEqual("application/x-www-form-urlencoded", request_headers["Content-Type"]) + self.assertIn("signature", request_params) + + def test_supported_order_types(self): + supported_types = self.exchange.supported_order_types() + self.assertIn(OrderType.MARKET, supported_types) + self.assertIn(OrderType.LIMIT, supported_types) + self.assertIn(OrderType.LIMIT_MAKER, supported_types) + + @aioresponses() + def test_check_network_success(self, mock_api): + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + resp = { + "serverTime": 1703695619183 + } + mock_api.get(url, body=json.dumps(resp)) + + ret = self.async_run_with_timeout(coroutine=self.exchange.check_network()) + + self.assertEqual(NetworkStatus.CONNECTED, ret) + + @aioresponses() + def test_check_network_failure(self, mock_api): + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + mock_api.get(url, status=500) + + ret = self.async_run_with_timeout(coroutine=self.exchange.check_network()) + + self.assertEqual(ret, NetworkStatus.NOT_CONNECTED) + + @aioresponses() + def test_check_network_raises_cancel_exception(self, mock_api): + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + + mock_api.get(url, exception=asyncio.CancelledError) + + self.assertRaises(asyncio.CancelledError, self.async_run_with_timeout, self.exchange.check_network()) + + @aioresponses() + def test_update_trading_rules(self, mock_api): + self.exchange._set_current_timestamp(1000) + + url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + + resp = self.get_exchange_rules_mock() + mock_api.get(url, body=json.dumps(resp)) + mock_api.get(url, body=json.dumps(resp)) + + self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) + + self.assertTrue(self.trading_pair in self.exchange._trading_rules) + + @aioresponses() + def test_update_trading_rules_ignores_rule_with_error(self, mock_api): + self.exchange._set_current_timestamp(1000) + + url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + exchange_rules = { + "timezone": "UTC", + "serverTime": "1703696385826", + "brokerFilters": [], + "symbols": [ + { + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "status": "TRADING", + "baseAsset": "ETH", + 
"baseAssetName": "ETH", + "baseAssetPrecision": "0.0001", + "quoteAsset": "USD", + "quoteAssetName": "USD", + "quotePrecision": "0.0000001", + "retailAllowed": True, + "piAllowed": True, + "corporateAllowed": True, + "omnibusAllowed": True, + "icebergAllowed": False, + "isAggregate": False, + "allowMargin": False, + "filters": [] + } + ], + "options": [], + "contracts": [], + } + mock_api.get(url, body=json.dumps(exchange_rules)) + + self.async_run_with_timeout(coroutine=self.exchange._update_trading_rules()) + + self.assertEqual(0, len(self.exchange._trading_rules)) + self.assertTrue( + self._is_logged("ERROR", f"Error parsing the trading pair rule {self.ex_trading_pair}. Skipping.") + ) + + def test_initial_status_dict(self): + HashkeyAPIOrderBookDataSource._trading_pair_symbol_map = {} + + status_dict = self.exchange.status_dict + + expected_initial_dict = { + "symbols_mapping_initialized": False, + "order_books_initialized": False, + "account_balance": False, + "trading_rule_initialized": False, + "user_stream_initialized": False, + } + + self.assertEqual(expected_initial_dict, status_dict) + self.assertFalse(self.exchange.ready) + + def test_get_fee_returns_fee_from_exchange_if_available_and_default_if_not(self): + fee = self.exchange.get_fee( + base_currency="SOME", + quote_currency="OTHER", + order_type=OrderType.LIMIT, + order_side=TradeType.BUY, + amount=Decimal("10"), + price=Decimal("20"), + ) + + self.assertEqual(Decimal("0.000"), fee.percent) # default fee + + @patch("hummingbot.connector.utils.get_tracking_nonce") + def test_client_order_id_on_order(self, mocked_nonce): + mocked_nonce.return_value = 9 + + result = self.exchange.buy( + trading_pair=self.trading_pair, + amount=Decimal("1"), + order_type=OrderType.LIMIT, + price=Decimal("2"), + ) + expected_client_order_id = get_new_client_order_id( + is_buy=True, trading_pair=self.trading_pair, + hbot_order_id_prefix=CONSTANTS.HBOT_ORDER_ID_PREFIX, + max_id_len=CONSTANTS.MAX_ORDER_ID_LEN + ) + + self.assertEqual(result, expected_client_order_id) + + result = self.exchange.sell( + trading_pair=self.trading_pair, + amount=Decimal("1"), + order_type=OrderType.LIMIT, + price=Decimal("2"), + ) + expected_client_order_id = get_new_client_order_id( + is_buy=False, trading_pair=self.trading_pair, + hbot_order_id_prefix=CONSTANTS.HBOT_ORDER_ID_PREFIX, + max_id_len=CONSTANTS.MAX_ORDER_ID_LEN + ) + + self.assertEqual(result, expected_client_order_id) + + def test_restore_tracking_states_only_registers_open_orders(self): + orders = [] + orders.append(InFlightOrder( + client_order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + )) + orders.append(InFlightOrder( + client_order_id="OID2", + exchange_order_id="EOID2", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.CANCELED + )) + orders.append(InFlightOrder( + client_order_id="OID3", + exchange_order_id="EOID3", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.FILLED + )) + orders.append(InFlightOrder( + client_order_id="OID4", + exchange_order_id="EOID4", + trading_pair=self.trading_pair, + 
order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.FAILED + )) + + tracking_states = {order.client_order_id: order.to_json() for order in orders} + + self.exchange.restore_tracking_states(tracking_states) + + self.assertIn("OID1", self.exchange.in_flight_orders) + self.assertNotIn("OID2", self.exchange.in_flight_orders) + self.assertNotIn("OID3", self.exchange.in_flight_orders) + self.assertNotIn("OID4", self.exchange.in_flight_orders) + + @aioresponses() + def test_create_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + creation_response = { + "accountId": "32423423423", + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "clientOrderId": "2343242342", + "orderId": "23423432423", + "transactTime": "1703708477519", + "price": "2222", + "origQty": "0.04", + "executedQty": "0.03999", + "status": "FILLED", + "timeInForce": "IOC", + "type": "LIMIT", + "side": "BUY", + "reqAmount": "0", + "concentration": "" + } + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(resp)) + mock_api.post(regex_url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.test_task = asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.BUY, + order_id="OID1", + trading_pair=self.trading_pair, + amount=Decimal("100"), + order_type=OrderType.LIMIT, + price=Decimal("10000"))) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + request_params = order_request[1][0].kwargs["params"] + self.assertEqual(self.ex_trading_pair, request_params["symbol"]) + self.assertEqual("BUY", request_params["side"]) + self.assertEqual("LIMIT", request_params["type"]) + self.assertEqual(Decimal("100"), Decimal(request_params["quantity"])) + self.assertEqual(Decimal("10000"), Decimal(request_params["price"])) + self.assertEqual("OID1", request_params["newClientOrderId"]) + + self.assertIn("OID1", self.exchange.in_flight_orders) + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual("OID1", create_event.order_id) + self.assertEqual(creation_response["orderId"], create_event.exchange_order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Created LIMIT BUY order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." 
+ ) + ) + + @aioresponses() + def test_create_limit_maker_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + creation_response = { + "accountId": "32423423423", + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "clientOrderId": "2343242342", + "orderId": "23423432423", + "transactTime": "1703708477519", + "price": "2222", + "origQty": "0.04", + "executedQty": "0.03999", + "status": "FILLED", + "timeInForce": "IOC", + "type": "LIMIT_MAKER", + "side": "BUY", + "reqAmount": "0", + "concentration": "" + } + + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(resp)) + mock_api.post(regex_url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.test_task = asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.BUY, + order_id="OID1", + trading_pair=self.trading_pair, + amount=Decimal("100"), + order_type=OrderType.LIMIT_MAKER, + price=Decimal("10000"))) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + request_data = order_request[1][0].kwargs["params"] + self.assertEqual(self.ex_trading_pair, request_data["symbol"]) + self.assertEqual(TradeType.BUY.name, request_data["side"]) + self.assertEqual("LIMIT_MAKER", request_data["type"]) + self.assertEqual(Decimal("100"), Decimal(request_data["quantity"])) + self.assertEqual(Decimal("10000"), Decimal(request_data["price"])) + self.assertEqual("OID1", request_data["newClientOrderId"]) + + self.assertIn("OID1", self.exchange.in_flight_orders) + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.LIMIT_MAKER, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual("OID1", create_event.order_id) + self.assertEqual(creation_response["orderId"], create_event.exchange_order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Created LIMIT_MAKER BUY order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." 
+ ) + ) + + @aioresponses() + @patch("hummingbot.connector.exchange.hashkey.hashkey_exchange.HashkeyExchange.get_price") + def test_create_market_order_successfully(self, mock_api, get_price_mock): + get_price_mock.return_value = Decimal(1000) + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + url = web_utils.rest_url(CONSTANTS.MARKET_ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + creation_response = { + "accountId": "32423423423", + "symbol": "ETHUSD", + "symbolName": "ETHUSD", + "clientOrderId": "2343242342", + "orderId": "23423432423", + "transactTime": "1703708477519", + "price": "0", + "origQty": "0.04", + "executedQty": "0.03999", + "status": "FILLED", + "timeInForce": "IOC", + "type": "MARKET", + "side": "BUY", + "reqAmount": "0", + "concentration": "" + } + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(resp)) + mock_api.post(regex_url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.test_task = asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.SELL, + order_id="OID1", + trading_pair=self.trading_pair, + amount=Decimal("100"), + order_type=OrderType.MARKET)) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + request_data = order_request[1][0].kwargs["params"] + self.assertEqual(self.ex_trading_pair, request_data["symbol"]) + self.assertEqual(TradeType.SELL.name, request_data["side"]) + self.assertEqual("MARKET", request_data["type"]) + self.assertEqual(Decimal("100"), Decimal(request_data["quantity"])) + self.assertEqual("OID1", request_data["newClientOrderId"]) + self.assertNotIn("price", request_data) + + self.assertIn("OID1", self.exchange.in_flight_orders) + create_event: SellOrderCreatedEvent = self.sell_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.MARKET, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual("OID1", create_event.order_id) + self.assertEqual(creation_response["orderId"], create_event.exchange_order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Created MARKET SELL order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {None}." 
+ ) + ) + + @aioresponses() + def test_create_order_fails_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(resp)) + mock_api.post(regex_url, + status=400, + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.test_task = asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.BUY, + order_id="OID1", + trading_pair=self.trading_pair, + amount=Decimal("100"), + order_type=OrderType.LIMIT, + price=Decimal("10000"))) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + + self.assertNotIn("OID1", self.exchange.in_flight_orders) + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual("OID1", failure_event.order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Order OID1 has failed. Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='OID1', exchange_order_id=None, misc_updates=None)" + ) + ) + + @aioresponses() + def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + tradingrule_url = web_utils.rest_url(CONSTANTS.EXCHANGE_INFO_PATH_URL) + resp = self.get_exchange_rules_mock() + mock_api.get(tradingrule_url, body=json.dumps(resp)) + mock_api.post(regex_url, + status=400, + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.test_task = asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.BUY, + order_id="OID1", + trading_pair=self.trading_pair, + amount=Decimal("0.0001"), + order_type=OrderType.LIMIT, + price=Decimal("0.0001"))) + # The second order is used only to have the event triggered and avoid using timeouts for tests + asyncio.get_event_loop().create_task( + self.exchange._create_order(trade_type=TradeType.BUY, + order_id="OID2", + trading_pair=self.trading_pair, + amount=Decimal("100"), + order_type=OrderType.LIMIT, + price=Decimal("10000"))) + + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn("OID1", self.exchange.in_flight_orders) + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual("OID1", failure_event.order_id) + + 
self.assertTrue( + self._is_logged( + "WARNING", + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." + ) + ) + self.assertTrue( + self._is_logged( + "INFO", + f"Order OID1 has failed. Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + "client_order_id='OID1', exchange_order_id=None, misc_updates=None)" + ) + ) + + @aioresponses() + def test_cancel_order_successfully(self, mock_api): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="4", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn("OID1", self.exchange.in_flight_orders) + order = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = { + "accountId": "10086", + "symbol": self.ex_trading_pair, + "clientOrderId": "1703710745976", + "orderId": order.exchange_order_id, + "transactTime": "1703710747523", + "price": float(order.price), + "origQty": float(order.amount), + "executedQty": "0", + "status": "CANCELED", + "timeInForce": "GTC", + "type": "LIMIT", + "side": "BUY" + } + + mock_api.delete(regex_url, + body=json.dumps(response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.exchange.cancel(client_order_id="OID1", trading_pair=self.trading_pair) + self.async_run_with_timeout(request_sent_event.wait()) + + cancel_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(cancel_request[1][0]) + + cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Successfully canceled order {order.client_order_id}." 
+ ) + ) + + @aioresponses() + def test_cancel_order_raises_failure_event_when_request_fails(self, mock_api): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="4", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn("OID1", self.exchange.in_flight_orders) + order = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.delete(regex_url, + status=400, + callback=lambda *args, **kwargs: request_sent_event.set()) + + self.exchange.cancel(client_order_id="OID1", trading_pair=self.trading_pair) + self.async_run_with_timeout(request_sent_event.wait()) + + cancel_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(cancel_request[1][0]) + + self.assertEquals(0, len(self.order_cancelled_logger.event_log)) + + self.assertTrue( + self._is_logged( + "ERROR", + f"Failed to cancel order {order.client_order_id}" + ) + ) + + @aioresponses() + def test_cancel_two_orders_with_cancel_all_and_one_fails(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="4", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn("OID1", self.exchange.in_flight_orders) + order1 = self.exchange.in_flight_orders["OID1"] + + self.exchange.start_tracking_order( + order_id="OID2", + exchange_order_id="5", + trading_pair=self.trading_pair, + trade_type=TradeType.SELL, + price=Decimal("11000"), + amount=Decimal("90"), + order_type=OrderType.LIMIT, + ) + + self.assertIn("OID2", self.exchange.in_flight_orders) + order2 = self.exchange.in_flight_orders["OID2"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = { + "accountId": "10086", + "symbol": self.ex_trading_pair, + "clientOrderId": order1.client_order_id, + "orderId": order1.exchange_order_id, + "transactTime": "1620811601728", + "price": float(order1.price), + "origQty": float(order1.amount), + "executedQty": "0", + "status": "CANCELED", + "timeInForce": "GTC", + "type": "LIMIT", + "side": "BUY" + } + + mock_api.delete(regex_url, body=json.dumps(response)) + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.delete(regex_url, status=400) + + cancellation_results = self.async_run_with_timeout(self.exchange.cancel_all(10)) + + self.assertEqual(2, len(cancellation_results)) + self.assertEqual(CancellationResult(order1.client_order_id, True), cancellation_results[0]) + self.assertEqual(CancellationResult(order2.client_order_id, False), cancellation_results[1]) + + self.assertEqual(1, len(self.order_cancelled_logger.event_log)) + cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order1.client_order_id, cancel_event.order_id) + + self.assertTrue( + self._is_logged( + "INFO", + f"Successfully canceled order 
{order1.client_order_id}." + ) + ) + + @aioresponses() + @patch("hummingbot.connector.time_synchronizer.TimeSynchronizer._current_seconds_counter") + def test_update_time_synchronizer_successfully(self, mock_api, seconds_counter_mock): + seconds_counter_mock.side_effect = [0, 0, 0] + + self.exchange._time_synchronizer.clear_time_offset_ms_samples() + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = { + "serverTime": 1703740249709 + } + + mock_api.get(regex_url, body=json.dumps(response)) + + self.async_run_with_timeout(self.exchange._update_time_synchronizer()) + self.assertEqual(response['serverTime'] * 1e-3, self.exchange._time_synchronizer.time()) + + @aioresponses() + def test_update_time_synchronizer_failure_is_logged(self, mock_api): + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = { + "code": "-1", + "msg": "error" + } + + mock_api.get(regex_url, body=json.dumps(response)) + + self.async_run_with_timeout(self.exchange._update_time_synchronizer()) + + self.assertTrue(self._is_logged("NETWORK", "Error getting server time.")) + + @aioresponses() + def test_update_time_synchronizer_raises_cancelled_error(self, mock_api): + url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, exception=asyncio.CancelledError) + + self.assertRaises( + asyncio.CancelledError, + self.async_run_with_timeout, self.exchange._update_time_synchronizer()) + + @aioresponses() + def test_update_balances(self, mock_api): + url = web_utils.rest_url(CONSTANTS.ACCOUNTS_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + response = { + "balances": [ + { + "asset": "HKD", + "assetId": "HKD", + "assetName": "HKD", + "total": "2", + "free": "2", + "locked": "0" + }, + { + "asset": "USD", + "assetId": "USD", + "assetName": "USD", + "total": "3505", + "free": "3505", + "locked": "0" + } + ], + "userId": "10086" + } + + mock_api.get(regex_url, body=json.dumps(response)) + self.async_run_with_timeout(self.exchange._update_balances()) + + available_balances = self.exchange.available_balances + total_balances = self.exchange.get_all_balances() + + self.assertEqual(Decimal("2"), available_balances["HKD"]) + self.assertEqual(Decimal("3505"), available_balances["USD"]) + + response = { + "balances": [ + { + "asset": "HKD", + "assetId": "HKD", + "assetName": "HKD", + "total": "2", + "free": "1", + "locked": "0" + }, + { + "asset": "USD", + "assetId": "USD", + "assetName": "USD", + "total": "3505", + "free": "3000", + "locked": "0" + } + ], + "userId": "10086" + } + + mock_api.get(regex_url, body=json.dumps(response)) + self.async_run_with_timeout(self.exchange._update_balances()) + + available_balances = self.exchange.available_balances + total_balances = self.exchange.get_all_balances() + + self.assertNotIn("USDT", available_balances) + self.assertNotIn("USDT", total_balances) + self.assertEqual(Decimal("3000"), available_balances["USD"]) + self.assertEqual(Decimal("3505"), total_balances["USD"]) + + @aioresponses() + def test_update_order_status_when_filled(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange._last_poll_timestamp = (self.exchange.current_timestamp - + 10 - 1) + + self.exchange.start_tracking_order( + order_id="OID1", + 
exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order: InFlightOrder = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + order_status = { + "accountId": "10086", + "exchangeId": "301", + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "clientOrderId": order.client_order_id, + "orderId": order.exchange_order_id, + "price": "50", + "origQty": "1", + "executedQty": "0", + "cummulativeQuoteQty": "0", + "cumulativeQuoteQty": "0", + "avgPrice": "0", + "status": "FILLED", + "timeInForce": "GTC", + "type": "LIMIT", + "side": order.trade_type.name, + "stopPrice": "0.0", + "icebergQty": "0.0", + "time": "1703710747523", + "updateTime": "1703710888400", + "isWorking": True, + "reqAmount": "0" + } + + mock_api.get(regex_url, body=json.dumps(order_status)) + + # Simulate the order has been filled with a TradeUpdate + order.completely_filled_event.set() + self.async_run_with_timeout(self.exchange._update_order_status()) + self.async_run_with_timeout(order.wait_until_completely_filled()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + + self.assertTrue(order.is_filled) + self.assertTrue(order.is_done) + + buy_event: BuyOrderCompletedEvent = self.buy_order_completed_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) + self.assertEqual(order.client_order_id, buy_event.order_id) + self.assertEqual(order.base_asset, buy_event.base_asset) + self.assertEqual(order.quote_asset, buy_event.quote_asset) + self.assertEqual(Decimal(0), buy_event.base_asset_amount) + self.assertEqual(Decimal(0), buy_event.quote_asset_amount) + self.assertEqual(order.order_type, buy_event.order_type) + self.assertEqual(order.exchange_order_id, buy_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue( + self._is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." 
+ ) + ) + + @aioresponses() + def test_update_order_status_when_cancelled(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange._last_poll_timestamp = (self.exchange.current_timestamp - + 10 - 1) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + order_status = { + "accountId": "10086", + "exchangeId": "301", + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "clientOrderId": order.client_order_id, + "orderId": order.exchange_order_id, + "price": "50", + "origQty": "1", + "executedQty": "0", + "cummulativeQuoteQty": "0", + "cumulativeQuoteQty": "0", + "avgPrice": "0", + "status": "CANCELED", + "timeInForce": "GTC", + "type": "LIMIT", + "side": order.trade_type.name, + "stopPrice": "0.0", + "icebergQty": "0.0", + "time": "1703710747523", + "updateTime": "1703710888400", + "isWorking": True, + "reqAmount": "0" + } + + mock_api.get(regex_url, body=json.dumps(order_status)) + + self.async_run_with_timeout(self.exchange._update_order_status()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + + cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + self.assertEqual(order.exchange_order_id, cancel_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue( + self._is_logged("INFO", f"Successfully canceled order {order.client_order_id}.") + ) + + @aioresponses() + def test_update_order_status_when_order_has_not_changed(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange._last_poll_timestamp = (self.exchange.current_timestamp - + 10 - 1) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order: InFlightOrder = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + order_status = { + "accountId": "10086", + "exchangeId": "301", + "symbol": self.ex_trading_pair, + "symbolName": self.ex_trading_pair, + "clientOrderId": order.client_order_id, + "orderId": order.exchange_order_id, + "price": "50", + "origQty": "1", + "executedQty": "0", + "cummulativeQuoteQty": "0", + "cumulativeQuoteQty": "0", + "avgPrice": "0", + "status": "NEW", + "timeInForce": "GTC", + "type": "LIMIT", + "side": order.trade_type.name, + "stopPrice": "0.0", + "icebergQty": "0.0", + "time": "1703710747523", + "updateTime": "1703710888400", + "isWorking": True, + "reqAmount": "0" + } + + mock_response = order_status + mock_api.get(regex_url, body=json.dumps(mock_response)) + + self.assertTrue(order.is_open) + + self.async_run_with_timeout(self.exchange._update_order_status()) + + order_request = next(((key, value) for key, value in 
mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + + self.assertTrue(order.is_open) + self.assertFalse(order.is_filled) + self.assertFalse(order.is_done) + + @aioresponses() + def test_update_order_status_when_request_fails_marks_order_as_not_found(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange._last_poll_timestamp = (self.exchange.current_timestamp - + 10 - 1) + + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order: InFlightOrder = self.exchange.in_flight_orders["OID1"] + + url = web_utils.rest_url(CONSTANTS.ORDER_PATH_URL) + regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url, status=404) + + self.async_run_with_timeout(self.exchange._update_order_status()) + + order_request = next(((key, value) for key, value in mock_api.requests.items() + if key[1].human_repr().startswith(url))) + self._validate_auth_credentials_present(order_request[1][0]) + + self.assertTrue(order.is_open) + self.assertFalse(order.is_filled) + self.assertFalse(order.is_done) + + self.assertEqual(1, self.exchange._order_tracker._order_not_found_records[order.client_order_id]) + + def test_user_stream_update_for_new_order_does_not_update_status(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] + + event_message = { + "e": "executionReport", # Event type + "E": 1499405658658, # Event time + "s": order.trading_pair, # Symbol + "c": order.client_order_id, # Client order ID + "S": order.trade_type.name, # Side + "o": "LIMIT", # Order type + "f": "GTC", # Time in force + "q": "1.00000000", # Order quantity + "p": "0.10264410", # Order price + "reqAmt": "1000", # Requested cash amount (To be released) + "X": "NEW", # Current order status + "d": "1234567890123456789", # Execution ID + "i": order.exchange_order_id, # Order ID + "l": "0.00000000", # Last executed quantity + "r": "0", # unfilled quantity + "z": "0.00000000", # Cumulative filled quantity + "L": "0.00000000", # Last executed price + "V": "26105.5", # average executed price + "n": "0", # Commission amount + "N": None, # Commission asset + "u": True, # Is the trade normal, ignore for now + "w": True, # Is the order working? Stops will have + "m": False, # Is this trade the maker side? 
+ "O": 1499405658657, # Order creation time + "Z": "0.00000000" # Cumulative quote asset transacted quantity + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [[event_message], asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, event.timestamp) + self.assertEqual(order.order_type, event.type) + self.assertEqual(order.trading_pair, event.trading_pair) + self.assertEqual(order.amount, event.amount) + self.assertEqual(order.price, event.price) + self.assertEqual(order.client_order_id, event.order_id) + self.assertEqual(order.exchange_order_id, event.exchange_order_id) + self.assertTrue(order.is_open) + + self.assertTrue( + self._is_logged( + "INFO", + f"Created {order.order_type.name.upper()} {order.trade_type.name.upper()} order " + f"{order.client_order_id} for {order.amount} {order.trading_pair} " + f"at {Decimal('10000')}." + ) + ) + + def test_user_stream_update_for_cancelled_order(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] + + event_message = { + "e": "executionReport", # Event type + "E": 1499405658658, # Event time + "s": order.trading_pair, # Symbol + "c": order.client_order_id, # Client order ID + "S": order.trade_type.name, # Side + "o": "LIMIT", # Order type + "f": "GTC", # Time in force + "q": "1.00000000", # Order quantity + "p": "0.10264410", # Order price + "reqAmt": "1000", # Requested cash amount (To be released) + "X": "CANCELED", # Current order status + "d": "1234567890123456789", # Execution ID + "i": order.exchange_order_id, # Order ID + "l": "0.00000000", # Last executed quantity + "r": "0", # unfilled quantity + "z": "0.00000000", # Cumulative filled quantity + "L": "0.00000000", # Last executed price + "V": "26105.5", # average executed price + "n": "0", # Commission amount + "N": None, # Commission asset + "u": True, # Is the trade normal, ignore for now + "w": True, # Is the order working? Stops will have + "m": False, # Is this trade the maker side? 
+ "O": 1499405658657, # Order creation time + "Z": "0.00000000" # Cumulative quote asset transacted quantity + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [[event_message], asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + self.assertEqual(order.exchange_order_id, cancel_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_cancelled) + self.assertTrue(order.is_done) + + self.assertTrue( + self._is_logged("INFO", f"Successfully canceled order {order.client_order_id}.") + ) + + def test_user_stream_update_for_order_partial_fill(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] + + event_message = { + "e": "executionReport", # Event type + "E": 1499405658658, # Event time + "s": order.trading_pair, # Symbol + "c": order.client_order_id, # Client order ID + "S": order.trade_type.name, # Side + "o": "LIMIT", # Order type + "f": "GTC", # Time in force + "q": order.amount, # Order quantity + "p": order.price, # Order price + "reqAmt": "1000", # Requested cash amount (To be released) + "X": "PARTIALLY_FILLED", # Current order status + "d": "1234567890123456789", # Execution ID + "i": order.exchange_order_id, # Order ID + "l": "0.50000000", # Last executed quantity + "r": "0", # unfilled quantity + "z": "0.50000000", # Cumulative filled quantity + "L": "0.10250000", # Last executed price + "V": "26105.5", # average executed price + "n": "0.003", # Commission amount + "N": self.base_asset, # Commission asset + "u": True, # Is the trade normal, ignore for now + "w": True, # Is the order working? Stops will have + "m": False, # Is this trade the maker side? 
+ "O": 1499405658657, # Order creation time + "Z": "473.199" # Cumulative quote asset transacted quantity + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [[event_message], asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertTrue(order.is_open) + self.assertEqual(OrderState.PARTIALLY_FILLED, order.current_state) + + fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(Decimal(event_message["L"]), fill_event.price) + self.assertEqual(Decimal(event_message["l"]), fill_event.amount) + + self.assertEqual([TokenAmount(amount=Decimal(event_message["n"]), token=(event_message["N"]))], + fill_event.trade_fee.flat_fees) + + self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + + self.assertTrue( + self._is_logged("INFO", f"The {order.trade_type.name} order {order.client_order_id} amounting to " + f"{fill_event.amount}/{order.amount} {order.base_asset} has been filled " + f"at {Decimal('0.10250000')} {self.quote_asset}.") + ) + + def test_user_stream_update_for_order_fill(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id="OID1", + exchange_order_id="EOID1", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders["OID1"] + + event_message = { + "e": "executionReport", # Event type + "E": 1499405658658, # Event time + "s": order.trading_pair, # Symbol + "c": order.client_order_id, # Client order ID + "S": order.trade_type.name, # Side + "o": "LIMIT", # Order type + "f": "GTC", # Time in force + "q": order.amount, # Order quantity + "p": order.price, # Order price + "reqAmt": "1000", # Requested cash amount (To be released) + "X": "FILLED", # Current order status + "d": "1234567890123456789", # Execution ID + "i": order.exchange_order_id, # Order ID + "l": order.amount, # Last executed quantity + "r": "0", # unfilled quantity + "z": "0.50000000", # Cumulative filled quantity + "L": order.price, # Last executed price + "V": "26105.5", # average executed price + "n": "0.003", # Commission amount + "N": self.base_asset, # Commission asset + "u": True, # Is the trade normal, ignore for now + "w": True, # Is the order working? Stops will have + "m": False, # Is this trade the maker side? 
+ "O": 1499405658657, # Order creation time + "Z": "473.199" # Cumulative quote asset transacted quantity + } + + filled_event = { + "e": "ticketInfo", # Event type + "E": "1668693440976", # Event time + "s": self.ex_trading_pair, # Symbol + "q": "0.001639", # quantity + "t": "1668693440899", # time + "p": "441.0", # price + "T": "899062000267837441", # ticketId + "o": "899048013515737344", # orderId + "c": "1621910874883", # clientOrderId + "O": "899062000118679808", # matchOrderId + "a": "10086", # accountId + "A": 0, # ignore + "m": True, # isMaker + "S": order.trade_type.name # side SELL or BUY + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [[event_message], [filled_event], asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + match_price = Decimal(event_message["L"]) + match_size = Decimal(event_message["l"]) + self.assertEqual(match_price, fill_event.price) + self.assertEqual(match_size, fill_event.amount) + self.assertEqual([TokenAmount(amount=Decimal(event_message["n"]), token=(event_message["N"]))], + fill_event.trade_fee.flat_fees) + + buy_event: BuyOrderCompletedEvent = self.buy_order_completed_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) + self.assertEqual(order.client_order_id, buy_event.order_id) + self.assertEqual(order.base_asset, buy_event.base_asset) + self.assertEqual(order.quote_asset, buy_event.quote_asset) + self.assertEqual(order.amount, buy_event.base_asset_amount) + self.assertEqual(order.amount * match_price, buy_event.quote_asset_amount) + self.assertEqual(order.order_type, buy_event.order_type) + self.assertEqual(order.exchange_order_id, buy_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_filled) + self.assertTrue(order.is_done) + + self.assertTrue( + self._is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." 
+ ) + ) + + def test_user_stream_balance_update(self): + self.exchange._set_current_timestamp(1640780000) + + event_message = [{ + "e": "outboundAccountInfo", # Event type + "E": 1629969654753, # Event time + "T": True, # Can trade + "W": True, # Can withdraw + "D": True, # Can deposit + "B": [ # Balances changed + { + "a": self.base_asset, # Asset + "f": "10000", # Free amount + "l": "500" # Locked amount + } + ] + }] + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [event_message, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertEqual(Decimal("10000"), self.exchange.available_balances["ETH"]) + self.assertEqual(Decimal("10500"), self.exchange.get_balance("ETH")) + + def test_user_stream_raises_cancel_exception(self): + self.exchange._set_current_timestamp(1640780000) + + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError + self.exchange._user_stream_tracker._user_stream = mock_queue + + self.assertRaises( + asyncio.CancelledError, + self.async_run_with_timeout, + self.exchange._user_stream_event_listener()) + + @patch("hummingbot.connector.exchange.hashkey.hashkey_exchange.HashkeyExchange._sleep") + def test_user_stream_logs_errors(self, _): + self.exchange._set_current_timestamp(1640780000) + + incomplete_event = { + "e": "outboundAccountInfo", + "E": "1629969654753", + "T": True, + "W": True, + "D": True, + } + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [incomplete_event, asyncio.CancelledError] + self.exchange._user_stream_tracker._user_stream = mock_queue + + try: + self.async_run_with_timeout(self.exchange._user_stream_event_listener()) + except asyncio.CancelledError: + pass + + self.assertTrue( + self._is_logged( + "ERROR", + "Unexpected error in user stream listener loop." 
+ ) + ) diff --git a/test/hummingbot/connector/exchange/hashkey/test_hashkey_web_utils.py b/test/hummingbot/connector/exchange/hashkey/test_hashkey_web_utils.py new file mode 100644 index 0000000000..f23f4d41dd --- /dev/null +++ b/test/hummingbot/connector/exchange/hashkey/test_hashkey_web_utils.py @@ -0,0 +1,11 @@ +from unittest import TestCase + +from hummingbot.connector.exchange.hashkey import hashkey_constants as CONSTANTS, hashkey_web_utils as web_utils + + +class WebUtilsTests(TestCase): + def test_rest_url(self): + url = web_utils.rest_url(path_url=CONSTANTS.LAST_TRADED_PRICE_PATH, domain=CONSTANTS.DEFAULT_DOMAIN) + self.assertEqual('https://api-glb.hashkey.com/quote/v1/ticker/price', url) + url = web_utils.rest_url(path_url=CONSTANTS.LAST_TRADED_PRICE_PATH, domain='hashkey_global_testnet') + self.assertEqual('https://api.sim.bmuxdc.com/quote/v1/ticker/price', url) diff --git a/test/hummingbot/connector/exchange/htx/test_htx_exchange.py b/test/hummingbot/connector/exchange/htx/test_htx_exchange.py index 27f290214e..73040e9b98 100644 --- a/test/hummingbot/connector/exchange/htx/test_htx_exchange.py +++ b/test/hummingbot/connector/exchange/htx/test_htx_exchange.py @@ -526,7 +526,7 @@ def configure_canceled_order_status_response( def configure_erroneous_http_fill_trade_response( self, order: InFlightOrder, mock_api: aioresponses, callback: Optional[Callable] = lambda *args, **kwargs: None ) -> str: - url = web_utils.private_rest_url(path_url=CONSTANTS.ORDER_DETAIL_URL.format(order.exchange_order_id)) + url = web_utils.private_rest_url(path_url=CONSTANTS.ORDER_MATCHES_URL.format(order.exchange_order_id)) regex_url = re.compile(url + r"\?.*") mock_api.get(regex_url, status=400, callback=callback) return url diff --git a/test/hummingbot/connector/exchange/injective_v2/data_sources/test_injective_data_source.py b/test/hummingbot/connector/exchange/injective_v2/data_sources/test_injective_data_source.py index 73154c28be..33fd09a57e 100644 --- a/test/hummingbot/connector/exchange/injective_v2/data_sources/test_injective_data_source.py +++ b/test/hummingbot/connector/exchange/injective_v2/data_sources/test_injective_data_source.py @@ -190,6 +190,7 @@ def _usdt_usdc_market_info(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.0001"), min_quantity_tick_size=Decimal("100"), + min_notional=Decimal("1000000"), ) return native_market @@ -225,6 +226,7 @@ def _inj_usdt_market_info(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return native_market @@ -471,6 +473,7 @@ def _inj_usdt_market_info(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return native_market diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py index 31670419e9..9587d7c0c7 100644 --- a/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py @@ -55,6 +55,7 @@ def setUp(self) -> None: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) self._inj_usdt_market = InjectiveSpotMarket( 
market_id="0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0", # noqa: mock @@ -109,6 +110,12 @@ def test_min_quantity_tick_size(self): self.assertEqual(expected_value, market.min_quantity_tick_size()) + def test_min_notional(self): + market = self._inj_usdt_market + expected_value = market.native_market.min_notional / Decimal(f"1e{self._usdt_token.decimals}") + + self.assertEqual(expected_value, market.min_notional()) + class InjectiveDerivativeMarketTests(TestCase): @@ -145,6 +152,7 @@ def setUp(self) -> None: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("100"), min_quantity_tick_size=Decimal("0.0001"), + min_notional=Decimal("1000000"), ) self._inj_usdt_derivative_market = InjectiveDerivativeMarket( market_id="0x17ef48032cb24375ba7c2e39f384e56433bcab20cbee9a7357e4cba2eb00abe6", # noqa: mock @@ -205,6 +213,12 @@ def test_get_oracle_info(self): self.assertEqual(market.native_market.oracle_quote, market.oracle_quote()) self.assertEqual(market.native_market.oracle_type, market.oracle_type()) + def test_min_notional(self): + market = self._inj_usdt_derivative_market + expected_value = market.native_market.min_notional / Decimal(f"1e{self._usdt_token.decimals}") + + self.assertEqual(expected_value, market.min_notional()) + class InjectiveTokenTests(TestCase): diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py index 4fc87d482b..458d798821 100644 --- a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py @@ -514,6 +514,7 @@ def _spot_markets_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_delegated_account.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_delegated_account.py index 6460526488..9dfa25316d 100644 --- a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_delegated_account.py +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_delegated_account.py @@ -190,6 +190,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return ("INVALID_MARKET", response) @@ -234,6 +235,7 @@ def trading_rules_request_erroneous_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=None, min_quantity_tick_size=None, + min_notional=None, ) return {native_market.id: native_market} @@ -241,7 +243,8 @@ def trading_rules_request_erroneous_mock_response(self): @property def order_creation_request_successful_mock_response(self): return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", # noqa: mock" - "rawLog": "[]"} # noqa: mock + "rawLog": "[]", + "code": 0} # noqa: mock @property def balance_request_mock_response_for_base_and_quote(self): @@ -348,12 +351,14 @@ def expected_trading_rule(self): * Decimal(f"1e{market.base_token.decimals - 
market.quote_token.decimals}")) min_quantity_tick_size = market.min_quantity_tick_size * Decimal( f"1e{-market.base_token.decimals}") + min_notional = market.min_notional * Decimal(f"1e{-market.quote_token.decimals}") trading_rule = TradingRule( trading_pair=self.trading_pair, min_order_size=min_quantity_tick_size, min_price_increment=min_price_tick_size, min_base_amount_increment=min_quantity_tick_size, min_quote_amount_increment=min_price_tick_size, + min_notional_size=min_notional, ) return trading_rule @@ -425,6 +430,7 @@ def all_markets_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -1163,7 +1169,7 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -1206,7 +1212,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -2469,11 +2475,13 @@ def _msg_exec_simulation_mock_response(self) -> Any: def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock" - "rawLog": "[]"} # noqa: mock + "rawLog": "[]", + "code": 0} # noqa: mock def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", # noqa: mock" - "rawLog": "Error"} # noqa: mock + "rawLog": "Error", + "code": 11} # noqa: mock def _order_status_request_open_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: return { diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_offchain_vault.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_offchain_vault.py index 7ba3d884bf..1cdee16e00 100644 --- a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_offchain_vault.py +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange_for_offchain_vault.py @@ -180,6 +180,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return ("INVALID_MARKET", response) @@ -224,13 +225,14 @@ def trading_rules_request_erroneous_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=None, min_quantity_tick_size=None, + min_notional=None, ) return {native_market.id: native_market} @property def order_creation_request_successful_mock_response(self): - return {"txhash": self._transaction_hash, "rawLog": "[]"} # noqa: mock + return {"txhash": 
self._transaction_hash, "rawLog": "[]", "code": 0} # noqa: mock @property def balance_request_mock_response_for_base_and_quote(self): @@ -332,12 +334,14 @@ def expected_trading_rule(self): * Decimal(f"1e{market.base_token.decimals - market.quote_token.decimals}")) min_quantity_tick_size = market.min_quantity_tick_size * Decimal( f"1e{-market.base_token.decimals}") + min_notional = market.min_notional * Decimal(f"1e{-market.quote_token.decimals}") trading_rule = TradingRule( trading_pair=self.trading_pair, min_order_size=min_quantity_tick_size, min_price_increment=min_price_tick_size, min_base_amount_increment=min_quantity_tick_size, min_quote_amount_increment=min_price_tick_size, + min_notional_size=min_notional, ) return trading_rule @@ -409,6 +413,7 @@ def all_markets_mock_response(self): service_provider_fee=Decimal("0.4"), min_price_tick_size=Decimal("0.000000000000001"), min_quantity_tick_size=Decimal("1000000000000000"), + min_notional=Decimal("1000000"), ) return {native_market.id: native_market} @@ -1053,7 +1058,7 @@ def test_create_order_fails_and_raises_failure_event(self, mock_api): self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -1096,7 +1101,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( transaction_simulation_response) - response = {"txhash": "", "rawLog": "Error"} + response = {"txhash": "", "rawLog": "Error", "code": 11} mock_queue = AsyncMock() mock_queue.get.side_effect = partial( self._callback_wrapper_with_response, @@ -1828,10 +1833,10 @@ def _msg_exec_simulation_mock_response(self) -> Any: } def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: - return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "[]"} # noqa: mock + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "[]", "code": 0} # noqa: mock def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: - return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "Error"} # noqa: mock + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "Error", "code": 11} # noqa: mock def _order_status_request_open_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: return { diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_utils.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_utils.py index d5a3f511f4..75897d5f2e 100644 --- a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_utils.py +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_utils.py @@ -47,27 +47,28 @@ def test_testnet_network_config_creation(self): def test_custom_network_config_creation(self): network_config = InjectiveCustomNetworkMode( - lcd_endpoint='https://devnet.lcd.injective.dev', - tm_websocket_endpoint='wss://devnet.tm.injective.dev/websocket', - grpc_endpoint='devnet.injective.dev:9900', - grpc_exchange_endpoint='devnet.injective.dev:9910', - grpc_explorer_endpoint='devnet.injective.dev:9911', - 
chain_stream_endpoint='devnet.injective.dev:9999', - chain_id='injective-777', - env='devnet', + lcd_endpoint="https://devnet.lcd.injective.dev", + tm_websocket_endpoint="wss://devnet.tm.injective.dev/websocket", + grpc_endpoint="devnet.injective.dev:9900", + grpc_exchange_endpoint="devnet.injective.dev:9910", + grpc_explorer_endpoint="devnet.injective.dev:9911", + chain_stream_endpoint="devnet.injective.dev:9999", + chain_id="injective-777", + env="devnet", secure_connection=False, ) network = network_config.network() expected_network = Network.custom( - lcd_endpoint='https://devnet.lcd.injective.dev', - tm_websocket_endpoint='wss://devnet.tm.injective.dev/websocket', - grpc_endpoint='devnet.injective.dev:9900', - grpc_exchange_endpoint='devnet.injective.dev:9910', - grpc_explorer_endpoint='devnet.injective.dev:9911', - chain_stream_endpoint='devnet.injective.dev:9999', - chain_id='injective-777', - env='devnet' + lcd_endpoint="https://devnet.lcd.injective.dev", + tm_websocket_endpoint="wss://devnet.tm.injective.dev/websocket", + grpc_endpoint="devnet.injective.dev:9900", + grpc_exchange_endpoint="devnet.injective.dev:9910", + grpc_explorer_endpoint="devnet.injective.dev:9911", + chain_stream_endpoint="devnet.injective.dev:9999", + chain_id="injective-777", + env="devnet", + official_tokens_list_url="", ) self.assertEqual(expected_network.string(), network.string()) diff --git a/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py b/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py index 55548fa473..4a612a7641 100644 --- a/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py +++ b/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py @@ -564,7 +564,8 @@ def test_create_limit_order_successfully(self, mock_api): self.assertTrue( self._is_logged( "INFO", - f"Created LIMIT BUY order OID1 for {Decimal('100.000000')} {self.trading_pair}." + f"Created LIMIT BUY order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." ) ) @@ -620,7 +621,8 @@ def test_create_limit_maker_order_successfully(self, mock_api): self.assertTrue( self._is_logged( "INFO", - f"Created LIMIT_MAKER BUY order OID1 for {Decimal('100.000000')} {self.trading_pair}." + f"Created LIMIT_MAKER BUY order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." ) ) @@ -708,7 +710,8 @@ def test_create_market_order_successfully(self, mock_api, get_price_mock): self.assertTrue( self._is_logged( "INFO", - f"Created MARKET SELL order OID1 for {Decimal('100.000000')} {self.trading_pair}." + f"Created MARKET SELL order OID1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {None}." 
) ) @@ -2246,7 +2249,14 @@ def test_update_order_status_when_request_fails_marks_order_as_not_found(self, m self.assertEqual(1, self.exchange._order_tracker._order_not_found_records[order.client_order_id]) - def test_update_order_status_marks_order_with_no_exchange_id_as_not_found(self): + @aioresponses() + def test_update_order_status_marks_order_with_no_exchange_id_as_not_found(self, mock_api): + url_fills = web_utils.private_rest_url( + f"{CONSTANTS.FILLS_PATH_URL}?pageSize=500&startAt=") + regex_url_fills = re.compile(f"^{url_fills}".replace(".", r"\.").replace("?", r"\?")) + + mock_api.get(regex_url_fills, body=json.dumps({})) + update_event = MagicMock() update_event.wait.side_effect = asyncio.TimeoutError @@ -2330,7 +2340,8 @@ def test_user_stream_update_for_new_order_does_not_update_status(self): self._is_logged( "INFO", f"Created {order.order_type.name.upper()} {order.trade_type.name.upper()} order " - f"{order.client_order_id} for {order.amount} {order.trading_pair}." + f"{order.client_order_id} for {order.amount} {order.trading_pair} " + f"at {Decimal('10000')}." ) ) @@ -2464,7 +2475,7 @@ def test_user_stream_update_for_order_partial_fill(self): self.assertTrue( self._is_logged("INFO", f"The {order.trade_type.name} order {order.client_order_id} amounting to " - f"0.1/{order.amount} {order.base_asset} has been filled.") + f"0.1/{order.amount} {order.base_asset} has been filled at {Decimal('10010.5')} HBOT.") ) def test_user_stream_update_for_order_fill(self): @@ -2671,3 +2682,13 @@ def test_initial_status_dict(self): self.assertEqual(expected_initial_dict, status_dict) self.assertFalse(self.exchange.ready) + + def test_time_synchronizer_related_request_error_detection(self): + error_code = CONSTANTS.RET_CODE_AUTH_TIMESTAMP_ERROR + response = {"code": error_code, "msg": "Invalid KC-API-TIMESTAMP"} + exception = IOError(f"Error executing request GET https://someurl. HTTP status is 400. 
Error: {json.dumps(response)}") + self.assertTrue(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) + + error_code = CONSTANTS.RET_CODE_ORDER_NOT_EXIST_OR_NOT_ALLOW_TO_CANCEL + exception = IOError(f"{error_code} - Failed to cancel order because it was not found.") + self.assertFalse(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) diff --git a/test/hummingbot/connector/exchange/mexc/test_mexc_exchange.py b/test/hummingbot/connector/exchange/mexc/test_mexc_exchange.py index f7947ff5b5..1241c02d83 100644 --- a/test/hummingbot/connector/exchange/mexc/test_mexc_exchange.py +++ b/test/hummingbot/connector/exchange/mexc/test_mexc_exchange.py @@ -17,8 +17,10 @@ from hummingbot.connector.utils import get_new_client_order_id from hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.data_type.order_book import OrderBook +from hummingbot.core.data_type.order_book_row import OrderBookRow from hummingbot.core.data_type.trade_fee import DeductedFromReturnsTradeFee, TokenAmount, TradeFeeBase -from hummingbot.core.event.events import MarketOrderFailureEvent, OrderFilledEvent +from hummingbot.core.event.events import BuyOrderCreatedEvent, MarketOrderFailureEvent, OrderFilledEvent class MexcExchangeTests(AbstractExchangeConnectorTests.ExchangeConnectorTests): @@ -63,7 +65,7 @@ def all_symbols_request_mock_response(self): "symbols": [ { "symbol": self.exchange_symbol_for_tokens(self.base_asset, self.quote_asset), - "status": "ENABLED", + "status": "1", "baseAsset": self.base_asset, "baseSizePrecision": 1e-8, "quotePrecision": 8, @@ -130,7 +132,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: "symbols": [ { "symbol": self.exchange_symbol_for_tokens(self.base_asset, self.quote_asset), - "status": "ENABLED", + "status": "1", "baseAsset": self.base_asset, "baseSizePrecision": 1e-8, "quotePrecision": 8, @@ -159,7 +161,7 @@ def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: }, { "symbol": self.exchange_symbol_for_tokens("INVALID", "PAIR"), - "status": "ENABLED", + "status": "1", "baseAsset": "INVALID", "baseSizePrecision": 1e-8, "quotePrecision": 8, @@ -205,7 +207,7 @@ def trading_rules_request_mock_response(self): "symbols": [ { "symbol": self.exchange_symbol_for_tokens(self.base_asset, self.quote_asset), - "status": "ENABLED", + "status": "1", "baseAsset": self.base_asset, "baseSizePrecision": 1e-8, "quotePrecision": 8, @@ -253,7 +255,7 @@ def trading_rules_request_erroneous_mock_response(self): "symbols": [ { "symbol": self.exchange_symbol_for_tokens(self.base_asset, self.quote_asset), - "status": "ENABLED", + "status": "1", "baseAsset": self.base_asset, "baseAssetPrecision": 8, "quoteAsset": self.quote_asset, @@ -1016,19 +1018,9 @@ def test_client_order_id_on_order(self, mocked_nonce): def test_time_synchronizer_related_request_error_detection(self): exception = IOError("Error executing request POST https://api.mexc.com/api/v3/order. HTTP status is 400. " - "Error: {'code':-1021,'msg':'Timestamp for this request is outside of the recvWindow.'}") + "Error: {'code':700003,'msg':'Timestamp for this request is outside of the recvWindow.'}") self.assertTrue(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) - exception = IOError("Error executing request POST https://api.mexc.com/api/v3/order. HTTP status is 400. 
" - "Error: {'code':-1021,'msg':'Timestamp for this request was 1000ms ahead of the server's " - "time.'}") - self.assertTrue(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) - - exception = IOError("Error executing request POST https://api.mexc.com/api/v3/order. HTTP status is 400. " - "Error: {'code':-1022,'msg':'Timestamp for this request was 1000ms ahead of the server's " - "time.'}") - self.assertFalse(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) - exception = IOError("Error executing request POST https://api.mexc.com/api/v3/order. HTTP status is 400. " "Error: {'code':-1021,'msg':'Other error.'}") self.assertFalse(self.exchange._is_request_exception_related_to_time_synchronizer(exception)) @@ -1091,13 +1083,55 @@ def test_place_order_manage_server_overloaded_error_failure(self, mock_api): price=Decimal("2"), )) + @aioresponses() + def test_create_market_order_price_is_nan(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + resp = self.order_creation_request_successful_mock_response + url = self.order_creation_url + mock_api.post(url, + body=json.dumps(resp), + status=201, + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_book = OrderBook() + self.exchange.order_book_tracker._order_books[self.trading_pair] = order_book + order_book.apply_snapshot( + bids=[OrderBookRow(price=5.0, amount=10, update_id=1)], + asks=[OrderBookRow(price=5.1, amount=10, update_id=1)], + update_id=1, + ) + + order_id = self.place_buy_order( + amount=Decimal("1"), price=Decimal("NaN"), order_type=OrderType.MARKET + ) + self.async_run_with_timeout(request_sent_event.wait(), timeout=3) + + order_request = self._all_executed_requests(mock_api, url)[0] + request_data = order_request.kwargs["data"] + self.assertIn(order_id, self.exchange.in_flight_orders) + self.assertEqual("5.1000000", request_data["quoteOrderQty"]) + self.assertEqual("MARKET", request_data["type"]) + self.assertEqual("BUY", request_data["side"]) + + self.assertEqual(1, len(self.buy_order_created_logger.event_log)) + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.MARKET, create_event.type) + self.assertEqual(Decimal("1"), create_event.amount) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(resp["orderId"]), create_event.exchange_order_id) + def test_format_trading_rules__min_notional_present(self): trading_rules = [{ "symbol": "COINALPHAHBOT", "baseSizePrecision": 1e-8, "quotePrecision": 8, "baseAssetPrecision": 8, - "status": "ENABLED", + "status": "1", "quoteAmountPrecision": "0.001", "orderTypes": ["LIMIT", "MARKET"], "filters": [ diff --git a/test/hummingbot/connector/exchange/mexc/test_mexc_utils.py b/test/hummingbot/connector/exchange/mexc/test_mexc_utils.py index 0c8632c443..afa8ac5a13 100644 --- a/test/hummingbot/connector/exchange/mexc/test_mexc_utils.py +++ b/test/hummingbot/connector/exchange/mexc/test_mexc_utils.py @@ -30,14 +30,14 @@ def test_is_exchange_information_valid(self): self.assertFalse(utils.is_exchange_information_valid(invalid_info_2)) invalid_info_3 = { - "status": "ENABLED", + "status": "1", "permissions": ["MARGIN"], } self.assertFalse(utils.is_exchange_information_valid(invalid_info_3)) invalid_info_4 = { - 
"status": "ENABLED", + "status": "1", "permissions": ["SPOT"], } diff --git a/test/hummingbot/connector/exchange/okx/test_okx_exchange.py b/test/hummingbot/connector/exchange/okx/test_okx_exchange.py index 1b4cc4cc07..a99a248350 100644 --- a/test/hummingbot/connector/exchange/okx/test_okx_exchange.py +++ b/test/hummingbot/connector/exchange/okx/test_okx_exchange.py @@ -17,7 +17,7 @@ from hummingbot.connector.utils import get_new_client_order_id from hummingbot.core.data_type.in_flight_order import InFlightOrder from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase -from hummingbot.core.event.events import OrderCancelledEvent, OrderType, TradeType +from hummingbot.core.event.events import BuyOrderCreatedEvent, OrderCancelledEvent, OrderType, TradeType class OkxExchangeTests(AbstractExchangeConnectorTests.ExchangeConnectorTests): @@ -480,8 +480,12 @@ def validate_order_creation_request(self, order: InFlightOrder, request_call: Re self.assertEqual(order.trade_type.name.lower(), request_data["side"]) self.assertEqual(order.order_type.name.lower(), request_data["ordType"]) self.assertEqual(Decimal("100"), Decimal(request_data["sz"])) - self.assertEqual(Decimal("10000"), Decimal(request_data["px"])) self.assertEqual(order.client_order_id, request_data["clOrdId"]) + if request_data["ordType"] == "market": + self.assertNotIn("px", request_data) + self.assertEqual("base_ccy", request_data["tgtCcy"]) + else: + self.assertEqual(Decimal("10000"), Decimal(request_data["px"])) def validate_order_cancelation_request(self, order: InFlightOrder, request_call: RequestCall): request_data = json.loads(request_call.kwargs["data"]) @@ -771,6 +775,66 @@ def order_event_for_canceled_order_websocket_update(self, order: InFlightOrder): } def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + "arg": { + "channel": "orders", + "uid": "77982378738415879", + "instType": "SPOT", + "instId": self.exchange_symbol_for_tokens(order.base_asset, order.quote_asset) + }, + "data": [ + { + "instType": "SPOT", + "instId": self.exchange_symbol_for_tokens(order.base_asset, order.quote_asset), + "ccy": "BTC", + "ordId": order.exchange_order_id or "EOID1", + "clOrdId": order.client_order_id, + "tag": "", + "px": str(order.price), + "sz": str(order.amount), + "notionalUsd": "", + "ordType": "limit", + "side": order.trade_type.name.lower(), + "posSide": "long", + "tdMode": "cross", + "tgtCcy": "", + "fillSz": "", + "fillPx": "", + "tradeId": "", + "accFillSz": "323", + "fillNotionalUsd": "", + "fillTime": "0", + "fillFee": str(self.expected_fill_fee.flat_fees[0].amount), + "fillFeeCcy": self.expected_fill_fee.flat_fees[0].token, + "execType": "T", + "state": "filled", + "avgPx": "0", + "lever": "20", + "tpTriggerPx": "0", + "tpTriggerPxType": "last", + "tpOrdPx": "20", + "slTriggerPx": "0", + "slTriggerPxType": "last", + "slOrdPx": "20", + "feeCcy": "", + "fee": "", + "rebateCcy": "", + "rebate": "", + "tgtCcy": "", + "source": "", + "pnl": "", + "category": "", + "uTime": "1597026383085", + "cTime": "1597026383085", + "reqId": "", + "amendResult": "", + "code": "0", + "msg": "" + } + ] + } + + def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): return { "arg": { "channel": "orders", @@ -830,9 +894,6 @@ def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): ] } - def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): - return {} - @patch("hummingbot.connector.utils.get_tracking_nonce") def 
test_client_order_id_on_order(self, mocked_nonce): mocked_nonce.return_value = 9 @@ -1182,3 +1243,44 @@ def test_cancel_order_successfully(self, mock_api): else: self.assertIn(order.client_order_id, self.exchange.in_flight_orders) self.assertTrue(order.is_pending_cancel_confirmation) + + @aioresponses() + def test_create_buy_market_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + url = self.order_creation_url + + creation_response = self.order_creation_request_successful_mock_response + + mock_api.post(url, + body=json.dumps(creation_response), + callback=lambda *args, **kwargs: request_sent_event.set()) + + order_id = self.place_buy_order(order_type=OrderType.MARKET) + self.async_run_with_timeout(request_sent_event.wait()) + + order_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_request) + self.assertIn(order_id, self.exchange.in_flight_orders) + self.validate_order_creation_request( + order=self.exchange.in_flight_orders[order_id], + request_call=order_request) + + create_event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, create_event.timestamp) + self.assertEqual(self.trading_pair, create_event.trading_pair) + self.assertEqual(OrderType.MARKET, create_event.type) + self.assertEqual(Decimal("100"), create_event.amount) + self.assertEqual(Decimal("10000"), create_event.price) + self.assertEqual(order_id, create_event.order_id) + self.assertEqual(str(self.expected_exchange_order_id), create_event.exchange_order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Created {OrderType.MARKET.name} {TradeType.BUY.name} order {order_id} for " + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000')}." 
+ ) + ) diff --git a/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py b/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py index 8161081640..3c7f843ba5 100644 --- a/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py +++ b/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py @@ -772,7 +772,7 @@ def test_create_buy_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}.", + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}.", ) ) @@ -804,7 +804,7 @@ def test_create_sell_limit_order_successfully(self, mock_api): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000000')} {self.trading_pair}.", + f"{Decimal('100.000000')} {self.trading_pair} at {Decimal('10000.0000')}.", ) ) diff --git a/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py b/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py index 7e6e7d3b04..5cfbbddd11 100644 --- a/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py +++ b/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py @@ -769,7 +769,8 @@ def test_create_limit_order_successfully(self, mock_api): self.assertEqual("ABC1", create_event.order_id) self.assertTrue( - self._is_logged("INFO", f"Created LIMIT BUY order ABC1 for {Decimal('100.000000')} {self.trading_pair}.") + self._is_logged("INFO", f"Created LIMIT BUY order ABC1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}.") ) @aioresponses() @@ -821,7 +822,8 @@ def test_create_limit_maker_order_successfully(self, mock_api): self.assertTrue( self._is_logged( - "INFO", f"Created LIMIT_MAKER BUY order ABC1 for {Decimal('100.000000')} {self.trading_pair}." + "INFO", f"Created LIMIT_MAKER BUY order ABC1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000.0000')}." 
) ) @@ -873,7 +875,8 @@ def test_create_market_order_successfully(self, mock_api, get_price_mock): self.assertEqual("ABC1", create_event.order_id) self.assertTrue( - self._is_logged("INFO", f"Created MARKET SELL order ABC1 for {Decimal('100.000000')} {self.trading_pair}.") + self._is_logged("INFO", f"Created MARKET SELL order ABC1 for {Decimal('100.000000')} {self.trading_pair} " + f"at {Decimal('10000')}.") ) @aioresponses() @@ -1406,7 +1409,7 @@ def test_user_stream_update_for_order_partial_fill(self): self._is_logged( "INFO", f"The {order.trade_type.name} order {order.client_order_id} amounting to " - f"{fill_event.amount}/{order.amount} {order.base_asset} has been filled.", + f"{fill_event.amount}/{order.amount} {order.base_asset} has been filled at {Decimal('25000')} USDC.", ) ) diff --git a/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_order_book_data_source.py b/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_order_book_data_source.py index 3cbfc52bd9..d5cc743a16 100644 --- a/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_order_book_data_source.py +++ b/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_order_book_data_source.py @@ -37,6 +37,7 @@ def setUp(self) -> None: xrpl_secret_key="", wss_node_url="wss://sample.com", wss_second_node_url="wss://sample.com", + wss_third_node_url="wss://sample.com", trading_pairs=[self.trading_pair], trading_required=False, ) @@ -262,6 +263,12 @@ def test_request_order_book_snapshot_exception(self, mock_book_offers): self.assertTrue("Error fetching order book snapshot" in str(context.exception)) + def test_fetch_order_book_side_exception(self): + self.data_source._xrpl_client.request.side_effect = TimeoutError + + with self.assertRaises(TimeoutError): + self.async_run_with_timeout(self.data_source.fetch_order_book_side(self.data_source._xrpl_client, 12345, {}, {}, 50)) + @patch("hummingbot.connector.exchange.xrpl.xrpl_api_order_book_data_source.XRPLAPIOrderBookDataSource._get_client") def test_process_websocket_messages_for_pair(self, mock_get_client): mock_client = AsyncMock() diff --git a/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_user_stream_data_source.py b/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_user_stream_data_source.py index bbabc82984..f88b8de289 100644 --- a/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_user_stream_data_source.py +++ b/test/hummingbot/connector/exchange/xrpl/test_xrpl_api_user_stream_data_source.py @@ -36,6 +36,7 @@ def setUp(self) -> None: xrpl_secret_key="", wss_node_url="wss://sample.com", wss_second_node_url="wss://sample.com", + wss_third_node_url="wss://sample.com", trading_pairs=[self.trading_pair], trading_required=False, ) diff --git a/test/hummingbot/connector/exchange/xrpl/test_xrpl_exchange.py b/test/hummingbot/connector/exchange/xrpl/test_xrpl_exchange.py index 5b9f18ec3b..a5e052a0a8 100644 --- a/test/hummingbot/connector/exchange/xrpl/test_xrpl_exchange.py +++ b/test/hummingbot/connector/exchange/xrpl/test_xrpl_exchange.py @@ -5,7 +5,8 @@ from typing import Awaitable from unittest.mock import AsyncMock, MagicMock, patch -from xrpl.models import Request, Response, Transaction +from xrpl.asyncio.clients import XRPLRequestFailureException +from xrpl.models import OfferCancel, Request, Response, Transaction from xrpl.models.requests.request import RequestMethod from xrpl.models.response import ResponseStatus, ResponseType from xrpl.models.transactions.types import TransactionType @@ -49,6 +50,7 @@ def setUp(self) -> None: xrpl_secret_key="", 
wss_node_url="wss://sample.com", wss_second_node_url="wss://sample.com", + wss_third_node_url="wss://sample.com", trading_pairs=[self.trading_pair, self.trading_pair_usd], trading_required=False, ) @@ -126,9 +128,9 @@ def setUp(self) -> None: self.connector._user_stream_tracker = UserStreamTracker(data_source=self.user_stream_source) - self.connector._xrpl_client = AsyncMock() - self.connector._xrpl_client.__aenter__.return_value = self.connector._xrpl_client - self.connector._xrpl_client.__aexit__.return_value = None + self.connector._xrpl_query_client = AsyncMock() + self.connector._xrpl_query_client.__aenter__.return_value = self.connector._xrpl_query_client + self.connector._xrpl_query_client.__aexit__.return_value = None self.connector._xrpl_place_order_client = AsyncMock() self.connector._xrpl_place_order_client.__aenter__.return_value = self.connector._xrpl_place_order_client @@ -663,150 +665,231 @@ def _client_response_account_info(self): return resp + def _client_response_account_lines(self): + resp = Response( + status=ResponseStatus.SUCCESS, + result={ + "account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "ledger_hash": "6626B7AC7E184B86EE29D8B9459E0BC0A56E12C8DA30AE747051909CF16136D3", # noqa: mock + "ledger_index": 89692233, + "validated": True, + "limit": 200, + "lines": [ + { + "account": "rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B", # noqa: mock + "balance": "0.9957725256649131", + "currency": "USD", + "limit": "0", + "limit_peer": "0", + "quality_in": 0, + "quality_out": 0, + "no_ripple": True, + "no_ripple_peer": False, + }, + { + "account": "rcEGREd8NmkKRE8GE424sksyt1tJVFZwu", # noqa: mock + "balance": "2.981957518895808", + "currency": "5553444300000000000000000000000000000000", # noqa: mock + "limit": "0", + "limit_peer": "0", + "quality_in": 0, + "quality_out": 0, + "no_ripple": True, + "no_ripple_peer": False, + }, + { + "account": "rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq", # noqa: mock + "balance": "0.011094399237562", + "currency": "USD", + "limit": "0", + "limit_peer": "0", + "quality_in": 0, + "quality_out": 0, + "no_ripple": True, + "no_ripple_peer": False, + }, + { + "account": "rpakCr61Q92abPXJnVboKENmpKssWyHpwu", # noqa: mock + "balance": "104.9021857197376", + "currency": "457175696C69627269756D000000000000000000", # noqa: mock + "limit": "0", + "limit_peer": "0", + "quality_in": 0, + "quality_out": 0, + "no_ripple": True, + "no_ripple_peer": False, + }, + { + "account": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "balance": "35.95165691730148", + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "limit": "1000000000", + "limit_peer": "0", + "quality_in": 0, + "quality_out": 0, + "no_ripple": True, + "no_ripple_peer": False, + }, + ], + }, # noqa: mock + id="account_lines_144811", + type=ResponseType.RESPONSE, + ) + + return resp + def _client_response_account_objects(self): resp = Response( status=ResponseStatus.SUCCESS, result={ - "account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock "account_objects": [ { "Balance": { - "currency": "5553444300000000000000000000000000000000", # noqa: mock - "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock + "currency": "5553444300000000000000000000000000000000", # noqa: mock + "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock "value": "2.981957518895808", }, "Flags": 1114112, "HighLimit": { - "currency": "5553444300000000000000000000000000000000", # noqa: mock - "issuer": "rcEGREd8NmkKRE8GE424sksyt1tJVFZwu", # noqa: mock 
+ "currency": "5553444300000000000000000000000000000000", # noqa: mock + "issuer": "rcEGREd8NmkKRE8GE424sksyt1tJVFZwu", # noqa: mock "value": "0", }, "HighNode": "f9", "LedgerEntryType": "RippleState", "LowLimit": { - "currency": "5553444300000000000000000000000000000000", # noqa: mock - "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "currency": "5553444300000000000000000000000000000000", # noqa: mock + "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock "value": "0", }, "LowNode": "0", - "PreviousTxnID": "C6EFE5E21ABD5F457BFCCE6D5393317B90821F443AD41FF193620E5980A52E71", # noqa: mock + "PreviousTxnID": "C6EFE5E21ABD5F457BFCCE6D5393317B90821F443AD41FF193620E5980A52E71", # noqa: mock "PreviousTxnLgrSeq": 86277627, - "index": "55049B8164998B0566FC5CDB3FC7162280EFE5A84DB9333312D3DFF98AB52380", # noqa: mock + "index": "55049B8164998B0566FC5CDB3FC7162280EFE5A84DB9333312D3DFF98AB52380", # noqa: mock }, { - "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock - "BookDirectory": "C73FAC6C294EBA5B9E22A8237AAE80725E85372510A6CA794F10652F287D59AD", # noqa: mock + "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "BookDirectory": "C73FAC6C294EBA5B9E22A8237AAE80725E85372510A6CA794F10652F287D59AD", # noqa: mock "BookNode": "0", "Flags": 131072, "LedgerEntryType": "Offer", "OwnerNode": "0", - "PreviousTxnID": "44038CD94CDD0A6FD7912F788FA5FBC575A3C44948E31F4C21B8BC3AA0C2B643", # noqa: mock + "PreviousTxnID": "44038CD94CDD0A6FD7912F788FA5FBC575A3C44948E31F4C21B8BC3AA0C2B643", # noqa: mock "PreviousTxnLgrSeq": 89078756, "Sequence": 84439998, "TakerGets": "499998", "TakerPays": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock "value": "2.307417192565501", }, - "index": "BE4ACB6610B39F2A9CD1323F63D479177917C02AA8AF2122C018D34AAB6F4A35", # noqa: mock + "index": "BE4ACB6610B39F2A9CD1323F63D479177917C02AA8AF2122C018D34AAB6F4A35", # noqa: mock }, { "Balance": { "currency": "USD", - "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock + "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock "value": "0.011094399237562", }, "Flags": 1114112, - "HighLimit": {"currency": "USD", "issuer": "rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq", "value": "0"}, # noqa: mock + "HighLimit": { + "currency": "USD", + "issuer": "rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq", + "value": "0", + }, # noqa: mock "HighNode": "22d3", "LedgerEntryType": "RippleState", - "LowLimit": {"currency": "USD", "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", "value": "0"}, # noqa: mock + "LowLimit": { + "currency": "USD", + "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", + "value": "0", + }, # noqa: mock "LowNode": "0", - "PreviousTxnID": "1A9E685EA694157050803B76251C0A6AFFCF1E69F883BF511CF7A85C3AC002B8", # noqa: mock + "PreviousTxnID": "1A9E685EA694157050803B76251C0A6AFFCF1E69F883BF511CF7A85C3AC002B8", # noqa: mock "PreviousTxnLgrSeq": 85648064, - "index": "C510DDAEBFCE83469032E78B9F41D352DABEE2FB454E6982AA5F9D4ECC4D56AA", # noqa: mock + "index": "C510DDAEBFCE83469032E78B9F41D352DABEE2FB454E6982AA5F9D4ECC4D56AA", # noqa: mock }, { - "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock - "BookDirectory": "C73FAC6C294EBA5B9E22A8237AAE80725E85372510A6CA794F10659A9DE833CA", # noqa: mock + "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "BookDirectory": 
"C73FAC6C294EBA5B9E22A8237AAE80725E85372510A6CA794F10659A9DE833CA", # noqa: mock "BookNode": "0", "Flags": 131072, "LedgerEntryType": "Offer", "OwnerNode": "0", - "PreviousTxnID": "262201134A376F2E888173680EDC4E30E2C07A6FA94A8C16603EB12A776CBC66", # noqa: mock + "PreviousTxnID": "262201134A376F2E888173680EDC4E30E2C07A6FA94A8C16603EB12A776CBC66", # noqa: mock "PreviousTxnLgrSeq": 89078756, "Sequence": 84439997, "TakerGets": "499998", "TakerPays": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock "value": "2.307647957361237", }, - "index": "D6F2B37690FA7540B7640ACC61AA2641A6E803DAF9E46CC802884FA5E1BF424E", # noqa: mock + "index": "D6F2B37690FA7540B7640ACC61AA2641A6E803DAF9E46CC802884FA5E1BF424E", # noqa: mock }, { - "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock - "BookDirectory": "5C8970D155D65DB8FF49B291D7EFFA4A09F9E8A68D9974B25A07B39757FA194D", # noqa: mock + "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "BookDirectory": "5C8970D155D65DB8FF49B291D7EFFA4A09F9E8A68D9974B25A07B39757FA194D", # noqa: mock "BookNode": "0", "Flags": 131072, "LedgerEntryType": "Offer", "OwnerNode": "0", - "PreviousTxnID": "254F74BF0E5A2098DDE998609F4E8697CCF6A7FD61D93D76057467366A18DA24", # noqa: mock + "PreviousTxnID": "254F74BF0E5A2098DDE998609F4E8697CCF6A7FD61D93D76057467366A18DA24", # noqa: mock "PreviousTxnLgrSeq": 89078757, "Sequence": 84440000, "TakerGets": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock "value": "2.30649459472761", }, "TakerPays": "499999", - "index": "D8F57C7C230FA5DE98E8FEB6B75783693BDECAD1266A80538692C90138E7BADE", # noqa: mock + "index": "D8F57C7C230FA5DE98E8FEB6B75783693BDECAD1266A80538692C90138E7BADE", # noqa: mock }, { "Balance": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rrrrrrrrrrrrrrrrrrrrBZbvji", # noqa: mock "value": "47.21480375660969", }, "Flags": 1114112, "HighLimit": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock "value": "0", }, "HighNode": "3799", "LedgerEntryType": "RippleState", "LowLimit": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock "value": "1000000000", }, "LowNode": "0", - "PreviousTxnID": "E1260EC17725167D0407F73F6B73D7DAF1E3037249B54FC37F2E8B836703AB95", # noqa: mock + "PreviousTxnID": "E1260EC17725167D0407F73F6B73D7DAF1E3037249B54FC37F2E8B836703AB95", # noqa: mock "PreviousTxnLgrSeq": 89077268, - "index": "E1C84325F137AD05CB78F59968054BCBFD43CB4E70F7591B6C3C1D1C7E44C6FC", # noqa: mock + "index": "E1C84325F137AD05CB78F59968054BCBFD43CB4E70F7591B6C3C1D1C7E44C6FC", # noqa: mock }, { - "Account": 
"r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock - "BookDirectory": "5C8970D155D65DB8FF49B291D7EFFA4A09F9E8A68D9974B25A07B2FFFC6A7DA8", # noqa: mock + "Account": "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", # noqa: mock + "BookDirectory": "5C8970D155D65DB8FF49B291D7EFFA4A09F9E8A68D9974B25A07B2FFFC6A7DA8", # noqa: mock "BookNode": "0", "Flags": 131072, "LedgerEntryType": "Offer", "OwnerNode": "0", - "PreviousTxnID": "819FF36C6F44F3F858B25580F1E3A900F56DCC59F2398626DB35796AF9E47E7A", # noqa: mock + "PreviousTxnID": "819FF36C6F44F3F858B25580F1E3A900F56DCC59F2398626DB35796AF9E47E7A", # noqa: mock "PreviousTxnLgrSeq": 89078756, "Sequence": 84439999, "TakerGets": { - "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock - "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock + "currency": "534F4C4F00000000000000000000000000000000", # noqa: mock + "issuer": "rsoLo2S1kiGeCcn6hCUXVrCpGMWLrRrLZz", # noqa: mock "value": "2.307186473918109", }, "TakerPays": "499999", - "index": "ECF76E93DBD7923D0B352A7719E5F9BBF6A43D5BA80173495B0403C646184301", # noqa: mock + "index": "ECF76E93DBD7923D0B352A7719E5F9BBF6A43D5BA80173495B0403C646184301", # noqa: mock }, ], - "ledger_hash": "5A76A3A3D115DBC7CE0E4D9868D1EA15F593C8D74FCDF1C0153ED003B5621671", # noqa: mock + "ledger_hash": "5A76A3A3D115DBC7CE0E4D9868D1EA15F593C8D74FCDF1C0153ED003B5621671", # noqa: mock "ledger_index": 89078774, "limit": 200, "validated": True, @@ -876,21 +959,25 @@ def test_get_new_order_book_successful(self): self.assertEqual(0.22452700389932698, asks[0].price) self.assertEqual(91.846106, asks[0].amount) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_sign") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_submit") @patch("hummingbot.connector.client_order_tracker.ClientOrderTracker.process_order_update") - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") def test_place_limit_order( self, - network_mock, process_order_update_mock, submit_mock, sign_mock, autofill_mock, verify_transaction_result_mock, + mock_async_websocket_client ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} verify_transaction_result_mock.return_value = True, {} sign_mock.return_value = Transaction( @@ -963,28 +1050,31 @@ def test_place_limit_order( self.assertEqual(order_id.split("-")[0], "hbot") - self.assertTrue(network_mock.called) self.assertTrue(process_order_update_mock.called) self.assertTrue(verify_transaction_result_mock.called) self.assertTrue(submit_mock.called) self.assertTrue(autofill_mock.called) self.assertTrue(sign_mock.called) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_sign") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_submit") 
@patch("hummingbot.connector.client_order_tracker.ClientOrderTracker.process_order_update") - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") def test_place_market_order( self, - network_mock, process_order_update_mock, submit_mock, sign_mock, autofill_mock, verify_transaction_result_mock, + mock_async_websocket_client ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} verify_transaction_result_mock.return_value = True, {} sign_mock.return_value = Transaction( @@ -995,6 +1085,21 @@ def test_place_market_order( status=ResponseStatus.SUCCESS, result={"engine_result": "tesSUCCESS", "engine_result_message": "something"} ) + class MockGetPriceReturn: + def __init__(self, result_price): + self.result_price = result_price + + # get_price_for_volume_mock.return_value = Decimal("1") + self.connector.order_books[self.trading_pair] = MagicMock() + self.connector.order_books[self.trading_pair].get_price_for_volume = MagicMock( + return_value=MockGetPriceReturn(result_price=Decimal("1")) + ) + + self.connector.order_books[self.trading_pair_usd] = MagicMock() + self.connector.order_books[self.trading_pair_usd].get_price_for_volume = MagicMock( + return_value=MockGetPriceReturn(result_price=Decimal("1")) + ) + self.async_run_with_timeout( self.connector._place_order( "hbot", self.trading_pair, Decimal("1"), TradeType.BUY, OrderType.MARKET, Decimal("1") @@ -1037,7 +1142,6 @@ def test_place_market_order( self.assertEqual(order_id.split("-")[0], "hbot") - self.assertTrue(network_mock.called) self.assertTrue(process_order_update_mock.called) self.assertTrue(verify_transaction_result_mock.called) self.assertTrue(submit_mock.called) @@ -1045,8 +1149,8 @@ def test_place_market_order( self.assertTrue(sign_mock.called) @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.autofill", new_callable=MagicMock) - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.submit", new_callable=MagicMock) - def test_place_order_exception_handling_not_found_market(self, submit_mock, autofill_mock): + # @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.submit", new_callable=MagicMock) + def test_place_order_exception_handling_not_found_market(self, autofill_mock): with self.assertRaises(Exception) as context: self.async_run_with_timeout( self.connector._place_order( @@ -1062,12 +1166,14 @@ def test_place_order_exception_handling_not_found_market(self, submit_mock, auto # Verify the exception was raised and contains the expected message self.assertTrue("Market NOT_FOUND not found in markets list" in str(context.exception)) - # Ensure the submit method was not called due to the exception in autofill - submit_mock.assert_not_called() - + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.autofill", new_callable=MagicMock) - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.submit", new_callable=MagicMock) - def test_place_order_exception_handling_autofill(self, submit_mock, autofill_mock): + # @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.submit", new_callable=MagicMock) + def test_place_order_exception_handling_autofill(self, autofill_mock, mock_async_websocket_client): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + 
mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + # Simulate an exception during the autofill operation autofill_mock.side_effect = Exception("Test exception during autofill") @@ -1088,9 +1194,7 @@ def test_place_order_exception_handling_autofill(self, submit_mock, autofill_moc "Order None (test_order) creation failed: Test exception during autofill" in str(context.exception) ) - # Ensure the submit method was not called due to the exception in autofill - submit_mock.assert_not_called() - + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange_py_base.ExchangePyBase._sleep") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @@ -1107,7 +1211,12 @@ def test_place_order_exception_handling_failed_verify( autofill_mock, verify_transaction_result_mock, sleep_mock, + mock_async_websocket_client ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} verify_transaction_result_mock.return_value = False, {} sign_mock.return_value = Transaction( @@ -1136,6 +1245,7 @@ def test_place_order_exception_handling_failed_verify( in str(context.exception) ) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange_py_base.ExchangePyBase._sleep") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @@ -1152,7 +1262,12 @@ def test_place_order_exception_handling_none_verify_resp( autofill_mock, verify_transaction_result_mock, sleep_mock, + mock_async_websocket_client ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} verify_transaction_result_mock.return_value = False, None sign_mock.return_value = Transaction( @@ -1178,6 +1293,7 @@ def test_place_order_exception_handling_none_verify_resp( # # Verify the exception was raised and contains the expected message self.assertTrue("Order 1-1 (hbot) creation failed: Failed to place order hbot (1-1)" in str(context.exception)) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange_py_base.ExchangePyBase._sleep") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @@ -1194,7 +1310,12 @@ def test_place_order_exception_handling_failed_submit( autofill_mock, verify_transaction_result_mock, sleep_mock, + mock_async_websocket_client ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} verify_transaction_result_mock.return_value = False, None sign_mock.return_value = Transaction( @@ -1220,17 +1341,21 @@ def test_place_order_exception_handling_failed_submit( # # Verify the exception was raised and contains the expected message self.assertTrue("Order 1-1 (hbot) creation failed: Failed to place order hbot (1-1)" in 
str(context.exception)) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_sign") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_submit") - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") def test_place_cancel( self, - network_mock, submit_mock, sign_mock, autofill_mock, + mock_async_websocket_client, ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + autofill_mock.return_value = {} sign_mock.return_value = Transaction( sequence=1, last_ledger_sequence=1, account="r1234", transaction_type=TransactionType.OFFER_CREATE @@ -1251,17 +1376,16 @@ def test_place_cancel( ) self.async_run_with_timeout(self.connector._place_cancel("hbot", tracked_order=in_flight_order)) - self.assertTrue(network_mock.called) self.assertTrue(submit_mock.called) self.assertTrue(autofill_mock.called) self.assertTrue(sign_mock.called) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_sign") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_submit") @patch("hummingbot.connector.client_order_tracker.ClientOrderTracker.process_order_update") - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") @patch("hummingbot.connector.client_order_tracker.ClientOrderTracker.process_trade_update") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.process_trade_fills") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._request_order_status") @@ -1270,13 +1394,17 @@ def test_place_order_and_process_update( request_order_status_mock, process_trade_fills_mock, process_trade_update_mock, - network_mock, process_order_update_mock, submit_mock, sign_mock, autofill_mock, verify_transaction_result_mock, + mock_async_websocket_client, ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + request_order_status_mock.return_value = OrderUpdate( trading_pair=self.trading_pair, new_state=OrderState.FILLED, @@ -1307,7 +1435,6 @@ def test_place_order_and_process_update( exchange_order_id = self.async_run_with_timeout( self.connector._place_order_and_process_update(order=in_flight_order) ) - self.assertTrue(network_mock.called) self.assertTrue(submit_mock.called) self.assertTrue(autofill_mock.called) self.assertTrue(sign_mock.called) @@ -1316,6 +1443,7 @@ def test_place_order_and_process_update( self.assertTrue(process_trade_fills_mock.called) self.assertEqual("1-1", exchange_order_id) + @patch('hummingbot.connector.exchange.xrpl.xrpl_exchange.AsyncWebsocketClient') @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._verify_transaction_result") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_autofill") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.tx_sign") @@ -1332,7 +1460,12 @@ def 
test_execute_order_cancel_and_process_update( sign_mock, autofill_mock, verify_transaction_result_mock, + mock_async_websocket_client, ): + # Create a mock client to be returned by the context manager + mock_client = AsyncMock() + mock_async_websocket_client.return_value.__aenter__.return_value = mock_client + request_order_status_mock.return_value = OrderUpdate( trading_pair=self.trading_pair, new_state=OrderState.FILLED, @@ -1365,10 +1498,18 @@ def test_execute_order_cancel_and_process_update( result = self.async_run_with_timeout( self.connector._execute_order_cancel_and_process_update(order=in_flight_order) ) - self.assertTrue(network_mock.called) - self.assertTrue(submit_mock.called) - self.assertTrue(autofill_mock.called) - self.assertTrue(sign_mock.called) + self.assertTrue(process_order_update_mock.called) + self.assertTrue(result) + + request_order_status_mock.return_value = OrderUpdate( + trading_pair=self.trading_pair, + new_state=OrderState.OPEN, + update_timestamp=1, + ) + + result = self.async_run_with_timeout( + self.connector._execute_order_cancel_and_process_update(order=in_flight_order) + ) self.assertTrue(process_order_update_mock.called) self.assertTrue(result) @@ -1509,37 +1650,36 @@ def side_effect_function(arg: Request): return self._client_response_account_info() elif arg.method == RequestMethod.ACCOUNT_OBJECTS: return self._client_response_account_objects() + elif arg.method == RequestMethod.ACCOUNT_LINES: + return self._client_response_account_lines() else: raise ValueError("Invalid method") - self.connector._xrpl_client.request.side_effect = side_effect_function + self.connector._xrpl_query_client.request.side_effect = side_effect_function self.async_run_with_timeout(self.connector._update_balances()) - self.assertTrue(network_mock.called) self.assertTrue(get_account_mock.called) self.assertEqual(self.connector._account_balances["XRP"], Decimal("57.030864")) self.assertEqual(self.connector._account_balances["USD"], Decimal("0.011094399237562")) - self.assertEqual(self.connector._account_balances["SOLO"], Decimal("47.21480375660969")) + self.assertEqual(self.connector._account_balances["SOLO"], Decimal("35.95165691730148")) self.assertEqual(self.connector._account_available_balances["XRP"], Decimal("32.030868")) self.assertEqual(self.connector._account_available_balances["USD"], Decimal("0.011094399237562")) - self.assertEqual(self.connector._account_available_balances["SOLO"], Decimal("42.601122687963971")) + self.assertEqual(self.connector._account_available_balances["SOLO"], Decimal("31.337975848655761")) - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") - def test_make_trading_rules_request(self, network_mock): + def test_make_trading_rules_request(self): def side_effect_function(arg: Request): if arg.method == RequestMethod.ACCOUNT_INFO: return self._client_response_account_info_issuer() else: raise ValueError("Invalid method") - self.connector._xrpl_client.request.side_effect = side_effect_function + self.connector._xrpl_query_client.request.side_effect = side_effect_function result = self.async_run_with_timeout(self.connector._make_trading_rules_request()) - self.assertTrue(network_mock.called) self.assertEqual( result["SOLO-XRP"]["base_currency"].currency, "534F4C4F00000000000000000000000000000000" ) # noqa: mock @@ -1557,7 +1697,6 @@ def side_effect_function(arg: Request): Decimal("9.99999999999999954748111825886258685613938723690807819366455078125E-7"), # noqa: mock ) - 
self.assertTrue(network_mock.called) self.assertEqual( result["SOLO-USD"]["base_currency"].currency, "534F4C4F00000000000000000000000000000000" # noqa: mock ) @@ -1598,13 +1737,9 @@ def test_verify_transaction_exception(self, network_check_mock, wait_for_outcome "ERROR:hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange:Submitted transaction failed: Test exception", ) - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.wait_for_final_transaction_outcome") - @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") - def test_verify_transaction_exception_none_transaction(self, network_check_mock, wait_for_outcome_mock): - wait_for_outcome_mock.side_effect = Exception("Test exception") - + def test_verify_transaction_exception_none_transaction(self): with self.assertLogs(level="ERROR") as log: - result, response = self.async_run_with_timeout( + self.async_run_with_timeout( self.connector._verify_transaction_result({"transaction": None, "prelim_result": "tesSUCCESS"}) ) @@ -1614,6 +1749,47 @@ def test_verify_transaction_exception_none_transaction(self, network_check_mock, "ERROR:hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange:Failed to verify transaction result, transaction is None", ) + self.connector.wait_for_final_transaction_outcome = AsyncMock() + self.connector.wait_for_final_transaction_outcome.side_effect = TimeoutError + self.connector._sleep = AsyncMock() + + with self.assertLogs(level="ERROR") as log: + self.async_run_with_timeout( + self.connector._verify_transaction_result( + { + "transaction": Transaction( + account="r1234", transaction_type=TransactionType.ACCOUNT_SET + ), # noqa: mock + "prelim_result": "tesSUCCESS", + } + ) + ) + + log_output = log.output[0] + self.assertEqual( + log_output, + "ERROR:hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange:Max retries reached. Verify transaction failed due to timeout.", + ) + + with self.assertLogs(level="ERROR") as log: + self.async_run_with_timeout( + self.connector._verify_transaction_result( + { + "transaction": Transaction( + account="r1234", transaction_type=TransactionType.ACCOUNT_SET + ), # noqa: mock + "prelim_result": "tesSUCCESS", + }, + try_count=CONSTANTS.VERIFY_TRANSACTION_MAX_RETRY, + ) + ) + + log_output = log.output[0] + self.assertEqual( + log_output, + "ERROR:hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange:Max retries reached. 
Verify transaction failed due to timeout.", + ) + @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.wait_for_final_transaction_outcome") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") def test_verify_transaction_exception_none_prelim(self, network_check_mock, wait_for_outcome_mock): @@ -2175,6 +2351,20 @@ def test_request_order_status(self, fetch_account_transactions_mock, network_che order_update = self.async_run_with_timeout(self.connector._request_order_status(in_flight_order)) self.assertEqual(order_update.new_state, OrderState.PENDING_CREATE) + in_flight_order = InFlightOrder( + client_order_id="hbot-1719868942218900-SSOXP61c36315c76a2aa2eb3bb461924f46f4336f2", # noqa: mock + trading_pair="SOLO-XRP", + order_type=OrderType.LIMIT, + trade_type=TradeType.SELL, + price=Decimal("0.217090"), + amount=Decimal("2.303184724670496"), + creation_timestamp=time.time(), + ) + + in_flight_order.current_state = OrderState.PENDING_CREATE + order_update = self.async_run_with_timeout(self.connector._request_order_status(in_flight_order)) + self.assertEqual(order_update.new_state, OrderState.PENDING_CREATE) + @patch("hummingbot.connector.exchange.xrpl.xrpl_auth.XRPLAuth.get_account") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._make_network_check_request") @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange._fetch_account_transactions") @@ -2435,7 +2625,7 @@ def test_get_trade_fills(self, fetch_account_transactions_mock, network_check_mo self.assertEqual(len(trade_fills), 1) self.assertEqual( - trade_fills[0].trade_id, "1B74D0FE8F6CBAC807D3C7137D4C265F49CBC30B3EC2FEB8F94CD0EB39162F41" # noqa: mock + trade_fills[0].trade_id, "1B74D0FE8F6CBAC807D3C7137D4C265F49CBC30B3EC2FEB8F94CD0EB39162F41" # noqa: mock ) # noqa: mock self.assertEqual( trade_fills[0].client_order_id, @@ -2448,7 +2638,8 @@ def test_get_trade_fills(self, fetch_account_transactions_mock, network_check_mo self.assertEqual(trade_fills[0].fill_base_amount, Decimal("5.619196007179491")) self.assertEqual(trade_fills[0].fill_quote_amount, Decimal("1.249995")) self.assertEqual( - trade_fills[0].fee.percent, Decimal("0.01000000000000000020816681711721685132943093776702880859375") # noqa: mock + trade_fills[0].fee.percent, + Decimal("0.01000000000000000020816681711721685132943093776702880859375"), # noqa: mock ) self.assertEqual(trade_fills[0].fee.percent_token, "XRP") self.assertEqual(trade_fills[0].fee.flat_fees, []) @@ -2708,10 +2899,12 @@ def test_get_trade_fills(self, fetch_account_transactions_mock, network_check_mo trade_fills = self.async_run_with_timeout(self.connector._all_trade_updates_for_order(in_flight_order)) self.assertEqual(len(trade_fills), 1) - self.assertEqual(trade_fills[0].trade_id, "1B74D0FE8F6CBAC807D3C7137D4C265F49CBC30B3EC2FEB8F94CD0EB39162F41") # noqa: mock + self.assertEqual( + trade_fills[0].trade_id, "1B74D0FE8F6CBAC807D3C7137D4C265F49CBC30B3EC2FEB8F94CD0EB39162F41" # noqa: mock + ) self.assertEqual( trade_fills[0].client_order_id, - "hbot-1718906078435341-BSOXP61b56023518294a8eb046fb3701345edf3cf5", # noqa: mock + "hbot-1718906078435341-BSOXP61b56023518294a8eb046fb3701345edf3cf5", # noqa: mock ) self.assertEqual(trade_fills[0].exchange_order_id, "84436571-88824981") self.assertEqual(trade_fills[0].trading_pair, "SOLO-XRP") @@ -2719,3 +2912,45 @@ def test_get_trade_fills(self, fetch_account_transactions_mock, network_check_mo self.assertEqual(trade_fills[0].fill_price, 
Decimal("4.417734611892777801348826549")) self.assertEqual(trade_fills[0].fill_base_amount, Decimal("306.599028007179491")) self.assertEqual(trade_fills[0].fill_quote_amount, Decimal("1354.473138")) + + @patch("hummingbot.connector.exchange.xrpl.xrpl_auth.XRPLAuth.get_account") + @patch("hummingbot.connector.exchange.xrpl.xrpl_exchange.XrplExchange.request_with_retry") + def test_fetch_account_transactions(self, request_with_retry_mock, get_account_mock): + + get_account_mock.return_value = "r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK" # noqa: mock + request_with_retry_mock.return_value = Response( + status=ResponseStatus.SUCCESS, + result={"transactions": ["something"]}, + id="account_info_644216", + type=ResponseType.RESPONSE, + ) + + txs = self.async_run_with_timeout(self.connector._fetch_account_transactions(ledger_index=88824981)) + self.assertEqual(len(txs), 1) + + def test_tx_submit(self): + mock_client = AsyncMock() + mock_client._request_impl.return_value = Response( + status=ResponseStatus.SUCCESS, + result={"transactions": ["something"]}, + id="something_1234", + type=ResponseType.RESPONSE, + ) + + some_tx = OfferCancel(account="r2XdzWFVoHGfGVmXugtKhxMu3bqhsYiWK", offer_sequence=88824981) + + resp = self.async_run_with_timeout(self.connector.tx_submit(some_tx, mock_client)) + self.assertEqual(resp.status, ResponseStatus.SUCCESS) + + # check if there is exception if response status is not success + mock_client._request_impl.return_value = Response( + status=ResponseStatus.ERROR, + result={"error": "something"}, + id="something_1234", + type=ResponseType.RESPONSE, + ) + + with self.assertRaises(XRPLRequestFailureException) as context: + self.async_run_with_timeout(self.connector.tx_submit(some_tx, mock_client)) + + self.assertTrue("something" in str(context.exception)) diff --git a/test/hummingbot/connector/exchange/xrpl/test_xrpl_utils.py b/test/hummingbot/connector/exchange/xrpl/test_xrpl_utils.py index f14051f07d..b2d7593cb5 100644 --- a/test/hummingbot/connector/exchange/xrpl/test_xrpl_utils.py +++ b/test/hummingbot/connector/exchange/xrpl/test_xrpl_utils.py @@ -1,9 +1,38 @@ +import asyncio import unittest +from typing import Awaitable +from unittest.mock import AsyncMock -from hummingbot.connector.exchange.xrpl.xrpl_utils import XRPLConfigMap, compute_order_book_changes +from xrpl.asyncio.clients import XRPLRequestFailureException +from xrpl.asyncio.transaction import XRPLReliableSubmissionException +from xrpl.models import OfferCancel, Response +from xrpl.models.response import ResponseStatus +from hummingbot.connector.exchange.xrpl import xrpl_constants as CONSTANTS +from hummingbot.connector.exchange.xrpl.xrpl_utils import ( + XRPLConfigMap, + _wait_for_final_transaction_outcome, + autofill, + compute_order_book_changes, +) -class TestGetOfferChange(unittest.TestCase): + +class TestXRPLUtils(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + + def setUp(self) -> None: + super().setUp() + + def tearDown(self) -> None: + super().tearDown() + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 5): + ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret def _event_message_limit_order_partially_filled(self): resp = { @@ -242,3 +271,101 @@ def test_validate_wss_second_node_url_invalid(self): with self.assertRaises(ValueError) as context: XRPLConfigMap.validate_wss_second_node_url(invalid_url) self.assertIn("Invalid node url", str(context.exception)) + 
+    def test_auto_fill(self):
+        client = AsyncMock()
+
+        request = OfferCancel(
+            account="rsoLoDTcxn9wCEHHBR7enMhzQMThkB2w28",  # noqa: mock
+            offer_sequence=69870875,
+        )
+
+        client.network_id = None
+        client.build_version = None
+        client._request_impl.return_value = Response(
+            status=ResponseStatus.SUCCESS,
+            result={
+                "info": {"network_id": 1026, "build_version": "1.11.1"},
+                "account_data": {"Sequence": 99999911},
+                "drops": {
+                    "open_ledger_fee": "10",
+                    "minimum_fee": "10",
+                },
+                "ledger_index": 99999221,
+            },
+        )
+
+        filled_request = self.async_run_with_timeout(autofill(request, client))
+
+        self.assertIsInstance(filled_request, OfferCancel)
+        self.assertEqual(filled_request.fee, str(10 * CONSTANTS.FEE_MULTIPLIER))
+        self.assertEqual(filled_request.last_ledger_sequence, 99999221 + 20)
+        self.assertEqual(filled_request.network_id, 1026)
+
+        client._request_impl.side_effect = Exception("Error")
+
+        with self.assertRaises(Exception):
+            self.async_run_with_timeout(autofill(request, client))
+
+    def test_wait_for_final_transaction_outcome(self):
+        client = AsyncMock()
+        client.network_id = None
+        client.build_version = None
+        client._request_impl.return_value = Response(
+            status=ResponseStatus.SUCCESS,
+            result={
+                "ledger_index": 99999221,
+                "validated": True,
+                "meta": {
+                    "TransactionResult": "tesSUCCESS",
+                },
+            },
+        )
+
+        with self.assertRaises(XRPLReliableSubmissionException):
+            self.async_run_with_timeout(
+                _wait_for_final_transaction_outcome("transaction_hash", client, "something", 12345)
+            )
+
+        with self.assertRaises(XRPLRequestFailureException):
+            client._request_impl.return_value = Response(
+                status=ResponseStatus.ERROR,
+                result={"error": "something happened"},
+            )
+            self.async_run_with_timeout(
+                _wait_for_final_transaction_outcome("transaction_hash", client, "something", 12345)
+            )
+
+        with self.assertRaises(XRPLReliableSubmissionException):
+            client._request_impl.return_value = Response(
+                status=ResponseStatus.SUCCESS,
+                result={
+                    "ledger_index": 99999221,
+                    "validated": True,
+                    "meta": {
+                        "TransactionResult": "tecKilled",
+                    },
+                },
+            )
+            self.async_run_with_timeout(
+                _wait_for_final_transaction_outcome("transaction_hash", client, "something", 12345)
+            )
+
+        client._request_impl.return_value = Response(
+            status=ResponseStatus.SUCCESS,
+            result={
+                "ledger_index": 99999221,
+                "validated": True,
+                "meta": {
+                    "TransactionResult": "tesSUCCESS",
+                },
+            },
+        )
+
+        response = self.async_run_with_timeout(
+            _wait_for_final_transaction_outcome("transaction_hash", client, "something", 1234500000)
+        )
+
+        self.assertEqual(response.result["ledger_index"], 99999221)
+        self.assertEqual(response.result["validated"], True)
+        self.assertEqual(response.result["meta"]["TransactionResult"], "tesSUCCESS")
diff --git a/test/hummingbot/connector/gateway/clob_spot/data_sources/injective/test_injective_utils.py b/test/hummingbot/connector/gateway/clob_spot/data_sources/injective/test_injective_utils.py
index db1ddca814..49ce9f5766 100644
--- a/test/hummingbot/connector/gateway/clob_spot/data_sources/injective/test_injective_utils.py
+++ b/test/hummingbot/connector/gateway/clob_spot/data_sources/injective/test_injective_utils.py
@@ -28,6 +28,7 @@ def test_derivative_quantity_to_backend_utility_method(self):
             quote=6,
             min_price_tick_size=1000,
             min_quantity_tick_size=100,
+            min_notional=0,
         )

         backend_quantity = derivative_quantity_to_backend(quantity=Decimal("1"), denom=denom)
@@ -41,6 +42,7 @@ def test_derivative_price_to_backend_utility_method(self):
             quote=6,
             min_price_tick_size=1000,
min_quantity_tick_size=100, + min_notional=0, ) backend_quantity = derivative_price_to_backend(price=Decimal("123.45"), denom=denom) diff --git a/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py b/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py index 8b2f7c8d1d..8a9c971f11 100644 --- a/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py +++ b/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py @@ -502,7 +502,7 @@ def test_create_buy_limit_order_successfully(self): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name} order {order_id} for " - f"{Decimal('100.000')} {self.trading_pair}." + f"{Decimal('100.000')} {self.trading_pair} at {Decimal('10000.00000')}." ) ) @@ -544,7 +544,7 @@ def test_create_sell_limit_order_successfully(self): self.is_logged( "INFO", f"Created {OrderType.LIMIT.name} {TradeType.SELL.name} order {order_id} for " - f"{Decimal('100.000')} {self.trading_pair}." + f"{Decimal('100.000')} {self.trading_pair} at {Decimal('10000.00000')}." ) ) @@ -845,7 +845,7 @@ def test_batch_order_create(self): "INFO", f"Created {OrderType.LIMIT.name} {TradeType.BUY.name}" f" order {buy_order_to_create_in_flight.client_order_id} for " - f"{buy_create_event.amount} {self.trading_pair}." + f"{buy_create_event.amount} {self.trading_pair} at {buy_create_event.price}." ) ) diff --git a/test/hummingbot/connector/test_client_order_tracker.py b/test/hummingbot/connector/test_client_order_tracker.py index 686ffd5cc6..f006dbc27b 100644 --- a/test/hummingbot/connector/test_client_order_tracker.py +++ b/test/hummingbot/connector/test_client_order_tracker.py @@ -343,7 +343,7 @@ def test_process_order_update_trigger_order_creation_event(self): self._is_logged( "INFO", f"Created {order.order_type.name} {order.trade_type.name} order {order.client_order_id} for " - f"{order.amount} {order.trading_pair}.", + f"{order.amount} {order.trading_pair} at {order.price}.", ) ) @@ -394,7 +394,7 @@ def test_process_order_update_trigger_order_creation_event_without_client_order_ self._is_logged( "INFO", f"Created {order.order_type.name} {order.trade_type.name} order {order.client_order_id} for " - f"{order.amount} {order.trading_pair}.", + f"{order.amount} {order.trading_pair} at {order.price}.", ) ) @@ -638,7 +638,7 @@ def test_process_trade_update_trigger_filled_event_flat_fee(self): self._is_logged( "INFO", f"The {order.trade_type.name.upper()} order {order.client_order_id} amounting to " - f"{trade_filled_amount}/{order.amount} {order.base_asset} has been filled.", + f"{trade_filled_amount}/{order.amount} {order.base_asset} has been filled at {trade_filled_price} {order.quote_asset}.", ) ) @@ -690,7 +690,7 @@ def test_process_trade_update_does_not_trigger_filled_event_update_status_when_c self._is_logged( "INFO", f"The {order.trade_type.name.upper()} order {order.client_order_id} amounting to " - f"{order.amount}/{order.amount} {order.base_asset} has been filled.", + f"{order.amount}/{order.amount} {order.base_asset} has been filled at {order.price} {order.quote_asset}.", ) ) diff --git a/test/hummingbot/core/mock_api/test_mock_web_server.py b/test/hummingbot/core/mock_api/test_mock_web_server.py deleted file mode 100644 index 8bffc5d8cd..0000000000 --- a/test/hummingbot/core/mock_api/test_mock_web_server.py +++ /dev/null @@ -1,73 +0,0 @@ -import asyncio -from aiohttp import ClientSession -import unittest.mock -import requests -import json -from hummingbot.core.mock_api.mock_web_server import MockWebServer - - 
-class MockWebServerTest(unittest.TestCase): - @classmethod - def setUpClass(cls) -> None: - cls.ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() - cls.web_app: MockWebServer = MockWebServer.get_instance() - cls.host = "www.google.com" - cls.web_app.add_host_to_mock(cls.host) - cls.web_app.start() - cls.ev_loop.run_until_complete(cls.web_app.wait_til_started()) - cls._patcher = unittest.mock.patch("aiohttp.client.URL") - cls._url_mock = cls._patcher.start() - cls._url_mock.side_effect = MockWebServer.reroute_local - - cls._req_patcher = unittest.mock.patch.object(requests.Session, "request", autospec=True) - cls._req_url_mock = cls._req_patcher.start() - cls._req_url_mock.side_effect = MockWebServer.reroute_request - - @classmethod - def tearDownClass(cls) -> None: - cls.web_app.stop() - cls._patcher.stop() - cls._req_patcher.stop() - - async def _test_web_app_response(self): - self.web_app.clear_responses() - self.web_app.update_response("get", self.host, "/", data=self.web_app.TEST_RESPONSE, is_json=False) - async with ClientSession() as client: - async with client.get("http://www.google.com/") as resp: - text: str = await resp.text() - print(text) - self.assertEqual(self.web_app.TEST_RESPONSE, text) - - def test_web_app_response(self): - self.ev_loop.run_until_complete(asyncio.wait_for(self._test_web_app_response(), 20)) - - def test_get_request_response(self): - self.web_app.clear_responses() - self.web_app.update_response("get", self.host, "/", data=self.web_app.TEST_RESPONSE, is_json=False) - r = requests.get("http://www.google.com/") - self.assertEqual(self.web_app.TEST_RESPONSE, r.text) - - def test_update_response(self): - self.web_app.clear_responses() - self.web_app.update_response('get', 'www.google.com', '/', {"a": 1, "b": 2}) - r = requests.get("http://www.google.com/") - r_json = json.loads(r.text) - self.assertEqual(r_json["a"], 1) - - self.web_app.update_response('post', 'www.google.com', '/', "default") - self.web_app.update_response('post', 'www.google.com', '/', {"a": 1, "b": 2}, params={"para_a": '11'}) - r = requests.post("http://www.google.com/", data={"para_a": 11, "para_b": 22}) - r_json = json.loads(r.text) - self.assertEqual(r_json["a"], 1) - - def test_query_string(self): - self.web_app.clear_responses() - self.web_app.update_response('get', 'www.google.com', '/', "default") - self.web_app.update_response('get', 'www.google.com', '/', {"a": 1}, params={"qs1": "1"}) - r = requests.get("http://www.google.com/?qs1=1") - r_json = json.loads(r.text) - self.assertEqual(r_json["a"], 1) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/hummingbot/core/mock_api/test_mock_web_socket_server.py b/test/hummingbot/core/mock_api/test_mock_web_socket_server.py deleted file mode 100644 index 972e967541..0000000000 --- a/test/hummingbot/core/mock_api/test_mock_web_socket_server.py +++ /dev/null @@ -1,59 +0,0 @@ -import aiohttp -from aiounittest import async_test -import asyncio -import unittest.mock -import json - -from hummingbot.core.mock_api.mock_web_socket_server import MockWebSocketServerFactory - -ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() - - -class MockWebSocketServerFactoryTest(unittest.TestCase): - @classmethod - def setUpClass(cls) -> None: - cls.ws_server = MockWebSocketServerFactory.start_new_server("wss://www.google.com/ws/") - cls._patcher = unittest.mock.patch("aiohttp.client.ClientSession.ws_connect", autospec=True) - cls._mock = cls._patcher.start() - cls._mock.side_effect = 
MockWebSocketServerFactory.reroute_ws_connect - # need to wait a bit for the server to be available - ev_loop.run_until_complete(asyncio.wait_for(cls.ws_server.wait_til_started(), 1)) - - @classmethod - def tearDownClass(cls) -> None: - cls.ws_server.stop() - cls._patcher.stop() - - @async_test(loop=ev_loop) - async def test_web_socket(self): - uri = "wss://www.google.com/ws/" - - # Retry up to 3 times if there is any error connecting to the mock server address and port - async with aiohttp.ClientSession() as client: - for retry_attempt in range(3): - try: - async with client.ws_connect(uri) as websocket: - await MockWebSocketServerFactory.send_str(uri, "aaa") - answer = await websocket.receive_str() - self.assertEqual("aaa", answer) - - await MockWebSocketServerFactory.send_json(uri, data={"foo": "bar"}) - answer = await websocket.receive_str() - answer = json.loads(answer) - self.assertEqual(answer["foo"], "bar") - - await self.ws_server.websocket.send_str("xxx") - answer = await websocket.receive_str() - self.assertEqual("xxx", answer) - except OSError: - if retry_attempt == 2: - raise - # Continue retrying - continue - - # Stop the retries cycle - break - - -if __name__ == '__main__': - unittest.main() diff --git a/test/hummingbot/core/test_clock.py b/test/hummingbot/core/test_clock.py index 0d44d74743..66cbec1a4a 100644 --- a/test/hummingbot/core/test_clock.py +++ b/test/hummingbot/core/test_clock.py @@ -1,12 +1,10 @@ -import unittest import asyncio -import pandas as pd import time +import unittest + +import pandas as pd -from hummingbot.core.clock import ( - Clock, - ClockMode -) +from hummingbot.core.clock import Clock, ClockMode from hummingbot.core.time_iterator import TimeIterator @@ -19,7 +17,7 @@ class ClockUnitTest(unittest.TestCase): @classmethod def setUpClass(cls): super().setUpClass() - cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() + cls.ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() def setUp(self): super().setUp() diff --git a/test/hummingbot/core/test_network_base.py b/test/hummingbot/core/test_network_base.py index ff8adfb769..c45b808222 100644 --- a/test/hummingbot/core/test_network_base.py +++ b/test/hummingbot/core/test_network_base.py @@ -1,16 +1,18 @@ import asyncio +from test.isolated_asyncio_wrapper_test_case import IsolatedAsyncioWrapperTestCase +from unittest.mock import patch + from hummingbot.core.network_base import NetworkBase from hummingbot.core.network_iterator import NetworkStatus -import unittest class SampleNetwork(NetworkBase): async def check_network(self) -> NetworkStatus: - "Override check_network to always return connected" + """Override check_network to always return connected""" return NetworkStatus.CONNECTED -class NetworkBaseTest(unittest.TestCase): +class NetworkBaseTest(IsolatedAsyncioWrapperTestCase): def test_init(self): """ This lets us know if the initial values have changed and extends @@ -35,7 +37,7 @@ def test_init(self): nb.check_network_timeout = 45.0 self.assertEqual(nb.check_network_timeout, 45.0) - def test_network(self): + async def test_network(self): """ NetworkBase has a couple of method sketches that do not do anything but are used by child classes. 
@@ -43,17 +45,14 @@ def test_network(self): nb = NetworkBase() - self.assertEqual(asyncio.get_event_loop().run_until_complete(nb.start_network()), None) - - self.assertEqual(asyncio.get_event_loop().run_until_complete(nb.stop_network()), None) - - self.assertEqual(asyncio.get_event_loop().run_until_complete(nb.check_network()), NetworkStatus.NOT_CONNECTED) + await nb.start_network() + await nb.stop_network() + self.assertEqual(await nb.check_network(), NetworkStatus.NOT_CONNECTED) def test_start_and_stop_network(self): """ Assert that start and stop update the started property. """ - nb = NetworkBase() nb.start() @@ -62,7 +61,7 @@ def test_start_and_stop_network(self): nb.stop() self.assertEqual(nb.started, False) - def test_update_network_status(self): + async def test_update_network_status(self): """ Use SampleNetwork to test that the network status gets updated """ @@ -75,11 +74,41 @@ def test_update_network_status(self): sample.check_network_timeout = 0.1 sample.start() - asyncio.get_event_loop().run_until_complete(asyncio.sleep(0.2)) + await asyncio.sleep(0.2) self.assertEqual(sample.network_status, NetworkStatus.CONNECTED) sample.stop() - asyncio.get_event_loop().run_until_complete(asyncio.sleep(0.2)) + await asyncio.sleep(0.2) self.assertEqual(sample.started, False) + + @patch.object(SampleNetwork, "check_network") + @patch.object(NetworkBase, "_sleep") + async def test_check_network_loop_raises_timeout(self, mock_sleep, mock_check_network): + """ + Test that the check_network_loop raises a timeout error + """ + mock_check_network.side_effect = [asyncio.TimeoutError] + mock_sleep.side_effect = [asyncio.CancelledError] + nb = SampleNetwork() + nb._check_network_timeout = 0.1 + with self.assertRaises(asyncio.CancelledError): + await nb._check_network_loop() + + self.assertEqual(nb.network_status, NetworkStatus.NOT_CONNECTED) + + @patch.object(SampleNetwork, "check_network") + @patch.object(NetworkBase, "_sleep") + async def test_check_network_loop_raises_exception(self, mock_sleep, mock_check_network): + """ + Test that the check_network_loop raises an exception + """ + mock_check_network.side_effect = [Exception] + mock_sleep.side_effect = [asyncio.CancelledError] + nb = SampleNetwork() + nb._check_network_timeout = 0.1 + with self.assertRaises(asyncio.CancelledError): + await nb._check_network_loop() + + self.assertEqual(nb.network_status, NetworkStatus.NOT_CONNECTED) diff --git a/test/hummingbot/core/web_assistant/connections/test_data_types.py b/test/hummingbot/core/web_assistant/connections/test_data_types.py index 99ae60181f..dd3937c0cb 100644 --- a/test/hummingbot/core/web_assistant/connections/test_data_types.py +++ b/test/hummingbot/core/web_assistant/connections/test_data_types.py @@ -6,9 +6,7 @@ import aiohttp from aioresponses import aioresponses -from hummingbot.core.web_assistant.connections.data_types import ( - RESTMethod, RESTResponse, EndpointRESTRequest -) +from hummingbot.core.web_assistant.connections.data_types import EndpointRESTRequest, RESTMethod, RESTResponse class DataTypesTest(unittest.TestCase): @@ -34,7 +32,7 @@ def test_rest_response_properties(self, mocked_api): body_str = json.dumps(body) headers = {"content-type": "application/json"} mocked_api.get(url=url, body=body_str, headers=headers) - aiohttp_response = self.async_run_with_timeout(aiohttp.ClientSession().get(url)) + aiohttp_response = self.async_run_with_timeout(aiohttp.ClientSession(loop=self.ev_loop).get(url)) response = RESTResponse(aiohttp_response) @@ -58,7 +56,7 @@ def 
test_rest_response_repr(self, mocked_api): body_str = json.dumps(body) headers = {"content-type": "application/json"} mocked_api.get(url=url, body=body_str, headers=headers) - aiohttp_response = self.async_run_with_timeout(aiohttp.ClientSession().get(url)) + aiohttp_response = self.async_run_with_timeout(aiohttp.ClientSession(loop=self.ev_loop).get(url)) response = RESTResponse(aiohttp_response) diff --git a/test/hummingbot/core/web_assistant/connections/test_rest_connection.py b/test/hummingbot/core/web_assistant/connections/test_rest_connection.py index 48b53e3e11..ff3259cab3 100644 --- a/test/hummingbot/core/web_assistant/connections/test_rest_connection.py +++ b/test/hummingbot/core/web_assistant/connections/test_rest_connection.py @@ -6,8 +6,8 @@ import aiohttp from aioresponses import aioresponses -from hummingbot.core.web_assistant.connections.rest_connection import RESTConnection from hummingbot.core.web_assistant.connections.data_types import RESTMethod, RESTRequest, RESTResponse +from hummingbot.core.web_assistant.connections.rest_connection import RESTConnection class RESTConnectionTest(unittest.TestCase): @@ -26,7 +26,7 @@ def test_rest_connection_call(self, mocked_api): resp = {"one": 1} mocked_api.get(url, body=json.dumps(resp).encode()) - client_session = aiohttp.ClientSession() + client_session = aiohttp.ClientSession(loop=self.ev_loop) connection = RESTConnection(client_session) request = RESTRequest(method=RESTMethod.GET, url=url) diff --git a/test/hummingbot/core/web_assistant/connections/test_ws_connection.py b/test/hummingbot/core/web_assistant/connections/test_ws_connection.py index 3c88bf5f01..b14af03879 100644 --- a/test/hummingbot/core/web_assistant/connections/test_ws_connection.py +++ b/test/hummingbot/core/web_assistant/connections/test_ws_connection.py @@ -21,7 +21,7 @@ def setUpClass(cls) -> None: def setUp(self) -> None: super().setUp() self.mocking_assistant = NetworkMockingAssistant() - self.client_session = aiohttp.ClientSession() + self.client_session = aiohttp.ClientSession(loop=self.ev_loop) self.ws_connection = WSConnection(self.client_session) self.async_tasks: List[asyncio.Task] = [] diff --git a/test/hummingbot/core/web_assistant/test_rest_assistant.py b/test/hummingbot/core/web_assistant/test_rest_assistant.py index d37644f288..cd56644320 100644 --- a/test/hummingbot/core/web_assistant/test_rest_assistant.py +++ b/test/hummingbot/core/web_assistant/test_rest_assistant.py @@ -48,7 +48,7 @@ async def post_process(self, response: RESTResponse) -> RESTResponse: pre_processors = [PreProcessor()] post_processors = [PostProcessor()] - connection = RESTConnection(aiohttp.ClientSession()) + connection = RESTConnection(aiohttp.ClientSession(loop=self.ev_loop)) assistant = RESTAssistant( connection=connection, throttler=AsyncThrottler(rate_limits=[]), @@ -85,7 +85,7 @@ async def rest_authenticate(self, request: RESTRequest) -> RESTRequest: async def ws_authenticate(self, request: WSRequest) -> WSRequest: pass - connection = RESTConnection(aiohttp.ClientSession()) + connection = RESTConnection(aiohttp.ClientSession(loop=self.ev_loop)) assistant = RESTAssistant(connection, throttler=AsyncThrottler(rate_limits=[]), auth=AuthDummy()) req = RESTRequest(method=RESTMethod.GET, url=url) auth_req = RESTRequest(method=RESTMethod.GET, url=url, is_auth_required=True) diff --git a/test/hummingbot/core/web_assistant/test_ws_assistant.py b/test/hummingbot/core/web_assistant/test_ws_assistant.py index 9d13e9e978..edd5f924ef 100644 --- 
a/test/hummingbot/core/web_assistant/test_ws_assistant.py +++ b/test/hummingbot/core/web_assistant/test_ws_assistant.py @@ -15,18 +15,15 @@ class WSAssistantTest(unittest.TestCase): - ev_loop: asyncio.AbstractEventLoop @classmethod def setUpClass(cls) -> None: super().setUpClass() cls.ev_loop = asyncio.get_event_loop() - for task in asyncio.all_tasks(cls.ev_loop): - task.cancel() def setUp(self) -> None: super().setUp() - aiohttp_client_session = aiohttp.ClientSession() + aiohttp_client_session = aiohttp.ClientSession(loop=self.ev_loop) self.ws_connection = WSConnection(aiohttp_client_session) self.ws_assistant = WSAssistant(self.ws_connection) self.mocking_assistant = NetworkMockingAssistant() diff --git a/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py b/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py index d68e57b8da..d1efb144d9 100644 --- a/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py @@ -1,18 +1,13 @@ import asyncio -import json -import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase +import hummingbot.data_feed.candles_feed.okx_spot_candles.constants as CONSTANTS from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles import AscendExSpotCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles import AscendExSpotCandles -class TestAscendExSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestOKXPerpetualCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -23,31 +18,20 @@ def setUpClass(cls) -> None: cls.quote_asset = "USDT" cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - cls.ex_trading_pair = cls.base_asset + "/" + cls.quote_asset + cls.ex_trading_pair = f"{cls.base_asset}/{cls.quote_asset}" + cls.max_records = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST def setUp(self) -> None: super().setUp() + self.data_feed = AscendExSpotCandles(trading_pair=self.trading_pair, + interval=self.interval, + max_records=self.max_records) self.mocking_assistant = NetworkMockingAssistant() - self.data_feed = AscendExSpotCandles(trading_pair=self.trading_pair, interval=self.interval) - - self.log_records = [] self.data_feed.logger().setLevel(1) self.data_feed.logger().addHandler(self) - self.resume_test_event = asyncio.Event() - - def handle(self, record): - self.log_records.append(record) - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 2): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - def get_candles_rest_data_mock(self): + @staticmethod + def get_candles_rest_data_mock(): data = { "code": 0, "data": [ @@ -69,7 +53,7 @@ def get_candles_rest_data_mock(self): "s": "BTC/USDT", "data": { "i": "1", - "ts": 1688973900000, + "ts": 1688977440000, "o": "30096.84", "c": "30097.88", "h": 
"30115.67", @@ -82,7 +66,7 @@ def get_candles_rest_data_mock(self): "s": "BTC/USDT", "data": { "i": "1", - "ts": 1688973960000, + "ts": 1688981040000, "o": "30092.53", "c": "30087.11", "h": "30115.97", @@ -95,7 +79,7 @@ def get_candles_rest_data_mock(self): "s": "BTC/USDT", "data": { "i": "1", - "ts": 1688974020000, + "ts": 1688984640000, "o": "30086.51", "c": "30102.34", "h": "30102.34", @@ -108,7 +92,7 @@ def get_candles_rest_data_mock(self): "s": "BTC/USDT", "data": { "i": "1", - "ts": 1688974080000, + "ts": 1688988240000, "o": "30095.93", "c": "30085.25", "h": "30103.04", @@ -120,7 +104,15 @@ def get_candles_rest_data_mock(self): } return data - def get_candles_ws_data_mock_1(self): + def get_fetch_candles_data_mock(self): + return [[1688973840.0, '30105.52', '30099.41', '30115.58', '30098.19', 0, '0.13736', 0, 0, 0], + [1688977440.0, '30096.84', '30115.67', '30096.84', '30097.88', 0, '0.16625', 0, 0, 0], + [1688981040.0, '30092.53', '30115.97', '30087.11', '30087.11', 0, '0.06992', 0, 0, 0], + [1688984640.0, '30086.51', '30102.34', '30082.68', '30102.34', 0, '0.14145', 0, 0, 0], + [1688988240.0, '30095.93', '30103.04', '30077.94', '30085.25', 0, '0.15819', 0, 0, 0],] + + @staticmethod + def get_candles_ws_data_mock_1(): data = { "m": "bar", "s": "BTC/USDT", @@ -136,7 +128,8 @@ def get_candles_ws_data_mock_1(self): } return data - def get_candles_ws_data_mock_2(self): + @staticmethod + def get_candles_ws_data_mock_2(): data = { "m": "bar", "s": "BTC/USDT", @@ -152,160 +145,6 @@ def get_candles_ws_data_mock_2(self): } return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1685167200 - end_time = 1685172600 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - self.assertEqual(resp.shape[0], len(data_mock['data'])) - self.assertEqual(resp.shape[1], 10) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "result": None, - "id": 1 - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "op": CONSTANTS.SUB_ENDPOINT_NAME, - "ch": f"bar:{CONSTANTS.INTERVALS[self.interval]}:{self.ex_trading_pair}" - } - self.assertEqual(expected_kline_subscription["ch"], sent_subscription_messages[0]["ch"]) - - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." 
- )) - - @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - 
self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/connector/exchange/kucoin/__init__.py b/test/hummingbot/data_feed/candles_feed/binance_perpetual_candles/__init__.py similarity index 100% rename from test/connector/exchange/kucoin/__init__.py rename to test/hummingbot/data_feed/candles_feed/binance_perpetual_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/binance_perpetual_candles/test_binance_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/binance_perpetual_candles/test_binance_perpetual_candles.py new file mode 100644 index 0000000000..603e8ae82f --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/binance_perpetual_candles/test_binance_perpetual_candles.py @@ -0,0 +1,154 @@ +import asyncio +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.binance_perpetual_candles import BinancePerpetualCandles + + +class TestBinancePerpetualCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = BinancePerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_candles_rest_data_mock(self): + data = [ + [ + 1718654400000, + "66661.40", + "66746.20", + "66122.30", + "66376.00", + 
"14150.996", + 1718657999999, + "939449103.58380", + 155369, + "7106.240", + "471742093.14380", + "0" + ], + [ + 1718658000000, + "66376.00", + "66697.00", + "66280.40", + "66550.00", + "4381.088", + 1718661599999, + "291370566.35900", + 70240, + "2273.176", + "151198574.00840", + "0" + ], + [ + 1718661600000, + "66550.00", + "66686.30", + "66455.20", + "66632.40", + "3495.412", + 1718665199999, + "232716285.32220", + 52041, + "1634.229", + "108805961.31540", + "0" + ], + [ + 1718665200000, + "66632.40", + "66694.40", + "66537.00", + "66537.00", + "813.988", + 1718668799999, + "54243407.92930", + 10655, + "320.268", + "21346153.24270", + "0" + ] + ] + return data + + def get_fetch_candles_data_mock(self): + return [[1718654400.0, '66661.40', '66746.20', '66122.30', '66376.00', '14150.996', '939449103.58380', 155369, '7106.240', '471742093.14380'], + [1718658000.0, '66376.00', '66697.00', '66280.40', '66550.00', '4381.088', '291370566.35900', 70240, '2273.176', '151198574.00840'], + [1718661600.0, '66550.00', '66686.30', '66455.20', '66632.40', '3495.412', '232716285.32220', 52041, '1634.229', '108805961.31540'], + [1718665200.0, '66632.40', '66694.40', '66537.00', '66537.00', '813.988', '54243407.92930', 10655, '320.268', '21346153.24270']] + + def get_candles_ws_data_mock_1(self): + return { + "e": "kline", + "E": 1638747660000, + "s": "BTCUSDT", + "k": { + "t": 1638747660000, + "T": 1638747719999, + "s": "BTCUSDT", + "i": "1m", + "f": 100, + "L": 200, + "o": "0.0010", + "c": "0.0020", + "h": "0.0025", + "l": "0.0015", + "v": "1000", + "n": 100, + "x": False, + "q": "1.0000", + "V": "500", + "Q": "0.500", + "B": "123456" + } + } + + def get_candles_ws_data_mock_2(self): + return { + "e": "kline", + "E": 1638751260000, + "s": "BTCUSDT", + "k": { + "t": 1638751260000, + "T": 1638754860000, + "s": "BTCUSDT", + "i": "1m", + "f": 100, + "L": 200, + "o": "0.0010", + "c": "0.0020", + "h": "0.0025", + "l": "0.0015", + "v": "1000", + "n": 100, + "x": False, + "q": "1.0000", + "V": "500", + "Q": "0.500", + "B": "123456" + } + } + + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/binance_spot_candles/test_binance_spot_candles.py b/test/hummingbot/data_feed/candles_feed/binance_spot_candles/test_binance_spot_candles.py index 2d3c6e3dc4..8854c8588e 100644 --- a/test/hummingbot/data_feed/candles_feed/binance_spot_candles/test_binance_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/binance_spot_candles/test_binance_spot_candles.py @@ -1,18 +1,12 @@ import asyncio -import json -import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.binance_spot_candles import BinanceSpotCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.binance_spot_candles import BinanceSpotCandles -class TestBinanceSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestBinanceSpotCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -24,6 +18,7 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.max_records = 150 def setUp(self) -> 
None: super().setUp() @@ -35,17 +30,11 @@ def setUp(self) -> None: self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret + def get_fetch_candles_data_mock(self): + return [[1672981200.0, '16823.24000000', '16823.63000000', '16792.12000000', '16810.18000000', '6230.44034000', '104737787.36570630', 162086, '3058.60695000', '51418990.63131130'], + [1672984800.0, '16809.74000000', '16816.45000000', '16779.96000000', '16786.86000000', '6529.22759000', '109693209.64287010', 175249, '3138.11977000', '52721850.46080600'], + [1672988400.0, '16786.60000000', '16802.87000000', '16780.15000000', '16794.06000000', '5763.44917000', '96775667.56265520', 160778, '3080.59468000', '51727251.37008490'], + [1672992000.0, '16794.33000000', '16812.22000000', '16791.47000000', '16802.11000000', '5475.13940000', '92000245.54341140', 164303, '2761.40926000', '46400964.30558100']] def get_candles_rest_data_mock(self): data = [ @@ -109,211 +98,57 @@ def get_candles_rest_data_mock(self): return data def get_candles_ws_data_mock_1(self): - data = { - "e": "kline", - "E": 123456789, - "s": "BTCUSDT", - "k": {"t": 123400000, - "T": 123460000, - "s": "BNBBTC", - "i": "1m", - "f": 100, - "L": 200, - "o": "0.0010", - "c": "0.0020", - "h": "0.0025", - "l": "0.0015", - "v": "1000", - "n": 100, - "x": False, - "q": "1.0000", - "V": "500", - "Q": "0.500", - "B": "123456" - } + return { + 'e': 'kline', + 'E': 1718667728540, + 's': 'BTCUSDT', + 'k': { + 't': 1718667720000, + 'T': 1718667779999, + 's': 'BTCUSDT', + 'i': '1m', + 'f': 3640284441, + 'L': 3640284686, + 'o': '66477.91000000', + 'c': '66472.20000000', + 'h': '66477.91000000', + 'l': '66468.00000000', + 'v': '10.75371000', + 'n': 246, + 'x': False, + 'q': '714783.46215380', + 'V': '9.29532000', + 'Q': '617844.95963270', + 'B': '0' + } } - return data def get_candles_ws_data_mock_2(self): - data = { - "e": "kline", - "E": 123516789, - "s": "BTCUSDT", - "k": {"t": 123460000, - "T": 123460000, - "s": "BNBBTC", - "i": "1m", - "f": 100, - "L": 200, - "o": "0.0010", - "c": "0.0020", - "h": "0.0025", - "l": "0.0015", - "v": "1000", - "n": 100, - "x": False, - "q": "1.0000", - "V": "500", - "Q": "0.500", - "B": "123456" - } - } - return data - - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1672981200000 - end_time = 1672992000000 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}?endTime={end_time}&interval={self.interval}&limit=500" \ - f"&startTime={start_time}&symbol={self.ex_trading_pair}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - self.assertEqual(resp.shape[0], len(data_mock)) - self.assertEqual(resp.shape[1], 10) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, 
ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "result": None, - "id": 1 + return { + 'e': 'kline', + 'E': 1718667728540, + 's': 'BTCUSDT', + 'k': { + 't': 1718671320000, + 'T': 1718674920000, + 's': 'BTCUSDT', + 'i': '1m', + 'f': 3640284441, + 'L': 3640284686, + 'o': '66477.91000000', + 'c': '66472.20000000', + 'h': '66477.91000000', + 'l': '66468.00000000', + 'v': '10.75371000', + 'n': 246, + 'x': False, + 'q': '714783.46215380', + 'V': '9.29532000', + 'Q': '617844.95963270', + 'B': '0' + } } - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "method": "SUBSCRIBE", - "params": [f"{self.ex_trading_pair.lower()}@kline_{self.interval}"], - "id": 1} - - self.assertEqual(expected_kline_subscription, sent_subscription_messages[0]) - - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." - )) - - @patch("hummingbot.data_feed.candles_feed.binance_spot_candles.BinanceSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.binance_spot_candles.BinanceSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. 
Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.binance_spot_candles.BinanceSpotCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.binance_spot_candles.BinanceSpotCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.binance_spot_candles.BinanceSpotCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = 
self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/connector/exchange/mexc/__init__.py b/test/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/__init__.py similarity index 100% rename from test/connector/exchange/mexc/__init__.py rename to test/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/test_bybit_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/test_bybit_perpetual_candles.py new file mode 100644 index 0000000000..80d5614b10 --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/bybit_perpetual_candles/test_bybit_perpetual_candles.py @@ -0,0 +1,85 @@ +import asyncio +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.bybit_perpetual_candles import BybitPerpetualCandles + + +class TestBybitPerpetualCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}" + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = BybitPerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1715162400.0, '62308.69', '62524.28', '62258.76', '62439.82', '421.80928', 0.0, 0.0, 0.0, 0.0], [1715166000.0, '62439.82', '62512.32', '62130.38', '62245.79', '423.537479', 0.0, 0.0, 0.0, 0.0], [1715169600.0, '62245.79', '62458.45', '62083.67', '62236.73', '603.163403', 0.0, 0.0, 0.0, 0.0], [1715173200.0, '62236.73', '62466.32', '61780.77', '62440.14', '907.398902', 0.0, 0.0, 0.0, 0.0], [1715176800.0, '62440.14', '62841.64', '62160.72', '62564.68', '706.187244', 0.0, 0.0, 0.0, 0.0]] + + def get_candles_rest_data_mock(self): + return {'retCode': 0, 'retMsg': 'OK', 'result': {'category': 'spot', 'symbol': 'BTCUSDT', 'list': [['1715176800000', '62440.14', '62841.64', '62160.72', '62564.68', '706.187244', '44137837.83110939'], ['1715173200000', '62236.73', '62466.32', '61780.77', '62440.14', '907.398902', '56295800.30345675'], ['1715169600000', '62245.79', '62458.45', '62083.67', '62236.73', '603.163403', '37546804.69133172'], ['1715166000000', '62439.82', '62512.32', '62130.38', '62245.79', '423.537479', '26383831.12979059'], ['1715162400000', '62308.69', '62524.28', '62258.76', '62439.82', '421.80928', '26322162.21650143']]}, 'retExtInfo': {}, 'time': 1718761678876} + + def get_candles_ws_data_mock_1(self): + return { + "topic": "kline.5.BTCUSDT", + "data": [ + { + 
"start": 1672324800000, + "end": 1672325099999, + "interval": "5", + "open": "16649.5", + "close": "16677", + "high": "16677", + "low": "16608", + "volume": "2.081", + "turnover": "34666.4005", + "confirm": False, + "timestamp": 1672324988882 + } + ], + "ts": 1672324988882, + "type": "snapshot" + } + + def get_candles_ws_data_mock_2(self): + return { + "topic": "kline.5.BTCUSDT", + "data": [ + { + "start": 1672328400000, + "end": 1672331000000, + "interval": "5", + "open": "16649.5", + "close": "16677", + "high": "16677", + "low": "16608", + "volume": "2.081", + "turnover": "34666.4005", + "confirm": False, + "timestamp": 1672324988882 + } + ], + "ts": 1672324988882, + "type": "snapshot" + } + + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/debug/__init__.py b/test/hummingbot/data_feed/candles_feed/bybit_spot_candles/__init__.py similarity index 100% rename from test/debug/__init__.py rename to test/hummingbot/data_feed/candles_feed/bybit_spot_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/bybit_spot_candles/test_bybit_spot_candles.py b/test/hummingbot/data_feed/candles_feed/bybit_spot_candles/test_bybit_spot_candles.py new file mode 100644 index 0000000000..08d083fc5b --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/bybit_spot_candles/test_bybit_spot_candles.py @@ -0,0 +1,85 @@ +import asyncio +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.bybit_spot_candles import BybitSpotCandles + + +class TestBybitSpotCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = BybitSpotCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1715162400.0, '62308.69', '62524.28', '62258.76', '62439.82', '421.80928', 0.0, 0.0, 0.0, 0.0], [1715166000.0, '62439.82', '62512.32', '62130.38', '62245.79', '423.537479', 0.0, 0.0, 0.0, 0.0], [1715169600.0, '62245.79', '62458.45', '62083.67', '62236.73', '603.163403', 0.0, 0.0, 0.0, 0.0], [1715173200.0, '62236.73', '62466.32', '61780.77', '62440.14', '907.398902', 0.0, 0.0, 0.0, 0.0], [1715176800.0, '62440.14', '62841.64', '62160.72', '62564.68', '706.187244', 0.0, 0.0, 0.0, 0.0]] + + def get_candles_rest_data_mock(self): + return {'retCode': 0, 'retMsg': 'OK', 'result': {'category': 'spot', 'symbol': 'BTCUSDT', 'list': [['1715176800000', '62440.14', '62841.64', '62160.72', '62564.68', '706.187244', '44137837.83110939'], ['1715173200000', '62236.73', '62466.32', '61780.77', '62440.14', '907.398902', '56295800.30345675'], ['1715169600000', '62245.79', '62458.45', '62083.67', '62236.73', '603.163403', '37546804.69133172'], ['1715166000000', '62439.82', '62512.32', '62130.38', '62245.79', '423.537479', '26383831.12979059'], ['1715162400000', '62308.69', '62524.28', '62258.76', 
'62439.82', '421.80928', '26322162.21650143']]}, 'retExtInfo': {}, 'time': 1718761678876} + + def get_candles_ws_data_mock_1(self): + return { + "topic": "kline.5.BTCUSDT", + "data": [ + { + "start": 1672324800000, + "end": 1672325099999, + "interval": "5", + "open": "16649.5", + "close": "16677", + "high": "16677", + "low": "16608", + "volume": "2.081", + "turnover": "34666.4005", + "confirm": False, + "timestamp": 1672324988882 + } + ], + "ts": 1672324988882, + "type": "snapshot" + } + + def get_candles_ws_data_mock_2(self): + return { + "topic": "kline.5.BTCUSDT", + "data": [ + { + "start": 1672328400000, + "end": 1672331000000, + "interval": "5", + "open": "16649.5", + "close": "16677", + "high": "16677", + "low": "16608", + "volume": "2.081", + "turnover": "34666.4005", + "confirm": False, + "timestamp": 1672324988882 + } + ], + "ts": 1672324988882, + "type": "snapshot" + } + + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/test_gate_io_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/test_gate_io_perpetual_candles.py index e103853a80..66b76d8a13 100644 --- a/test/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/test_gate_io_perpetual_candles.py +++ b/test/hummingbot/data_feed/candles_feed/gate_io_perpetual_candles/test_gate_io_perpetual_candles.py @@ -1,19 +1,12 @@ import asyncio -import json -import re -import time -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.gate_io_perpetual_candles import GateioPerpetualCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.gate_io_perpetual_candles import GateioPerpetualCandles -class TestGateioPerpetualCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestGateioPerpetualCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -25,7 +18,7 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = cls.base_asset + "_" + cls.quote_asset - cls.quanto_multiplier = 0.0001 + cls.max_records = 150 def setUp(self) -> None: super().setUp() @@ -38,19 +31,15 @@ def setUp(self) -> None: self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret + @staticmethod + def get_fetch_candles_data_mock(): + return [[1685167200, '1.032', '1.032', '1.032', '1.032', 9.7151, '3580', 0, 0, 0], + [1685170800, '1.032', '1.032', '1.032', '1.032', 9.7151, '3580', 0, 0, 0], + [1685174400, '1.032', '1.032', '1.032', '1.032', 9.7151, '3580', 0, 0, 0], + [1685178000, '1.032', '1.032', '1.032', '1.032', 9.7151, '3580', 0, 0, 0]] - def get_candles_rest_data_mock(self): + @staticmethod + def get_candles_rest_data_mock(): data = [ { "t": 
1685167200, @@ -61,7 +50,7 @@ def get_candles_rest_data_mock(self): "o": "1.032", "sum": "3580" }, { - "t": 1685167300, + "t": 1685170800, "v": 97151, "c": "1.032", "h": "1.032", @@ -69,7 +58,7 @@ def get_candles_rest_data_mock(self): "o": "1.032", "sum": "3580" }, { - "t": 1685167400, + "t": 1685174400, "v": 97151, "c": "1.032", "h": "1.032", @@ -77,7 +66,7 @@ def get_candles_rest_data_mock(self): "o": "1.032", "sum": "3580" }, { - "t": 1685172600, + "t": 1685178000, "v": 97151, "c": "1.032", "h": "1.032", @@ -88,11 +77,13 @@ def get_candles_rest_data_mock(self): ] return data - def get_exchange_trading_pair_quanto_multiplier_data_mock(self): + @staticmethod + def get_exchange_trading_pair_quanto_multiplier_data_mock(): data = {"quanto_multiplier": 0.0001} return data - def get_candles_ws_data_mock_1(self): + @staticmethod + def get_candles_ws_data_mock_1(): data = { "time": 1542162490, "time_ms": 1542162490123, @@ -113,7 +104,8 @@ def get_candles_ws_data_mock_1(self): } return data - def get_candles_ws_data_mock_2(self): + @staticmethod + def get_candles_ws_data_mock_2(): data = { "time": 1542162490, "time_ms": 1542162490123, @@ -134,172 +126,6 @@ def get_candles_ws_data_mock_2(self): } return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1685167200 - end_time = 1685172600 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - self.assertEqual(resp.shape[0], len(data_mock)) - self.assertEqual(resp.shape[1], 10) - - @aioresponses() - def test_get_exchange_trading_pair_quanto_multiplier(self, mock_api: aioresponses): - url = CONSTANTS.REST_URL + CONSTANTS.CONTRACT_INFO_URL.format(contract=self.ex_trading_pair) - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_exchange_trading_pair_quanto_multiplier_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - resp = self.async_run_with_timeout(self.data_feed.get_exchange_trading_pair_quanto_multiplier()) - - self.assertEqual(resp, 0.0001) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "result": None, - "id": 1 - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "time": int(time.time()), - "channel": CONSTANTS.WS_CANDLES_ENDPOINT, - "event": "subscribe", - "payload": [self.interval, self.ex_trading_pair] - } - self.assertEqual(expected_kline_subscription["channel"], sent_subscription_messages[0]["channel"]) - 
self.assertEqual(expected_kline_subscription["payload"], sent_subscription_messages[0]["payload"]) - - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." - )) - - @patch("hummingbot.data_feed.candles_feed.gate_io_perpetual_candles.GateioPerpetualCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.gate_io_perpetual_candles.GateioPerpetualCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.gate_io_perpetual_candles.GateioPerpetualCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.gate_io_perpetual_candles.GateioPerpetualCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - 
fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.gate_io_perpetual_candles.GateioPerpetualCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/gate_io_spot_candles/test_gate_io_spot_candles.py b/test/hummingbot/data_feed/candles_feed/gate_io_spot_candles/test_gate_io_spot_candles.py index df9b45311b..2779f6a5b2 100644 --- a/test/hummingbot/data_feed/candles_feed/gate_io_spot_candles/test_gate_io_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/gate_io_spot_candles/test_gate_io_spot_candles.py @@ -2,9 +2,7 @@ import json import re import time -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from aioresponses import aioresponses @@ -12,8 +10,8 @@ from hummingbot.data_feed.candles_feed.gate_io_spot_candles import GateioSpotCandles, constants as CONSTANTS -class TestGateioSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestGateioSpotCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -25,6 +23,7 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = cls.base_asset + "_" + cls.quote_asset + cls.max_records = 150 def setUp(self) -> None: super().setUp() @@ -36,26 +35,42 @@ def setUp(self) -> None: self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() - def handle(self, record): - self.log_records.append(record) + @aioresponses() + def test_fetch_candles_raises_exception(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = 
self.get_candles_rest_data_mock() + mock_api.get(url=regex_url, body=json.dumps(data_mock)) + + with self.assertRaises(ValueError, msg="Gate.io REST API does not support fetching more than 10000 candles ago."): + self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + + @aioresponses() + def test_fetch_candles(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_candles_rest_data_mock() + mock_api.get(url=regex_url, body=json.dumps(data_mock)) + + self.start_time = int(time.time()) - (CONSTANTS.MAX_CANDLES_AGO - 1) * self.data_feed.interval_in_seconds + self.end_time = int(time.time()) - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) + candles = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + self.assertEqual(len(candles), len(data_mock)) - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 2): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret + def get_fetch_candles_data_mock(self): + return [[1685167200, '26728.1', '26736.1', '26718.4', '26718.4', '4.856410775', '129807.73747903012', 0, 0, 0], + [1685170800, '26718.4', '26758.1', '26709.2', '26746.2', '24.5891110488', '657338.79714685262', 0, 0, 0], + [1685174400, '26746.2', '26746.2', '26720', '26723.1', '7.5659923741', '202249.7345089816', 0, 0, 0], + [1685178000, '26723.1', '26723.1', '26710.1', '26723.1', '4.5305391649', '121057.96936704352', 0, 0, 0]] def get_candles_rest_data_mock(self): - data = [ + return [ ['1685167200', '129807.73747903012', '26718.4', '26736.1', '26718.4', '26728.1', '4.856410775'], - ['1685169000', '657338.79714685262', '26746.2', '26758.1', '26709.2', '26718.4', '24.5891110488'], - ['1685170800', '202249.7345089816', '26723.1', '26746.2', '26720', '26746.2', '7.5659923741'], - ['1685172600', '121057.96936704352', '26723.1', '26723.1', '26710.1', '26723.1', '4.5305391649'] + ['1685170800', '657338.79714685262', '26746.2', '26758.1', '26709.2', '26718.4', '24.5891110488'], + ['1685174400', '202249.7345089816', '26723.1', '26746.2', '26720', '26746.2', '7.5659923741'], + ['1685178000', '121057.96936704352', '26723.1', '26723.1', '26710.1', '26723.1', '4.5305391649'] ] - return data def get_candles_ws_data_mock_1(self): data = { @@ -95,162 +110,6 @@ def get_candles_ws_data_mock_2(self): } return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1685167200 - end_time = 1685172600 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - self.assertEqual(resp.shape[0], len(data_mock)) - self.assertEqual(resp.shape[1], 10) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "result": 
None, - "id": 1 - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "time": int(time.time()), - "channel": CONSTANTS.WS_CANDLES_ENDPOINT, - "event": "subscribe", - "payload": [self.interval, self.ex_trading_pair] - } - self.assertEqual(expected_kline_subscription["channel"], sent_subscription_messages[0]["channel"]) - self.assertEqual(expected_kline_subscription["payload"], sent_subscription_messages[0]["payload"]) - - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." - )) - - @patch("hummingbot.data_feed.candles_feed.gate_io_spot_candles.GateioSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.gate_io_spot_candles.GateioSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. 
Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.gate_io_spot_candles.GateioSpotCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.gate_io_spot_candles.GateioSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.gate_io_spot_candles.GateioSpotCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = 
self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/connector/exchange/coinbase_pro/__init__.py b/test/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/__init__.py similarity index 100% rename from test/hummingbot/connector/exchange/coinbase_pro/__init__.py rename to test/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/test_hyperliquid_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/test_hyperliquid_perpetual_candles.py new file mode 100644 index 0000000000..38c06324af --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/hyperliquid_perpetual_candles/test_hyperliquid_perpetual_candles.py @@ -0,0 +1,156 @@ +import asyncio +import json +import re +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from aioresponses import aioresponses + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.hyperliquid_perpetual_candles import HyperliquidPerpetualCandles + + +class TestHyperliquidPerpetualCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USD" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}" + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = HyperliquidPerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1718895600.0, '64942.0', '65123.0', '64812.0', '64837.0', '190.58479', 0.0, 1789, 0.0, 0.0], + [1718899200.0, '64837.0', '64964.0', '64564.0', '64898.0', '271.68638', 0.0, 2296, 0.0, 0.0], + [1718902800.0, '64900.0', '65034.0', '64714.0', '64997.0', '104.80095', 0.0, 1229, 0.0, 0.0], + [1718906400.0, '64999.0', '65244.0', '64981.0', '65157.0', '158.51753', 0.0, 1598, 0.0, 0.0], + [1718910000.0, '65153.0', '65153.0', '64882.0', '65095.0', '209.75558', 0.0, 1633, 0.0, 0.0]] + + def get_candles_rest_data_mock(self): + return [ + { + "t": 1718895600000, + "T": 1718899199999, + "s": "BTC", + "i": "1h", + "o": "64942.0", + "c": "64837.0", + "h": "65123.0", + "l": "64812.0", + "v": "190.58479", + "n": 1789 + }, + { + "t": 1718899200000, + "T": 1718902799999, + "s": "BTC", + "i": "1h", + "o": "64837.0", + "c": "64898.0", + "h": "64964.0", + "l": "64564.0", + "v": "271.68638", + "n": 2296 + }, + { + "t": 1718902800000, + "T": 1718906399999, + "s": "BTC", + "i": "1h", + "o": "64900.0", + "c": "64997.0", + "h": "65034.0", + "l": "64714.0", + "v": "104.80095", + "n": 1229 + }, + { + "t": 1718906400000, + 
"T": 1718909999999, + "s": "BTC", + "i": "1h", + "o": "64999.0", + "c": "65157.0", + "h": "65244.0", + "l": "64981.0", + "v": "158.51753", + "n": 1598 + }, + { + "t": 1718910000000, + "T": 1718913599999, + "s": "BTC", + "i": "1h", + "o": "65153.0", + "c": "65095.0", + "h": "65153.0", + "l": "64882.0", + "v": "209.75558", + "n": 1633 + } + ] + + def get_candles_ws_data_mock_1(self): + return { + "channel": "candle", + "data": { + "t": 1718914860000, + "T": 1718914919999, + "s": "BTC", + "i": "1h", + "o": "65162.0", + "c": "65156.0", + "h": "65162.0", + "l": "65156.0", + "v": "0.00296", + "n": 2 + } + } + + def get_candles_ws_data_mock_2(self): + return { + "channel": "candle", + "data": { + "t": 1718918460000, + "T": 1718922059999, + "s": "BTC", + "i": "1h", + "o": "65162.0", + "c": "65156.0", + "h": "65162.0", + "l": "65156.0", + "v": "0.00296", + "n": 2 + } + } + + @staticmethod + def _success_subscription_mock(): + return {} + + @aioresponses() + def test_fetch_candles(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_candles_rest_data_mock() + mock_api.post(url=regex_url, body=json.dumps(data_mock)) + + resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + + self.assertEqual(resp.shape[0], len(self.get_fetch_candles_data_mock())) + self.assertEqual(resp.shape[1], 10) diff --git a/test/hummingbot/core/mock_api/__init__.py b/test/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/__init__.py similarity index 100% rename from test/hummingbot/core/mock_api/__init__.py rename to test/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/test_hyperliquid_spot_candles.py b/test/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/test_hyperliquid_spot_candles.py new file mode 100644 index 0000000000..493ecd7907 --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/hyperliquid_spot_candles/test_hyperliquid_spot_candles.py @@ -0,0 +1,168 @@ +import asyncio +import json +import re +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from aioresponses import aioresponses + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.hyperliquid_spot_candles import HyperliquidSpotCandles + + +class TestHyperliquidSpotC0andles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "HFUN" + cls.quote_asset = "USDC" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}" + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = HyperliquidSpotCandles(trading_pair=self.trading_pair, interval=self.interval) + self.data_feed._coins_dict = {"USDC": 0, "HFUN": 1} + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1718895600.0, '7.8095', '7.8819', '7.7403', '7.765', '1746.14', 0.0, 267, 0.0, 0.0], + [1718899200.0, '7.765', '7.7882', '7.711', '7.7418', '2065.26', 0.0, 187, 0.0, 0.0], + 
[1718902800.0, '7.7418', '7.765', '7.7418', '7.7478', '1084.02', 0.0, 364, 0.0, 0.0], + [1718906400.0, '7.747', '7.7646', '7.5655', '7.5872', '3312.84', 0.0, 975, 0.0, 0.0], + [1718910000.0, '7.5887', '7.5937', '7.5276', '7.5379', '3316.37', 0.0, 934, 0.0, 0.0]] + + def get_candles_rest_data_mock(self): + return [ + { + "t": 1718895600000, + "T": 1718899199999, + "s": "@1", + "i": "1h", + "o": "7.8095", + "c": "7.765", + "h": "7.8819", + "l": "7.7403", + "v": "1746.14", + "n": 267 + }, + { + "t": 1718899200000, + "T": 1718902799999, + "s": "@1", + "i": "1h", + "o": "7.765", + "c": "7.7418", + "h": "7.7882", + "l": "7.711", + "v": "2065.26", + "n": 187 + }, + { + "t": 1718902800000, + "T": 1718906399999, + "s": "@1", + "i": "1h", + "o": "7.7418", + "c": "7.7478", + "h": "7.765", + "l": "7.7418", + "v": "1084.02", + "n": 364 + }, + { + "t": 1718906400000, + "T": 1718909999999, + "s": "@1", + "i": "1h", + "o": "7.747", + "c": "7.5872", + "h": "7.7646", + "l": "7.5655", + "v": "3312.84", + "n": 975 + }, + { + "t": 1718910000000, + "T": 1718913599999, + "s": "@1", + "i": "1h", + "o": "7.5887", + "c": "7.5379", + "h": "7.5937", + "l": "7.5276", + "v": "3316.37", + "n": 934 + } + ] + + def get_candles_ws_data_mock_1(self): + return { + "channel": "candle", + "data": { + "t": 1718914860000, + "T": 1718914919999, + "s": "@1", + "i": "1h", + "o": "65162.0", + "c": "65156.0", + "h": "65162.0", + "l": "65156.0", + "v": "0.00296", + "n": 2 + } + } + + def get_candles_ws_data_mock_2(self): + return { + "channel": "candle", + "data": { + "t": 1718918460000, + "T": 1718922059999, + "s": "@1", + "i": "1h", + "o": "65162.0", + "c": "65156.0", + "h": "65162.0", + "l": "65156.0", + "v": "0.00296", + "n": 2 + } + } + + @staticmethod + def _success_subscription_mock(): + return {} + + @aioresponses() + def test_fetch_candles(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_candles_rest_data_mock() + mock_api.post(url=regex_url, body=json.dumps(data_mock)) + + resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + + self.assertEqual(resp.shape[0], len(self.get_fetch_candles_data_mock())) + self.assertEqual(resp.shape[1], 10) + + @staticmethod + def get_universe_data_mock(): + return {'universe': [{'tokens': [1, 0], 'name': 'PURR/USDC', 'index': 0, 'isCanonical': True}, {'tokens': [2, 0], 'name': '@1', 'index': 1, 'isCanonical': False}, {'tokens': [3, 0], 'name': '@2', 'index': 2, 'isCanonical': False}, {'tokens': [4, 0], 'name': '@3', 'index': 3, 'isCanonical': False}, {'tokens': [5, 0], 'name': '@4', 'index': 4, 'isCanonical': False}, {'tokens': [6, 0], 'name': '@5', 'index': 5, 'isCanonical': False}, {'tokens': [7, 0], 'name': '@6', 'index': 6, 'isCanonical': False}, {'tokens': [8, 0], 'name': '@7', 'index': 7, 'isCanonical': False}, {'tokens': [9, 0], 'name': '@8', 'index': 8, 'isCanonical': False}, {'tokens': [10, 0], 'name': '@9', 'index': 9, 'isCanonical': False}, {'tokens': [11, 0], 'name': '@10', 'index': 10, 'isCanonical': False}, {'tokens': [12, 0], 'name': '@11', 'index': 11, 'isCanonical': False}, {'tokens': [13, 0], 'name': '@12', 'index': 12, 'isCanonical': False}, {'tokens': [14, 0], 'name': '@13', 'index': 13, 'isCanonical': False}, {'tokens': [15, 0], 'name': '@14', 'index': 14, 'isCanonical': False}, {'tokens': [16, 0], 'name': '@15', 'index': 15, 'isCanonical': False}, {'tokens': [17, 0], 'name': '@16', 'index': 16, 'isCanonical': False}, 
{'tokens': [18, 0], 'name': '@17', 'index': 17, 'isCanonical': False}, {'tokens': [19, 0], 'name': '@18', 'index': 18, 'isCanonical': False}, {'tokens': [20, 0], 'name': '@19', 'index': 19, 'isCanonical': False}], 'tokens': [{'name': 'USDC', 'szDecimals': 8, 'weiDecimals': 8, 'index': 0, 'tokenId': '0x6d1e7cde53ba9467b783cb7c530ce054', 'isCanonical': True}, {'name': 'PURR', 'szDecimals': 0, 'weiDecimals': 5, 'index': 1, 'tokenId': '0xc1fb593aeffbeb02f85e0308e9956a90', 'isCanonical': True}, {'name': 'HFUN', 'szDecimals': 2, 'weiDecimals': 8, 'index': 2, 'tokenId': '0xbaf265ef389da684513d98d68edf4eae', 'isCanonical': False}, {'name': 'LICK', 'szDecimals': 0, 'weiDecimals': 5, 'index': 3, 'tokenId': '0xba3aaf468f793d9b42fd3328e24f1de9', 'isCanonical': False}, {'name': 'MANLET', 'szDecimals': 0, 'weiDecimals': 5, 'index': 4, 'tokenId': '0xe9ced9225d2a69ccc8d6a5b224524b99', 'isCanonical': False}, {'name': 'JEFF', 'szDecimals': 0, 'weiDecimals': 5, 'index': 5, 'tokenId': '0xfcf28885456bf7e7cbe5b7a25407c5bc', 'isCanonical': False}, {'name': 'SIX', 'szDecimals': 2, 'weiDecimals': 8, 'index': 6, 'tokenId': '0x50a9391b4a40caffbe8b16303b95a0c1', 'isCanonical': False}, {'name': 'WAGMI', 'szDecimals': 2, 'weiDecimals': 8, 'index': 7, 'tokenId': '0x649efea44690cf88d464f512bc7e2818', 'isCanonical': False}, {'name': 'CAPPY', 'szDecimals': 0, 'weiDecimals': 5, 'index': 8, 'tokenId': '0x3f8abf62220007cc7ab6d33ef2963d88', 'isCanonical': False}, {'name': 'POINTS', 'szDecimals': 0, 'weiDecimals': 5, 'index': 9, 'tokenId': '0xbb03842e1f71ed27ed8fa012b29affd4', 'isCanonical': False}, {'name': 'TRUMP', 'szDecimals': 2, 'weiDecimals': 7, 'index': 10, 'tokenId': '0x368cb581f0d51e21aa19996d38ffdf6f', 'isCanonical': False}, {'name': 'GMEOW', 'szDecimals': 0, 'weiDecimals': 8, 'index': 11, 'tokenId': '0x07615193eaa63d1da6feda6e0ac9e014', 'isCanonical': False}, {'name': 'PEPE', 'szDecimals': 2, 'weiDecimals': 7, 'index': 12, 'tokenId': '0x79b6e1596ea0deb2e6912ff8392c9325', 'isCanonical': False}, {'name': 'XULIAN', 'szDecimals': 0, 'weiDecimals': 5, 'index': 13, 'tokenId': '0x6cc648be7e4c38a8c7fcd8bfa6714127', 'isCanonical': False}, {'name': 'RUG', 'szDecimals': 0, 'weiDecimals': 5, 'index': 14, 'tokenId': '0x4978f3f49f30776d9d7397b873223c2d', 'isCanonical': False}, {'name': 'ILIENS', 'szDecimals': 0, 'weiDecimals': 5, 'index': 15, 'tokenId': '0xa74984ea379be6d899c1bf54db923604', 'isCanonical': False}, {'name': 'FUCKY', 'szDecimals': 2, 'weiDecimals': 8, 'index': 16, 'tokenId': '0x7de5b7a8c115edf0174333446ba0ea78', 'isCanonical': False}, {'name': 'CZ', 'szDecimals': 2, 'weiDecimals': 7, 'index': 17, 'tokenId': '0x3b5ff6cb91f71032578b53960090adfb', 'isCanonical': False}, {'name': 'BAGS', 'szDecimals': 0, 'weiDecimals': 5, 'index': 18, 'tokenId': '0x979978fd8cb07141f97dcab921ba697a', 'isCanonical': False}, {'name': 'ANSEM', 'szDecimals': 0, 'weiDecimals': 5, 'index': 19, 'tokenId': '0xa96cfac10eaecba151f646c5cb4c5507', 'isCanonical': False}, {'name': 'TATE', 'szDecimals': 0, 'weiDecimals': 5, 'index': 20, 'tokenId': '0xfba416cad5d8944e954deb6bfb2a8672', 'isCanonical': False}, {'name': 'FUN', 'szDecimals': 1, 'weiDecimals': 6, 'index': 21, 'tokenId': '0x3dc9f93c39ddd9f0182ad1e584bae0d4', 'isCanonical': False}, {'name': 'SUCKY', 'szDecimals': 0, 'weiDecimals': 5, 'index': 22, 'tokenId': '0xfd2ac85551ac85d3f04369e296ed8cd3', 'isCanonical': False}, {'name': 'BIGBEN', 'szDecimals': 2, 'weiDecimals': 8, 'index': 23, 'tokenId': '0x231f2a687770b13fe12adb1f339ff722', 'isCanonical': False}, {'name': 'KOBE', 'szDecimals': 0, 
'weiDecimals': 5, 'index': 24, 'tokenId': '0x0d2556646326733d86c3fc4c2fa22ad4', 'isCanonical': False}, {'name': 'VEGAS', 'szDecimals': 2, 'weiDecimals': 8, 'index': 25, 'tokenId': '0xb693d596cd02f5f38e532e647bb43b69', 'isCanonical': False}]} + + @aioresponses() + def test_initialize_coins_dict(self, mock_api): + url = self.data_feed.rest_url + mock_api.post(url=url, payload=self.get_universe_data_mock()) + self.async_run_with_timeout(self.data_feed._initialize_coins_dict()) + self.assertEqual(self.data_feed._universe, self.get_universe_data_mock()) diff --git a/test/hummingbot/data_feed/candles_feed/kraken_spot_candles/test_kraken_spot_candles.py b/test/hummingbot/data_feed/candles_feed/kraken_spot_candles/test_kraken_spot_candles.py index ff749d4d6f..36c9d8610c 100644 --- a/test/hummingbot/data_feed/candles_feed/kraken_spot_candles/test_kraken_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/kraken_spot_candles/test_kraken_spot_candles.py @@ -1,9 +1,8 @@ import asyncio import json import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch +import time +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from aioresponses import aioresponses @@ -11,8 +10,8 @@ from hummingbot.data_feed.candles_feed.kraken_spot_candles import KrakenSpotCandles, constants as CONSTANTS -class TestKrakenSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestKrakenSpotCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -25,6 +24,7 @@ def setUpClass(cls) -> None: cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = f"XBT{cls.quote_asset}" cls.ws_ex_trading_pair = f"XBT/{cls.quote_asset}" + cls.max_records = CONSTANTS.MAX_RESULTS_PER_CANDLESTICK_REST_REQUEST def setUp(self) -> None: super().setUp() @@ -36,51 +36,86 @@ def setUp(self) -> None: self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret + def _candles_data_mock(self): + return [[1716127200, '66934.0', '66951.8', '66800.0', '66901.6', '28.50228560', 1906800.0564114398, 0, 0, 0], + [1716130800, '66901.7', '66989.3', '66551.7', '66669.9', '53.13722207', 3546489.7891181475, 0, 0, 0], + [1716134400, '66669.9', '66797.5', '66595.1', '66733.4', '40.08457819', 2673585.246863534, 0, 0, 0], + [1716138000, '66733.4', '66757.4', '66550.0', '66575.4', '21.05882277', 1403517.8905635749, 0, 0, 0]] def get_candles_rest_data_mock(self): data = { - "error": [ - - ], + "error": [], "result": { - "XBTUSDT": [ + self.ex_trading_pair: [ [ - 1706374800, - "41803.5", - "41849.7", - "41782.9", - "41784.0", - "41817.4", - "0.48046825", - 87 + 1716130800, + "66934.0", + "66951.8", + "66800.0", + "66901.6", + "66899.9", + "28.50228560", + 763 ], [ - 1706378400, - "41793.3", - "41812.9", - "41756.0", - "41797.9", - "41788.3", - "0.09876552", - 17 - ] + 1716134400, + "66901.7", + "66989.3", + "66551.7", + "66669.9", + "66742.1", + "53.13722207", + 1022 + ], + [ + 1716138000, + "66669.9", + "66797.5", + "66595.1", + "66733.4", + "66698.6", + 
"40.08457819", + 746 + ], + [ + 1716141600, + "66733.4", + "66757.4", + "66550.0", + "66575.4", + "66647.5", + "21.05882277", + 702 + ], ], - "last": 1706374800 + "last": 1718715600 } } return data + @aioresponses() + def test_fetch_candles_raises_exception(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_candles_rest_data_mock() + mock_api.get(url=regex_url, body=json.dumps(data_mock)) + + with self.assertRaises(ValueError, msg="Gate.io REST API does not support fetching more than 10000 candles ago."): + self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + + @aioresponses() + def test_fetch_candles(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_candles_rest_data_mock() + mock_api.get(url=regex_url, body=json.dumps(data_mock)) + + self.start_time = int(time.time()) - (CONSTANTS.MAX_CANDLES_AGO - 1) * self.data_feed.interval_in_seconds + self.end_time = int(time.time()) + + candles = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) + self.assertEqual(len(candles), len(data_mock["result"][self.ex_trading_pair])) + def get_candles_ws_data_mock_1(self): data = [ 42, @@ -119,165 +154,5 @@ def get_candles_ws_data_mock_2(self): ] return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1706371200 - end_time = 1706378401 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - self.assertEqual(resp.shape[0], len(data_mock["result"])) - self.assertEqual(resp.shape[1], 10) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "result": None, - "id": 1 - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - - expected_kline_subscription = { - "event": "subscribe", - "pair": [self.ws_ex_trading_pair], - "subscription": {"name": CONSTANTS.WS_CANDLES_ENDPOINT, "interval": int(CONSTANTS.INTERVALS[self.interval])} - } - self.assertEqual(expected_kline_subscription["subscription"], sent_subscription_messages[0]["subscription"]) - self.assertEqual(expected_kline_subscription["pair"], sent_subscription_messages[0]["pair"]) - - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." 
- )) - - @patch("hummingbot.data_feed.candles_feed.kraken_spot_candles.KrakenSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.kraken_spot_candles.KrakenSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait(), timeout=1) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.kraken_spot_candles.KrakenSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.kraken_spot_candles.KrakenSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - 
self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.data_feed._time = MagicMock(return_value=5) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.kraken_spot_candles.KrakenSpotCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + def _success_subscription_mock(self): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/binance_perpetuals_candles/__init__.py b/test/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/__init__.py similarity index 100% rename from test/hummingbot/data_feed/candles_feed/binance_perpetuals_candles/__init__.py rename to test/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/test_kucoin_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/test_kucoin_perpetual_candles.py new file mode 100644 index 0000000000..64bcdcb2eb --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/kucoin_perpetual_candles/test_kucoin_perpetual_candles.py @@ -0,0 +1,478 @@ +import asyncio +import json +import re +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from aioresponses import aioresponses + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.core.utils.tracking_nonce import get_tracking_nonce +from hummingbot.data_feed.candles_feed.kucoin_perpetual_candles import KucoinPerpetualCandles + + +class TestKucoinPerpetualCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.kucoin_base_asset = "XBT" + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.kucoin_base_asset}-{cls.quote_asset}" + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = KucoinPerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + 
self.data_feed.symbols_dict = self.get_symbols_dict_mock() + self.data_feed._ws_url = "wss://api.kucoin.com" + self.data_feed._ws_token = "test" + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + @staticmethod + def get_symbols_dict_mock(): + return { + "XBT-USDT": "XBTUSDTM", + "ETH-USDT": "ETHUSDTM", + "SOL-USDT": "SOLUSDTM", + "WIF-USDT": "WIFUSDTM" + } + + @staticmethod + def get_symbols_response_mock(): + return { + "code": "200000", + "data": [ + { + "symbol": "XBTUSDTM", + "rootSymbol": "USDT", + "type": "FFWCSX", + "firstOpenDate": 1585555200000, + "expireDate": "", + "settleDate": "", + "baseCurrency": "XBT", + "quoteCurrency": "USDT", + "settleCurrency": "USDT", + "maxOrderQty": 1000000, + "maxPrice": 1000000, + "lotSize": 1, + "tickSize": 0.1, + "indexPriceTickSize": 0.01, + "multiplier": 0.001, + "initialMargin": 0.008, + "maintainMargin": 0.004, + "maxRiskLimit": 100000, + "minRiskLimit": 100000, + "riskStep": 50000, + "makerFeeRate": 0.0002, + "takerFeeRate": 0.0006, + "takerFixFee": 0, + "makerFixFee": 0, + "settlementFee": "", + "isDeleverage": True, + "isQuanto": True, + "isInverse": False, + "markMethod": "FairPrice", + "fairMethod": "FundingRate", + "fundingBaseSymbol": ".XBTINT8H", + "fundingQuoteSymbol": ".USDTINT8H", + "fundingRateSymbol": ".XBTUSDTMFPI8H", + "indexSymbol": ".KXBTUSDT", + "settlementSymbol": "", + "status": "Open", + "fundingFeeRate": 0.000132, + "predictedFundingFeeRate": 0.000176, + "fundingRateGranularity": 28800000, + "openInterest": "8306597", + "turnoverOf24h": 560148040.312645, + "volumeOf24h": 8544.241, + "markPrice": 64681.31, + "indexPrice": 64681.1, + "lastTradePrice": 64679.9, + "nextFundingRateTime": 7466987, + "maxLeverage": 125, + "sourceExchanges": [ + "okex", + "binance", + "kucoin", + "bybit", + "bitget", + "bitmart", + "gateio" + ], + "premiumsSymbol1M": ".XBTUSDTMPI", + "premiumsSymbol8H": ".XBTUSDTMPI8H", + "fundingBaseSymbol1M": ".XBTINT", + "fundingQuoteSymbol1M": ".USDTINT", + "lowPrice": 64278, + "highPrice": 67277.7, + "priceChgPct": -0.0245, + "priceChg": -1629.5 + }, + { + "symbol": "ETHUSDTM", + "rootSymbol": "USDT", + "type": "FFWCSX", + "firstOpenDate": 1591086000000, + "expireDate": "", + "settleDate": "", + "baseCurrency": "ETH", + "quoteCurrency": "USDT", + "settleCurrency": "USDT", + "maxOrderQty": 1000000, + "maxPrice": 1000000, + "lotSize": 1, + "tickSize": 0.01, + "indexPriceTickSize": 0.01, + "multiplier": 0.01, + "initialMargin": 0.01, + "maintainMargin": 0.005, + "maxRiskLimit": 100000, + "minRiskLimit": 100000, + "riskStep": 50000, + "makerFeeRate": 0.0002, + "takerFeeRate": 0.0006, + "takerFixFee": 0, + "makerFixFee": 0, + "settlementFee": "", + "isDeleverage": True, + "isQuanto": True, + "isInverse": False, + "markMethod": "FairPrice", + "fairMethod": "FundingRate", + "fundingBaseSymbol": ".ETHINT8H", + "fundingQuoteSymbol": ".USDTINT8H", + "fundingRateSymbol": ".ETHUSDTMFPI8H", + "indexSymbol": ".KETHUSDT", + "settlementSymbol": "", + "status": "Open", + "fundingFeeRate": 0.000094, + "predictedFundingFeeRate": 0.000074, + "fundingRateGranularity": 28800000, + "openInterest": "6506611", + "turnoverOf24h": 237761018.67718124, + "volumeOf24h": 69065.8, + "markPrice": 3409.13, + "indexPrice": 3409.11, + "lastTradePrice": 3409.38, + "nextFundingRateTime": 7466984, + "maxLeverage": 100, + "sourceExchanges": [ + "okex", + "binance", + "kucoin", + "gateio", + "bybit", + "bitmart", + "bitget" + ], + 
"premiumsSymbol1M": ".ETHUSDTMPI", + "premiumsSymbol8H": ".ETHUSDTMPI8H", + "fundingBaseSymbol1M": ".ETHINT", + "fundingQuoteSymbol1M": ".USDTINT", + "lowPrice": 3350, + "highPrice": 3578.04, + "priceChgPct": -0.0371, + "priceChg": -131.59 + }, + { + "symbol": "SOLUSDTM", + "rootSymbol": "USDT", + "type": "FFWCSX", + "firstOpenDate": 1614153600000, + "expireDate": "", + "settleDate": "", + "baseCurrency": "SOL", + "quoteCurrency": "USDT", + "settleCurrency": "USDT", + "maxOrderQty": 1000000, + "maxPrice": 1000000, + "lotSize": 1, + "tickSize": 0.001, + "indexPriceTickSize": 0.001, + "multiplier": 0.1, + "initialMargin": 0.014, + "maintainMargin": 0.007, + "maxRiskLimit": 50000, + "minRiskLimit": 50000, + "riskStep": 25000, + "makerFeeRate": 0.0002, + "takerFeeRate": 0.0006, + "takerFixFee": 0, + "makerFixFee": 0, + "settlementFee": "", + "isDeleverage": True, + "isQuanto": False, + "isInverse": False, + "markMethod": "FairPrice", + "fairMethod": "FundingRate", + "fundingBaseSymbol": ".SOLINT8H", + "fundingQuoteSymbol": ".USDTINT8H", + "fundingRateSymbol": ".SOLUSDTMFPI8H", + "indexSymbol": ".KSOLUSDT", + "settlementSymbol": "", + "status": "Open", + "fundingFeeRate": -0.000027, + "predictedFundingFeeRate": 0.000012, + "fundingRateGranularity": 28800000, + "openInterest": "7254789", + "turnoverOf24h": 194771311.87900543, + "volumeOf24h": 1422531.1, + "markPrice": 133.026, + "indexPrice": 133.031, + "lastTradePrice": 133.002, + "nextFundingRateTime": 7466981, + "maxLeverage": 75, + "sourceExchanges": [ + "binance", + "okex", + "gateio", + "bybit", + "kucoin" + ], + "premiumsSymbol1M": ".SOLUSDTMPI", + "premiumsSymbol8H": ".SOLUSDTMPI8H", + "fundingBaseSymbol1M": ".SOLINT", + "fundingQuoteSymbol1M": ".USDTINT", + "lowPrice": 125.847, + "highPrice": 146.808, + "priceChgPct": -0.0783, + "priceChg": -11.303 + }, + { + "symbol": "WIFUSDTM", + "rootSymbol": "USDT", + "type": "FFWCSX", + "firstOpenDate": 1707292800000, + "expireDate": "", + "settleDate": "", + "baseCurrency": "WIF", + "quoteCurrency": "USDT", + "settleCurrency": "USDT", + "maxOrderQty": 1000000, + "maxPrice": 1000000, + "lotSize": 1, + "tickSize": 0.0001, + "indexPriceTickSize": 0.0001, + "multiplier": 10, + "initialMargin": 0.014, + "maintainMargin": 0.007, + "maxRiskLimit": 25000, + "minRiskLimit": 25000, + "riskStep": 12500, + "makerFeeRate": 0.0002, + "takerFeeRate": 0.0006, + "takerFixFee": 0, + "makerFixFee": 0, + "settlementFee": "", + "isDeleverage": True, + "isQuanto": False, + "isInverse": False, + "markMethod": "FairPrice", + "fairMethod": "FundingRate", + "fundingBaseSymbol": ".WIFINT8H", + "fundingQuoteSymbol": ".USDTINT8H", + "fundingRateSymbol": ".WIFUSDTMFPI8H", + "indexSymbol": ".KWIFUSDT", + "settlementSymbol": "", + "status": "Open", + "fundingFeeRate": 0.000131, + "predictedFundingFeeRate": 0.000045, + "fundingRateGranularity": 28800000, + "openInterest": "626433", + "turnoverOf24h": 55265460.63083267, + "volumeOf24h": 26115500, + "markPrice": 1.9407, + "indexPrice": 1.9405, + "lastTradePrice": 1.9405, + "nextFundingRateTime": 7466978, + "maxLeverage": 75, + "sourceExchanges": [ + "gateio", + "bitmart", + "kucoin", + "mexc", + "bitget", + "binance" + ], + "premiumsSymbol1M": ".WIFUSDTMPI", + "premiumsSymbol8H": ".WIFUSDTMPI8H", + "fundingBaseSymbol1M": ".WIFINT", + "fundingQuoteSymbol1M": ".USDTINT", + "lowPrice": 1.9206, + "highPrice": 2.4554, + "priceChgPct": -0.1912, + "priceChg": -0.457 + } + ] + } + + def get_fetch_candles_data_mock(self): + return [ + [1672981200, '16823.24000000', '16792.12000000', 
'16810.18000000', '16823.63000000', '6230.44034000', 0.0, + 0.0, 0.0, 0.0], + [1672984800, '16809.74000000', '16779.96000000', '16786.86000000', '16816.45000000', '6529.22759000', 0.0, + 0.0, 0.0, 0.0], + [1672988400, '16786.60000000', '16780.15000000', '16794.06000000', '16802.87000000', '5763.44917000', 0.0, + 0.0, 0.0, 0.0], + [1672992000, '16794.33000000', '16791.47000000', '16802.11000000', '16812.22000000', '5475.13940000', 0.0, + 0.0, 0.0, 0.0], + ] + + def get_candles_rest_data_mock(self): + data = [ + [ + 1672981200, + "16823.24000000", + "16823.63000000", + "16792.12000000", + "16810.18000000", + "6230.44034000", + ], + [ + 1672984800, + "16809.74000000", + "16816.45000000", + "16779.96000000", + "16786.86000000", + "6529.22759000" + ], + [ + 1672988400, + "16786.60000000", + "16802.87000000", + "16780.15000000", + "16794.06000000", + "5763.44917000" + ], + [ + 1672992000, + "16794.33000000", + "16812.22000000", + "16791.47000000", + "16802.11000000", + "5475.13940000" + ], + ] + return {"code": "200000", "data": data} + + def get_candles_ws_data_mock_1(self): + data = { + "type": "message", + "topic": "/market/candles:XBT-USDT_1hour", + "subject": "trade.candles.update", + "data": { + "symbol": "XBT-USDT", # symbol + "candles": [ + "1589968800", # Start time of the candle cycle + "9786.9", # open price + "9740.8", # close price + "9806.1", # high price + "9732", # low price + "27.45649579", # Transaction volume + "268280.09830877" # Transaction amount + ], + "time": 1589970010253893337 # now(us) + } + } + return data + + def get_candles_ws_data_mock_2(self): + data = { + "type": "message", + "topic": "/market/candles:XBT-USDT_1hour", + "subject": "trade.candles.update", + "data": { + "symbol": "XBT-USDT", # symbol + "candles": [ + "1589972400", # Start time of the candle cycle + "9786.9", # open price + "9740.8", # close price + "9806.1", # high price + "9732", # low price + "27.45649579", # Transaction volume + "268280.09830877" # Transaction amount + ], + "time": 1589970010253893337 # now(us) + } + } + return data + + @staticmethod + def _success_subscription_mock(): + return {'id': str(get_tracking_nonce()), + 'privateChannel': False, + 'response': False, + 'topic': '/market/candles:XBT-USDT_1hour', + 'type': 'subscribe'} + + @staticmethod + def get_public_token_response_mock(): + return { + "code": "200000", + "data": { + "token": "2neAiuYvAU61ZDXANAGAsiL4-iAExhsBXZxftpOeh_55i3Ysy2q2LEsEWU64mdzUOPusi34M_wGoSf7iNyEWJ4aBZXpWhrmY9jKtqkdWoFa75w3istPvPtiYB9J6i9GjsxUuhPw3BlrzazF6ghq4L_u0MhKxG3x8TeN4aVbNiYo=.mvnekBb8DJegZIgYLs2FBQ==", + "instanceServers": [ + { + "endpoint": "wss://ws-api-spot.kucoin.com/", + "encrypt": True, + "protocol": "websocket", + "pingInterval": 18000, + "pingTimeout": 10000 + } + ] + } + } + + @aioresponses() + def test_get_ws_token(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.public_ws_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_public_token_response_mock() + mock_api.post(url=regex_url, body=json.dumps(data_mock)) + + self.data_feed._ws_token = None + self.data_feed._ws_url = None + + self.async_run_with_timeout(self.data_feed._get_ws_token(), timeout=5) + + self.assertEqual(self.data_feed._ws_token, data_mock["data"]["token"]) + self.assertEqual(self.data_feed._ws_url, data_mock["data"]["instanceServers"][0]["endpoint"]) + + @aioresponses() + def test_get_ws_token_raises_exception(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.public_ws_url}".replace(".", r"\.").replace("?", r"\?")) + 
mock_api.post(url=regex_url, status=500) + + self.data_feed._ws_token = None + self.data_feed._ws_url = None + + with self.assertRaises(Exception): + self.async_run_with_timeout(self.data_feed._get_ws_token(), timeout=5) + + @aioresponses() + def test_get_symbols_dict(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.symbols_url}".replace(".", r"\.").replace("?", r"\?")) + data_mock = self.get_symbols_response_mock() + mock_api.get(url=regex_url, body=json.dumps(data_mock)) + + self.async_run_with_timeout(self.data_feed._get_symbols_dict(), timeout=5) + + self.assertEqual(self.data_feed.symbols_dict, self.get_symbols_dict_mock()) + + @aioresponses() + def test_get_symbols_dict_raises_exception(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.symbols_url}".replace(".", r"\.").replace("?", r"\?")) + mock_api.get(url=regex_url, status=500) + + with self.assertRaises(Exception): + self.async_run_with_timeout(self.data_feed._get_symbols_dict(), timeout=5) diff --git a/test/hummingbot/data_feed/candles_feed/kucoin_spot_candles/test_kucoin_spot_candles.py b/test/hummingbot/data_feed/candles_feed/kucoin_spot_candles/test_kucoin_spot_candles.py index 71fd9fe232..546d1321a4 100644 --- a/test/hummingbot/data_feed/candles_feed/kucoin_spot_candles/test_kucoin_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/kucoin_spot_candles/test_kucoin_spot_candles.py @@ -1,18 +1,13 @@ import asyncio -import json -import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.kucoin_spot_candles import KucoinSpotCandles, constants as CONSTANTS +from hummingbot.core.utils.tracking_nonce import get_tracking_nonce +from hummingbot.data_feed.candles_feed.kucoin_spot_candles import KucoinSpotCandles -class TestKucoinSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestKucoinSpotCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -24,6 +19,7 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.max_records = 150 def setUp(self) -> None: super().setUp() @@ -35,17 +31,13 @@ def setUp(self) -> None: self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret + def get_fetch_candles_data_mock(self): + return [ + [1672981200, '16823.24000000', '16792.12000000', '16810.18000000', '16823.63000000', '6230.44034000', 1672984799999, 0.0, 0.0, 0.0], + [1672984800, '16809.74000000', '16779.96000000', '16786.86000000', '16816.45000000', '6529.22759000', 1672988399999, 0.0, 0.0, 0.0], + [1672988400, '16786.60000000', '16780.15000000', '16794.06000000', '16802.87000000', '5763.44917000', 1672991999999, 0.0, 0.0, 0.0], + [1672992000, '16794.33000000', 
'16791.47000000', '16802.11000000', '16812.22000000', '5475.13940000', 1672995599999, 0.0, 0.0, 0.0], + ] def get_candles_rest_data_mock(self): data = [ @@ -130,205 +122,10 @@ def get_candles_ws_data_mock_2(self): } return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1672981200 - end_time = 1672992000 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}?endAt={end_time}&startAt={start_time}" \ - f"&symbol={self.ex_trading_pair}&type={CONSTANTS.INTERVALS[self.interval]}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - data_mock = self.get_candles_rest_data_mock() - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time * 1000, end_time=end_time * 1000)) - - self.assertEqual(resp.shape[0], len(data_mock["data"])) - self.assertEqual(resp.shape[1], 10) - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - @staticmethod - def _token_generation_for_ws_subscription_mock_response(): - return { - "code": "200000", - "data": { - "instanceServers": [ - { - "endpoint": "wss://test.url/endpoint", - "protocol": "websocket", - "encrypt": True, - "pingInterval": 50000, - "pingTimeout": 10000 - } - ], - "token": "testToken" - } - } - - @aioresponses() - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, mock_api, ws_connect_mock): - url = self.data_feed.public_ws_url - - resp = self._token_generation_for_ws_subscription_mock_response() - - mock_api.post(url, body=json.dumps(resp)) - - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "id": "hQvf8jkno", - "type": "welcome" - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(2, len(sent_subscription_messages)) - expected_topic = f"/market/candles:{self.ex_trading_pair.upper()}_{CONSTANTS.INTERVALS[self.interval]}" - self.assertEqual(expected_topic, sent_subscription_messages[0]["topic"]) - self.assertTrue(self.is_logged( - "INFO", - "Subscribed to public klines..." 
- )) - - @aioresponses() - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_api, ws_connect_mock): - url = self.data_feed.public_ws_url - - resp = self._token_generation_for_ws_subscription_mock_response() - mock_api.post(url, body=json.dumps(resp)) - - ws_connect_mock.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.kucoin_spot_candles.KucoinSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait(), timeout=1) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @aioresponses() - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - @patch("hummingbot.data_feed.candles_feed.kucoin_spot_candles.KucoinSpotCandles.fill_historical_candles", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, mock_api, fill_historical_candles_mock, ws_connect_mock): - url = self.data_feed.public_ws_url - - resp = self._token_generation_for_ws_subscription_mock_response() - mock_api.post(url, body=json.dumps(resp)) - - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @aioresponses() - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - @patch("hummingbot.data_feed.candles_feed.kucoin_spot_candles.KucoinSpotCandles.fill_historical_candles", - new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, mock_api, fill_historical_candles, ws_connect_mock): - url 
= self.data_feed.public_ws_url - - resp = self._token_generation_for_ws_subscription_mock_response() - mock_api.post(url, body=json.dumps(resp)) - - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.data_feed._time = MagicMock(return_value=5) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @aioresponses() - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, mock_api, ws_connect_mock): - url = self.data_feed.public_ws_url - - resp = self._token_generation_for_ws_subscription_mock_response() - mock_api.post(url, body=json.dumps(resp)) - - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_2())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + def _success_subscription_mock(): + return {'id': str(get_tracking_nonce()), + 'privateChannel': False, + 'response': False, + 'topic': '/market/candles:BTC-USDT_1hour', + 'type': 'subscribe'} diff --git a/test/hummingbot/pmm_script/__init__.py b/test/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/__init__.py similarity index 100% rename from test/hummingbot/pmm_script/__init__.py rename to test/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/__init__.py diff --git a/test/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/test_mexc_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/test_mexc_perpetual_candles.py new file mode 100644 index 0000000000..192fb57e8a --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/mexc_perpetual_candles/test_mexc_perpetual_candles.py @@ -0,0 +1,159 @@ +import asyncio +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.mexc_perpetual_candles import MexcPerpetualCandles + + +class TestMexcPerpetualCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + 
cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}_{cls.quote_asset}" + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = MexcPerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1717632000, 3868.6, 3870, 3860.14, 3862.3, 851390, 32903657.9187, 0.0, 0.0, 0.0], + [1717635600, 3862.3, 3873.61, 3856.32, 3864.04, 705088, 27251412.0495, 0.0, 0.0, 0.0], + [1717639200, 3864.04, 3881.99, 3862.3, 3871.27, 608801, 23576631.8815, 0.0, 0.0, 0.0], + [1717642800, 3871.27, 3876.18, 3862.99, 3864.01, 484966, 18769321.3995, 0.0, 0.0, 0.0]] + + def get_candles_rest_data_mock(self): + return { + "success": True, + "code": 0, + "data": { + "time": [ + 1717632000, + 1717635600, + 1717639200, + 1717642800 + ], + "open": [ + 3868.6, + 3862.3, + 3864.04, + 3871.27 + ], + "close": [ + 3862.3, + 3864.04, + 3871.27, + 3864.01 + ], + "high": [ + 3870, + 3873.61, + 3881.99, + 3876.18 + ], + "low": [ + 3860.14, + 3856.32, + 3862.3, + 3862.99 + ], + "vol": [ + 851390, + 705088, + 608801, + 484966 + ], + "amount": [ + 32903657.9187, + 27251412.0495, + 23576631.8815, + 18769321.3995 + ], + "realOpen": [ + 3868.61, + 3862.29, + 3864.04, + 3871.26 + ], + "realClose": [ + 3862.3, + 3864.04, + 3871.27, + 3864.01 + ], + "realHigh": [ + 3870, + 3873.61, + 3881.99, + 3876.18 + ], + "realLow": [ + 3860.14, + 3856.32, + 3862.3, + 3862.99 + ] + } + } + + def get_candles_ws_data_mock_1(self): + return { + "symbol": "BTC_USDT", + "data": { + "symbol": "BTC_USDT", + "interval": "Min60", + "t": 1718751060, + "o": 65213.5, + "c": 65210.5, + "h": 65233.5, + "l": 65208.5, + "a": 3344326.96161, + "q": 512797, + "ro": 65213.4, + "rc": 65210.5, + "rh": 65233.5, + "rl": 65208.5 + }, + "channel": "push.kline", + "ts": 1718751106472 + } + + def get_candles_ws_data_mock_2(self): + return { + "symbol": "BTC_USDT", + "data": { + "symbol": "BTC_USDT", + "interval": "Min60", + "t": 1718751061, + "o": 65213.5, + "c": 65210.5, + "h": 65233.5, + "l": 65208.5, + "a": 3344326.96161, + "q": 512797, + "ro": 65213.4, + "rc": 65210.5, + "rh": 65233.5, + "rl": 65208.5 + }, + "channel": "push.kline", + "ts": 1718751106472 + } + + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/mexc_spot_candles/__init__.py b/test/hummingbot/data_feed/candles_feed/mexc_spot_candles/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/hummingbot/data_feed/candles_feed/mexc_spot_candles/test_mexc_spot_candles.py b/test/hummingbot/data_feed/candles_feed/mexc_spot_candles/test_mexc_spot_candles.py new file mode 100644 index 0000000000..ccf6087ad5 --- /dev/null +++ b/test/hummingbot/data_feed/candles_feed/mexc_spot_candles/test_mexc_spot_candles.py @@ -0,0 +1,141 @@ +import asyncio +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase + +from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant +from hummingbot.data_feed.candles_feed.mexc_spot_candles import MexcSpotCandles + + +class TestMexcSpotCandles(TestCandlesBase): + __test__ = True + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.ev_loop = 
asyncio.get_event_loop() + cls.base_asset = "BTC" + cls.quote_asset = "USDT" + cls.interval = "1h" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.max_records = 150 + + def setUp(self) -> None: + super().setUp() + self.mocking_assistant = NetworkMockingAssistant() + self.data_feed = MexcSpotCandles(trading_pair=self.trading_pair, interval=self.interval) + + self.log_records = [] + self.data_feed.logger().setLevel(1) + self.data_feed.logger().addHandler(self) + self.resume_test_event = asyncio.Event() + + def get_fetch_candles_data_mock(self): + return [[1718726400.0, '64698.66', '64868.98', '64336', '64700.98', '575.730117', '37155549.91', 0.0, 0.0, 0.0], + [1718730000.0, '64700.98', '64904.91', '64400', '64603.99', '917.852709', '59373594.99', 0.0, 0.0, 0.0], + [1718733600.0, '64603.99', '64867.88', '64321', '64678.01', '1007.168584', '65139730.47', 0.0, 0.0, + 0.0], + [1718737200.0, '64678.01', '64738.83', '64066.01', '64422.01', '862.944706', '55564341.51', 0.0, 0.0, + 0.0], + [1718740800.0, '64422.01', '64683.84', '64178.1', '64565.49', '552.774673', '35628336.98', 0.0, 0.0, + 0.0]] + + def get_candles_rest_data_mock(self): + return [ + [ + 1718726400000, + "64698.66", + "64868.98", + "64336", + "64700.98", + "575.730117", + 1718730000000, + "37155549.91" + ], + [ + 1718730000000, + "64700.98", + "64904.91", + "64400", + "64603.99", + "917.852709", + 1718733600000, + "59373594.99" + ], + [ + 1718733600000, + "64603.99", + "64867.88", + "64321", + "64678.01", + "1007.168584", + 1718737200000, + "65139730.47" + ], + [ + 1718737200000, + "64678.01", + "64738.83", + "64066.01", + "64422.01", + "862.944706", + 1718740800000, + "55564341.51" + ], + [ + 1718740800000, + "64422.01", + "64683.84", + "64178.1", + "64565.49", + "552.774673", + 1718744400000, + "35628336.98" + ] + ] + + def get_candles_ws_data_mock_1(self): + return { + "c": "spot@public.kline.v3.api@BTCUSDT@Min15", + "d": { + "k": { + "T": 1661931900, + "a": 29043.48804658, + "c": 20279.43, + "h": 20284.93, + "i": "Min60", + "l": 20277.52, + "o": 20284.93, + "t": 1661931000, + "v": 1.43211}, + "e": "spot@public.kline.v3.api"}, + "s": "BTCUSDT", + "t": 1661931016878 + } + + def get_candles_ws_data_mock_2(self): + return { + "c": "spot@public.kline.v3.api@BTCUSDT@Min15", + "d": { + "k": { + "T": 1661931900, + "a": 29043.48804658, + "c": 20279.43, + "h": 20284.93, + "i": "Min60", + "l": 20277.52, + "o": 20284.93, + "t": 1661934600, + "v": 1.43211}, + "e": "spot@public.kline.v3.api"}, + "s": "BTCUSDT", + "t": 1661931016878 + } + + @staticmethod + def _success_subscription_mock(): + return { + "id": 0, + "code": 0, + "msg": "spot@public.kline.v3.api@BTCUSDT" + } diff --git a/test/hummingbot/data_feed/candles_feed/okx_perpetual_candles/test_okx_perpetual_candles.py b/test/hummingbot/data_feed/candles_feed/okx_perpetual_candles/test_okx_perpetual_candles.py index def27a5236..e47dd460ad 100644 --- a/test/hummingbot/data_feed/candles_feed/okx_perpetual_candles/test_okx_perpetual_candles.py +++ b/test/hummingbot/data_feed/candles_feed/okx_perpetual_candles/test_okx_perpetual_candles.py @@ -1,20 +1,12 @@ import asyncio -import json -import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -import numpy as np -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from hummingbot.connector.test_support.network_mocking_assistant import 
NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig -from hummingbot.data_feed.candles_feed.okx_perpetual_candles import OKXPerpetualCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.okx_perpetual_candles import OKXPerpetualCandles -class TestOKXPerpetualCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestOKXPerpetualCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -26,84 +18,82 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}-SWAP" + cls.max_records = 150 def setUp(self) -> None: super().setUp() + self.data_feed = OKXPerpetualCandles(trading_pair=self.trading_pair, + interval=self.interval, + max_records=self.max_records) self.mocking_assistant = NetworkMockingAssistant() - self.data_feed = OKXPerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) - - self.log_records = [] self.data_feed.logger().setLevel(1) self.data_feed.logger().addHandler(self) - self.resume_test_event = asyncio.Event() - - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - def get_fetched_candles_data_mock(self): - candles = self.get_candles_rest_data_mock() - arr = [[row[0], row[1], row[2], row[3], row[4], row[6], row[7], 0., 0., 0.] 
for row in candles["data"][::-1]] - return np.array(arr).astype(float) - - def get_candles_rest_data_mock(self): + @staticmethod + def get_candles_rest_data_mock(): data = { "code": "0", "msg": "", "data": [ - ["1705431600000", - "43016", - "43183.8", - "42946", - "43169.7", - "404.74017381", - "17447600.212916623", - "17447600.212916623", - "1"], - ["1705428000000", - "43053.3", - "43157.4", - "42836.5", - "43016", - "385.88107189", - "16589516.212133739", - "16589516.212133739", - "1"], - ["1705424400000", - "43250.9", - "43250.9", - "43035.1", - "43048.1", - "333.55276206", - "14383538.301882162", - "14383538.301882162", - "1"], - ["1705420800000", - "43253.6", - "43440.2", - "43000", - "43250.9", - "942.87870026", - "40743115.773175484", - "40743115.773175484", - "1"], + [ + "1718658000000", + "66401", + "66734", + "66310.1", + "66575.3", + "201605.6", + "2016.056", + "134181486.8892", + "1" + ], + [ + "1718654400000", + "66684", + "66765.1", + "66171.3", + "66400.6", + "532566.8", + "5325.668", + "353728101.5321", + "1" + ], + [ + "1718650800000", + "67087.1", + "67099.8", + "66560", + "66683.9", + "449946.1", + "4499.461", + "300581935.693", + "1" + ], + [ + "1718647200000", + "66602", + "67320", + "66543.3", + "67087", + "1345995.9", + "13459.959", + "900743428.1363", + "1" + ] ] } return data + def get_fetch_candles_data_mock(self): + return [[1718647200.0, '66602', '67320', '66543.3', '67087', '13459.959', '900743428.1363', 0.0, 0.0, 0.0], + [1718650800.0, '67087.1', '67099.8', '66560', '66683.9', '4499.461', '300581935.693', 0.0, 0.0, 0.0], + [1718654400.0, '66684', '66765.1', '66171.3', '66400.6', '5325.668', '353728101.5321', 0.0, 0.0, 0.0], + [1718658000.0, '66401', '66734', '66310.1', '66575.3', '2016.056', '134181486.8892', 0.0, 0.0, 0.0]] + def get_candles_ws_data_mock_1(self): data = { "arg": { "channel": "candle1H", - "instId": "BTC-USDT"}, + "instId": self.ex_trading_pair}, "data": [ ["1705420800000", "43253.6", @@ -120,7 +110,7 @@ def get_candles_ws_data_mock_2(self): data = { "arg": { "channel": "candle1H", - "instId": "BTC-USDT"}, + "instId": self.ex_trading_pair}, "data": [ ["1705435200000", "43169.8", @@ -133,194 +123,6 @@ def get_candles_ws_data_mock_2(self): "0"]]} return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - # Fill manual params - start_time = 1705420800000 - end_time = 1705431600000 - - # Generate url and regex_url. 
Last one is used for best practices - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}?after={end_time}&bar={CONSTANTS.INTERVALS[self.interval]}&before={start_time}&instId={self.ex_trading_pair}&limit=100" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - - # Mock the response - data_mock = self.get_candles_rest_data_mock() - - # Add the mock to the aioresponse mock - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - # Run the test - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - # Check the response - self.assertEqual(resp.shape[0], len(data_mock["data"])) - self.assertEqual(resp.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles.fetch_candles", new_callable=AsyncMock) - def test_get_historical_candles(self, fetched_candles_mock): - config = HistoricalCandlesConfig(connector_name="okx_perpetual", - trading_pair=self.ex_trading_pair, - interval=self.interval, - start_time=1705420800000, - end_time=1705431600000) - resp_1 = self.get_fetched_candles_data_mock() - resp_2 = np.array([]) - fetched_candles_mock.side_effect = [resp_1, resp_2] - candles_df = self.async_run_with_timeout(self.data_feed.get_historical_candles(config)) - - # Check the response - self.assertEqual(candles_df.shape[0], len(resp_1)) - self.assertEqual(candles_df.shape[1], 10) - - # Check candles integrity. Diff should always be interval in milliseconds and keep sign constant - self.assertEqual(len(candles_df["timestamp"].diff()[1:].unique()), 1, "Timestamp diff should be constant") - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "event": "subscribe", - "arg": { - "channel": "candle1H", - "instId": "BTC-USDT-SWAP" - }, - "connId": "a4d3ae55" - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "op": "subscribe", - "args": [{ - "channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", - "instId": self.ex_trading_pair}] - } - - self.assertEqual(expected_kline_subscription, sent_subscription_messages[0]) - - self.assertTrue(self.is_logged( - log_level="INFO", - message="Subscribed to public klines..." 
- )) - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - 
self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.okx_perpetual_candles.OKXPerpetualCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - msg_1 = json.dumps(self.get_candles_ws_data_mock_1()) - msg_2 = json.dumps(self.get_candles_ws_data_mock_2()) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=msg_1) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=msg_2) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/okx_spot_candles/test_okx_spot_candles.py b/test/hummingbot/data_feed/candles_feed/okx_spot_candles/test_okx_spot_candles.py index c8ae03714e..1e5be8f1d8 100644 --- a/test/hummingbot/data_feed/candles_feed/okx_spot_candles/test_okx_spot_candles.py +++ b/test/hummingbot/data_feed/candles_feed/okx_spot_candles/test_okx_spot_candles.py @@ -1,20 +1,12 @@ import asyncio -import json -import re -import unittest -from typing import Awaitable -from unittest.mock import AsyncMock, MagicMock, patch - -import numpy as np -from aioresponses import aioresponses +from test.hummingbot.data_feed.candles_feed.test_candles_base import TestCandlesBase from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig -from hummingbot.data_feed.candles_feed.okx_spot_candles import OKXSpotCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.okx_spot_candles import OKXSpotCandles -class TestOKXSpotCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestOKXSpotCandles(TestCandlesBase): + __test__ = True level = 0 @classmethod @@ -26,35 +18,19 @@ def setUpClass(cls) -> None: cls.interval = "1h" cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" cls.ex_trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.max_records = 150 def setUp(self) -> None: super().setUp() + self.data_feed = OKXSpotCandles(trading_pair=self.trading_pair, + interval=self.interval, + max_records=self.max_records) self.mocking_assistant = NetworkMockingAssistant() - self.data_feed = OKXSpotCandles(trading_pair=self.trading_pair, interval=self.interval) - - self.log_records = [] self.data_feed.logger().setLevel(1) 
self.data_feed.logger().addHandler(self) - self.resume_test_event = asyncio.Event() - - def handle(self, record): - self.log_records.append(record) - - def is_logged(self, log_level: str, message: str) -> bool: - return any( - record.levelname == log_level and record.getMessage() == message for - record in self.log_records) - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): - ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) - return ret - - def get_fetched_candles_data_mock(self): - candles = self.get_candles_rest_data_mock() - arr = [[row[0], row[1], row[2], row[3], row[4], row[6], row[7], 0., 0., 0.] for row in candles["data"][::-1]] - return np.array(arr).astype(float) - - def get_candles_rest_data_mock(self): + @staticmethod + def get_candles_rest_data_mock(): data = { "code": "0", "msg": "", @@ -99,11 +75,21 @@ def get_candles_rest_data_mock(self): } return data + def get_fetch_candles_data_mock(self): + return [[1705420800.0, '43253.6', '43440.2', '43000', '43250.9', '942.87870026', '40743115.773175484', 0.0, 0.0, + 0.0], + [1705424400.0, '43250.9', '43250.9', '43035.1', '43048.1', '333.55276206', '14383538.301882162', 0.0, + 0.0, 0.0], + [1705428000.0, '43053.3', '43157.4', '42836.5', '43016', '385.88107189', '16589516.212133739', 0.0, 0.0, + 0.0], + [1705431600.0, '43016', '43183.8', '42946', '43169.7', '404.74017381', '17447600.212916623', 0.0, 0.0, + 0.0]] + def get_candles_ws_data_mock_1(self): data = { "arg": { "channel": "candle1H", - "instId": "BTC-USDT"}, + "instId": self.ex_trading_pair}, "data": [ ["1705420800000", "43253.6", @@ -120,7 +106,7 @@ def get_candles_ws_data_mock_2(self): data = { "arg": { "channel": "candle1H", - "instId": "BTC-USDT"}, + "instId": self.ex_trading_pair}, "data": [ ["1705435200000", "43169.8", @@ -133,194 +119,6 @@ def get_candles_ws_data_mock_2(self): "0"]]} return data - @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - # Fill manual params - start_time = 1705420800000 - end_time = 1705431600000 - - # Generate url and regex_url. Last one is used for best practices - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}?after={end_time}&bar={CONSTANTS.INTERVALS[self.interval]}&before={start_time}&instId={self.ex_trading_pair}&limit=100" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) - - # Mock the response - data_mock = self.get_candles_rest_data_mock() - - # Add the mock to the aioresponse mock - mock_api.get(url=regex_url, body=json.dumps(data_mock)) - - # Run the test - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) - - # Check the response - self.assertEqual(resp.shape[0], len(data_mock["data"])) - self.assertEqual(resp.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles.fetch_candles", new_callable=AsyncMock) - def test_get_historical_candles(self, fetched_candles_mock): - config = HistoricalCandlesConfig(connector_name="okx", - trading_pair=self.ex_trading_pair, - interval=self.interval, - start_time=1705420800000, - end_time=1705431600000) - resp_1 = self.get_fetched_candles_data_mock() - resp_2 = np.array([]) - fetched_candles_mock.side_effect = [resp_1, resp_2] - candles_df = self.async_run_with_timeout(self.data_feed.get_historical_candles(config)) - - # Check the response - self.assertEqual(candles_df.shape[0], len(resp_1)) - self.assertEqual(candles_df.shape[1], 10) - - # Check candles integrity. 
Diff should always be interval in milliseconds and keep sign constant - self.assertEqual(len(candles_df["timestamp"].diff()[1:].unique()), 1, "Timestamp diff should be constant") - - def test_candles_empty(self): - self.assertTrue(self.data_feed.candles_df.empty) - - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - - result_subscribe_klines = { - "event": "subscribe", - "arg": { - "channel": "candle1H", - "instId": "BTC-USDT" - }, - "connId": "a4d3ae55" - } - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(result_subscribe_klines)) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket( - websocket_mock=ws_connect_mock.return_value) - - self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "op": "subscribe", - "args": [{ - "channel": f"candle{CONSTANTS.INTERVALS[self.interval]}", - "instId": self.ex_trading_pair}] - } - - self.assertEqual(expected_kline_subscription, sent_subscription_messages[0]) - - self.assertTrue(self.is_logged( - log_level="INFO", - message="Subscribed to public klines..." - )) - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect") - def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock): - mock_ws.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - self.async_run_with_timeout(self.listening_task) - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles._sleep") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): - mock_ws.side_effect = Exception("TEST ERROR.") - sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event( - asyncio.CancelledError()) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.async_run_with_timeout(self.resume_test_event.wait()) - - self.assertTrue( - self.is_logged( - "ERROR", - "Unexpected error occurred when listening to public klines. 
Retrying in 1 seconds...")) - - def test_subscribe_channels_raises_cancel_exception(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = asyncio.CancelledError - - with self.assertRaises(asyncio.CancelledError): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - def test_subscribe_channels_raises_exception_and_logs_error(self): - mock_ws = MagicMock() - mock_ws.send.side_effect = Exception("Test Error") - - with self.assertRaises(Exception): - self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws)) - self.async_run_with_timeout(self.listening_task) - - self.assertTrue( - self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") - ) - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - fill_historical_candles_mock.assert_called_once() - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles.fill_historical_candles", new_callable=AsyncMock) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - fill_historical_candles.return_value = None - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=json.dumps(self.get_candles_ws_data_mock_1())) - - self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2) - - self.assertEqual(self.data_feed.candles_df.shape[0], 1) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - @patch("hummingbot.data_feed.candles_feed.okx_spot_candles.OKXSpotCandles.fill_historical_candles") - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): - ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - msg_1 = json.dumps(self.get_candles_ws_data_mock_1()) - msg_2 = json.dumps(self.get_candles_ws_data_mock_2()) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=msg_1) - - self.mocking_assistant.add_websocket_aiohttp_message( - websocket_mock=ws_connect_mock.return_value, - message=msg_2) - - self.listening_task = 
self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) - - self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value) - - self.assertEqual(self.data_feed.candles_df.shape[0], 2) - self.assertEqual(self.data_feed.candles_df.shape[1], 10) - - def _create_exception_and_unlock_test_with_event(self, exception): - self.resume_test_event.set() - raise exception + @staticmethod + def _success_subscription_mock(): + return {} diff --git a/test/hummingbot/data_feed/candles_feed/binance_perpetuals_candles/test_binance_perpetuals_candles.py b/test/hummingbot/data_feed/candles_feed/test_candles_base.py similarity index 51% rename from test/hummingbot/data_feed/candles_feed/binance_perpetuals_candles/test_binance_perpetuals_candles.py rename to test/hummingbot/data_feed/candles_feed/test_candles_base.py index 27344b5acb..9ae3db9237 100644 --- a/test/hummingbot/data_feed/candles_feed/binance_perpetuals_candles/test_binance_perpetuals_candles.py +++ b/test/hummingbot/data_feed/candles_feed/test_candles_base.py @@ -1,38 +1,41 @@ import asyncio import json +import os import re import unittest +from abc import ABC +from collections import deque from typing import Awaitable from unittest.mock import AsyncMock, MagicMock, patch +import pandas as pd from aioresponses import aioresponses from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant -from hummingbot.data_feed.candles_feed.binance_perpetual_candles import BinancePerpetualCandles, constants as CONSTANTS +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase -class TestBinancePerpetualCandles(unittest.TestCase): - # the level is required to receive logs from the data source logger +class TestCandlesBase(unittest.TestCase, ABC): + __test__ = False level = 0 @classmethod def setUpClass(cls) -> None: super().setUpClass() cls.ev_loop = asyncio.get_event_loop() - cls.base_asset = "BTC" - cls.quote_asset = "USDT" - cls.interval = "1h" - cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" - cls.ex_trading_pair = cls.base_asset + cls.quote_asset + cls.trading_pair: str = None + cls.interval: str = None + cls.ex_trading_pair: str = None + cls.max_records: int = None def setUp(self) -> None: super().setUp() - self.mocking_assistant = NetworkMockingAssistant() - self.data_feed = BinancePerpetualCandles(trading_pair=self.trading_pair, interval=self.interval) + self.mocking_assistant: NetworkMockingAssistant = None + self.data_feed: CandlesBase = None + self.start_time = 10e6 + self.end_time = 10e17 self.log_records = [] - self.data_feed.logger().setLevel(1) - self.data_feed.logger().addHandler(self) self.resume_test_event = asyncio.Event() def handle(self, record): @@ -43,149 +46,139 @@ def is_logged(self, log_level: str, message: str) -> bool: record.levelname == log_level and record.getMessage() == message for record in self.log_records) - def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1): + @staticmethod + def async_run_with_timeout(coroutine: Awaitable, timeout: int = 1): ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout)) return ret - def get_candles_rest_data_mock(self): - data = [ - [ - 1672981200000, - "16823.24000000", - "16823.63000000", - "16792.12000000", - "16810.18000000", - "6230.44034000", - 1672984799999, - "104737787.36570630", - 162086, - "3058.60695000", - "51418990.63131130", - "0" - ], - [ - 1672984800000, - "16809.74000000", - "16816.45000000", - "16779.96000000", - 
"16786.86000000", - "6529.22759000", - 1672988399999, - "109693209.64287010", - 175249, - "3138.11977000", - "52721850.46080600", - "0" - ], - [ - 1672988400000, - "16786.60000000", - "16802.87000000", - "16780.15000000", - "16794.06000000", - "5763.44917000", - 1672991999999, - "96775667.56265520", - 160778, - "3080.59468000", - "51727251.37008490", - "0" - ], - [ - 1672992000000, - "16794.33000000", - "16812.22000000", - "16791.47000000", - "16802.11000000", - "5475.13940000", - 1672995599999, - "92000245.54341140", - 164303, - "2761.40926000", - "46400964.30558100", - "0" - ], - ] - return data - - def get_candles_ws_data_mock_1(self): - data = { - "e": "kline", - "E": 123456789, - "s": "BTCUSDT", - "k": {"t": 123400000, - "T": 123460000, - "s": "BNBBTC", - "i": "1m", - "f": 100, - "L": 200, - "o": "0.0010", - "c": "0.0020", - "h": "0.0025", - "l": "0.0015", - "v": "1000", - "n": 100, - "x": False, - "q": "1.0000", - "V": "500", - "Q": "0.500", - "B": "123456" - } - } - return data - - def get_candles_ws_data_mock_2(self): - data = { - "e": "kline", - "E": 123516789, - "s": "BTCUSDT", - "k": {"t": 123460000, - "T": 123460000, - "s": "BNBBTC", - "i": "1m", - "f": 100, - "L": 200, - "o": "0.0010", - "c": "0.0020", - "h": "0.0025", - "l": "0.0015", - "v": "1000", - "n": 100, - "x": False, - "q": "1.0000", - "V": "500", - "Q": "0.500", - "B": "123456" - } - } - return data + def _candles_data_mock(self): + return deque(self.get_fetch_candles_data_mock()[-4:]) + + @staticmethod + def get_candles_rest_data_mock(): + """ + Returns a mock response from the exchange REST API endpoint. At least it must contain four candles. + """ + raise NotImplementedError + + def get_fetch_candles_data_mock(self): + raise NotImplementedError + + @staticmethod + def get_candles_ws_data_mock_1(): + raise NotImplementedError + + @staticmethod + def get_candles_ws_data_mock_2(): + raise NotImplementedError + + @staticmethod + def _success_subscription_mock(): + raise NotImplementedError + + def test_initialization(self): + self.assertEqual(self.data_feed._trading_pair, self.trading_pair) + self.assertEqual(self.data_feed.interval, self.interval) + self.assertEqual(len(self.data_feed._candles), 0) + self.assertEqual(self.data_feed._candles.maxlen, self.max_records) + + def test_ready_property(self): + self.assertFalse(self.data_feed.ready) + self.data_feed._candles.extend(range(self.max_records)) + self.assertTrue(self.data_feed.ready) + + def test_candles_df_property(self): + self.data_feed._candles.extend(self._candles_data_mock()) + expected_df = pd.DataFrame(self._candles_data_mock(), columns=self.data_feed.columns, dtype=float) + + pd.testing.assert_frame_equal(self.data_feed.candles_df, expected_df) + + def test_get_exchange_trading_pair(self): + result = self.data_feed.get_exchange_trading_pair(self.trading_pair) + self.assertEqual(result, self.ex_trading_pair) + + @patch("os.path.exists", return_value=True) + @patch("pandas.read_csv") + def test_load_candles_from_csv(self, mock_read_csv, _): + mock_read_csv.return_value = pd.DataFrame(data=self._candles_data_mock(), + columns=self.data_feed.columns) + + self.data_feed.load_candles_from_csv("/path/to/data") + self.assertEqual(len(self.data_feed._candles), 4) + + @patch("os.path.exists", return_value=False) + def test_load_candles_from_csv_file_not_found(self, _): + data_path = "/path/to/data" + expected_filename = f"candles_{self.data_feed.name}_{self.data_feed.interval}.csv" + expected_file_path = os.path.join(data_path, expected_filename) + 
expected_error_message = f"File '{expected_file_path}' does not exist." + + with self.assertRaises(FileNotFoundError) as context: + self.data_feed.load_candles_from_csv(data_path) + + self.assertEqual(str(context.exception), expected_error_message) + + def test_check_candles_sorted_and_equidistant(self): + not_enough_data = [self._candles_data_mock()[0]] + self.assertIsNone(self.data_feed.check_candles_sorted_and_equidistant(not_enough_data)) + self.assertEqual(len(self.data_feed._candles), 0) + + correct_data = self._candles_data_mock().copy() + self.data_feed._candles.extend(correct_data) + self.assertIsNone(self.data_feed.check_candles_sorted_and_equidistant(correct_data)) + self.assertEqual(len(self.data_feed._candles), 4) + + def test_check_candles_sorted_and_equidistant_reset_candles_if_not_ascending(self): + reversed_data = list(self._candles_data_mock())[::-1] + self.data_feed._candles.extend(reversed_data) + self.assertEqual(len(self.data_feed._candles), 4) + self.data_feed.check_candles_sorted_and_equidistant(reversed_data) + self.is_logged("WARNING", "Candles are not sorted by timestamp in ascending order.") + self.assertEqual(len(self.data_feed._candles), 0) + + def test_check_candles_sorted_and_equidistant_reset_candles_if_not_equidistant(self): + not_equidistant_data = self._candles_data_mock() + not_equidistant_data[0][0] += 1 + self.data_feed._candles.extend(not_equidistant_data) + self.assertEqual(len(self.data_feed._candles), 4) + self.data_feed.check_candles_sorted_and_equidistant(not_equidistant_data) + self.is_logged("WARNING", "Candles are malformed. Restarting...") + self.assertEqual(len(self.data_feed._candles), 0) + + def test_reset_candles(self): + self.data_feed._candles.extend(self._candles_data_mock()) + self.data_feed._ws_candle_available.set() + self.assertEqual(self.data_feed._ws_candle_available.is_set(), True) + self.assertEqual(len(self.data_feed._candles), 4) + self.data_feed._reset_candles() + self.assertEqual(len(self.data_feed._candles), 0) + self.assertEqual(self.data_feed._ws_candle_available.is_set(), False) + + def test_ensure_timestamp_in_seconds(self): + self.assertEqual(self.data_feed.ensure_timestamp_in_seconds(1622505600), 1622505600) + self.assertEqual(self.data_feed.ensure_timestamp_in_seconds(1622505600000), 1622505600) + self.assertEqual(self.data_feed.ensure_timestamp_in_seconds(1622505600000000), 1622505600) + + with self.assertRaises(ValueError): + self.data_feed.ensure_timestamp_in_seconds(162250) @aioresponses() - def test_fetch_candles(self, mock_api: aioresponses): - start_time = 1672981200000 - end_time = 1672992000000 - url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}?endTime={end_time}&interval={self.interval}&limit=500" \ - f"&startTime={start_time}&symbol={self.ex_trading_pair}" - regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) + def test_fetch_candles(self, mock_api): + regex_url = re.compile(f"^{self.data_feed.candles_url}".replace(".", r"\.").replace("?", r"\?")) data_mock = self.get_candles_rest_data_mock() mock_api.get(url=regex_url, body=json.dumps(data_mock)) - resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time)) + resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=self.start_time, + end_time=self.end_time)) - self.assertEqual(resp.shape[0], len(data_mock)) + self.assertEqual(resp.shape[0], len(self.get_fetch_candles_data_mock())) self.assertEqual(resp.shape[1], 10) - def test_candles_empty(self): - 
self.assertTrue(self.data_feed.candles_df.empty) - @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() - result_subscribe_klines = { - "result": None, - "id": 1 - } + result_subscribe_klines = self._success_subscription_mock() self.mocking_assistant.add_websocket_aiohttp_message( websocket_mock=ws_connect_mock.return_value, @@ -199,11 +192,12 @@ def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock): websocket_mock=ws_connect_mock.return_value) self.assertEqual(1, len(sent_subscription_messages)) - expected_kline_subscription = { - "method": "SUBSCRIBE", - "params": [f"{self.ex_trading_pair.lower()}@kline_{self.interval}"], - "id": 1} - + expected_kline_subscription = self.data_feed.ws_subscription_payload() + # the "id" field is removed because the nonce cannot be mocked reliably + if "id" in expected_kline_subscription: + del expected_kline_subscription["id"] + if "id" in sent_subscription_messages[0]: + del sent_subscription_messages[0]["id"] self.assertEqual(expected_kline_subscription, sent_subscription_messages[0]) self.assertTrue(self.is_logged( @@ -220,7 +214,7 @@ def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: Asyn self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions()) self.async_run_with_timeout(self.listening_task) - @patch("hummingbot.data_feed.candles_feed.binance_perpetual_candles.BinancePerpetualCandles._sleep") + @patch("hummingbot.data_feed.candles_feed.candles_base.CandlesBase._sleep") @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock): mock_ws.side_effect = Exception("TEST ERROR.") @@ -256,7 +250,7 @@ def test_subscribe_channels_raises_exception_and_logs_error(self): self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...") ) - @patch("hummingbot.data_feed.candles_feed.binance_perpetual_candles.BinancePerpetualCandles.fill_historical_candles", new_callable=AsyncMock) + @patch("hummingbot.data_feed.candles_feed.candles_base.CandlesBase.fill_historical_candles", new_callable=AsyncMock) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock): ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() @@ -272,7 +266,7 @@ def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_his self.assertEqual(self.data_feed.candles_df.shape[1], 10) fill_historical_candles_mock.assert_called_once() - @patch("hummingbot.data_feed.candles_feed.binance_perpetual_candles.BinancePerpetualCandles.fill_historical_candles", new_callable=AsyncMock) + @patch("hummingbot.data_feed.candles_feed.candles_base.CandlesBase.fill_historical_candles", new_callable=AsyncMock) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles): ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() @@ -293,7 +287,7 @@ def test_process_websocket_messages_duplicated_candle_not_included(self, ws_conn self.assertEqual(self.data_feed.candles_df.shape[0], 1) self.assertEqual(self.data_feed.candles_df.shape[1], 10) - 
@patch("hummingbot.data_feed.candles_feed.binance_perpetual_candles.BinancePerpetualCandles.fill_historical_candles") + @patch("hummingbot.data_feed.candles_feed.candles_base.CandlesBase.fill_historical_candles", new_callable=AsyncMock) @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _): ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock() diff --git a/test/hummingbot/pmm_script/test_pmm_script_base.py b/test/hummingbot/pmm_script/test_pmm_script_base.py deleted file mode 100644 index b2f6c13e82..0000000000 --- a/test/hummingbot/pmm_script/test_pmm_script_base.py +++ /dev/null @@ -1,72 +0,0 @@ -import unittest -from decimal import Decimal -from statistics import mean - -from hummingbot.pmm_script.pmm_script_base import PMMScriptBase - - -class PMMScriptIteratorTests(unittest.TestCase): - - def test_avg_mid_price(self): - script_base = PMMScriptBase() - script_base.mid_prices = [Decimal("10.1"), Decimal("10.2"), Decimal("10.1"), Decimal("10.2"), Decimal("10.4"), - Decimal("10.5"), Decimal("10.3"), Decimal("10.6"), Decimal("10.7"), Decimal("10.8"), - Decimal("10.0"), Decimal("10.1"), Decimal("10.1"), Decimal("10.1"), Decimal("10.1")] - avg_price = script_base.avg_mid_price(3, 10) - # since there is not enough sample size, it should return None - self.assertTrue(avg_price is None) - # At interval of 3 and length of 5, these belows are counted as the samples - samples = [Decimal("10.1"), Decimal("10.5"), Decimal("10.7"), Decimal("10.1"), Decimal("10.1")] - self.assertEqual(mean(samples), script_base.avg_mid_price(3, 5)) - # At length of 2, only the last two should be used for the avg - samples = [Decimal("10.1"), Decimal("10.1")] - self.assertEqual(mean(samples), script_base.avg_mid_price(3, 2)) - # At 100 interval and length of 1, only the last item is counted. 
- avg_price = script_base.avg_mid_price(100, 1) - self.assertEqual(Decimal("10.1"), avg_price) - - def test_take_samples(self): - script_base = PMMScriptBase() - a_list = [1, 2, 3, 4, 5, 6, 7] - samples = script_base.take_samples(a_list, 3, 10) - # since there is not enough sample size, it should return None - self.assertTrue(samples is None) - # At interval of 3 and length of 2, these belows are counted as the samples - expected = [4, 7] - samples = script_base.take_samples(a_list, 3, 2) - self.assertEqual(expected, samples) - # At interval of 2 and length of 4, these belows are counted as the samples - expected = [1, 3, 5, 7] - samples = script_base.take_samples(a_list, 2, 4) - self.assertEqual(expected, samples) - # At interval of 2 and length of 1, these belows are counted as the samples - expected = [7] - samples = script_base.take_samples(a_list, 2, 1) - self.assertEqual(expected, samples) - - def test_avg_and_median_mid_price_chg(self): - script_base = PMMScriptBase() - script_base.mid_prices = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] - avg_chg = script_base.avg_price_volatility(3, 10) - # since there is not enough sample size, it should return None - self.assertTrue(avg_chg is None) - avg_chg = script_base.avg_price_volatility(3, 5) - # at 5 sample size, as we need 5 +1, so this should also return None - self.assertTrue(avg_chg is None) - # At interval of 4 and length of 3, these belows are counted as the samples - # The samples are 15, 11, 7, 3 - expected_chg = [(15 - 11) / 11, (11 - 7) / 7, (7 - 3) / 3] - self.assertEqual(mean(expected_chg), script_base.avg_price_volatility(4, 3)) - # The median change is (11 - 7) / 7 - self.assertEqual((11 - 7) / 7, script_base.median_price_volatility(4, 3)) - - # At 10 interval and length of 1. - expected_chg = (15 - 5) / 5 - self.assertEqual(expected_chg, script_base.avg_price_volatility(10, 1)) - - def test_round_by_step(self): - self.assertEqual(Decimal("1.75"), PMMScriptBase.round_by_step(Decimal("1.8"), Decimal("0.25"))) - self.assertEqual(Decimal("1.75"), PMMScriptBase.round_by_step(Decimal("1.75"), Decimal("0.25"))) - self.assertEqual(Decimal("1.75"), PMMScriptBase.round_by_step(Decimal("1.7567"), Decimal("0.01"))) - self.assertEqual(Decimal("1"), PMMScriptBase.round_by_step(Decimal("1.7567"), Decimal("1"))) - self.assertEqual(Decimal("-1.75"), PMMScriptBase.round_by_step(Decimal("-1.8"), Decimal("0.25"))) diff --git a/test/hummingbot/strategy/test_order_tracker.py b/test/hummingbot/strategy/test_order_tracker.py index 5dc1c34490..e7714e4f88 100644 --- a/test/hummingbot/strategy/test_order_tracker.py +++ b/test/hummingbot/strategy/test_order_tracker.py @@ -68,7 +68,7 @@ def setUp(self): @staticmethod def simulate_place_order(order_tracker: OrderTracker, order: Union[LimitOrder, MarketOrder], market_info: MarketTradingPairTuple): """ - Simulates an order being succesfully placed. + Simulates an order being successfully placed. 
""" if isinstance(order, LimitOrder): order_tracker.add_create_order_pending(order.client_order_id) diff --git a/test/hummingbot/strategy/test_strategy_v2_base.py b/test/hummingbot/strategy/test_strategy_v2_base.py index fa660dac94..6bb731632d 100644 --- a/test/hummingbot/strategy/test_strategy_v2_base.py +++ b/test/hummingbot/strategy/test_strategy_v2_base.py @@ -165,8 +165,8 @@ async def test_on_tick(self, mock_execute_action, mock_ready, mock_update_execut # Since no actions are returned, execute_action should not be called mock_execute_action.assert_not_called() - def test_on_stop(self): - self.strategy.on_stop() + async def test_on_stop(self): + await self.strategy.on_stop() # Check if stop methods are called on each component self.strategy.executor_orchestrator.stop.assert_called_once() diff --git a/test/hummingbot/strategy_v2/executors/position_executor/test_data_types.py b/test/hummingbot/strategy_v2/executors/position_executor/test_data_types.py index 65a73baf0f..6641eebeda 100644 --- a/test/hummingbot/strategy_v2/executors/position_executor/test_data_types.py +++ b/test/hummingbot/strategy_v2/executors/position_executor/test_data_types.py @@ -3,6 +3,7 @@ from hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder +from hummingbot.strategy_v2.executors.position_executor.data_types import TrailingStop, TripleBarrierConfig from hummingbot.strategy_v2.models.executors import CloseType, TrackedOrder @@ -42,3 +43,24 @@ def test_tracked_order_order(self): order = TrackedOrder() order.order = in_flight_order self.assertEqual(order.order, in_flight_order) + + def test_get_triple_barrier_new_instance_with_volatility_adjusted(self): + triple_barrier_base = TripleBarrierConfig( + stop_loss=Decimal("0.1"), + take_profit=Decimal("0.2"), + time_limit=100, + trailing_stop=TrailingStop(activation_price=Decimal("0.1"), trailing_delta=Decimal("0.1")), + open_order_type=OrderType.LIMIT, + take_profit_order_type=OrderType.MARKET, + stop_loss_order_type=OrderType.MARKET, + time_limit_order_type=OrderType.MARKET + ) + triple_barrier_new = triple_barrier_base.new_instance_with_adjusted_volatility(1.5) + self.assertEqual(triple_barrier_new.stop_loss, Decimal("0.15")) + self.assertEqual(triple_barrier_new.take_profit, Decimal("0.3")) + self.assertEqual(triple_barrier_new.time_limit, 100) + self.assertEqual(triple_barrier_new.trailing_stop.activation_price, Decimal("0.15")) + self.assertEqual(triple_barrier_new.trailing_stop.trailing_delta, Decimal("0.15")) + self.assertEqual(triple_barrier_new.open_order_type, OrderType.LIMIT) + self.assertEqual(triple_barrier_new.take_profit_order_type, OrderType.MARKET) + self.assertEqual(triple_barrier_new.stop_loss_order_type, OrderType.MARKET) diff --git a/test/hummingbot/strategy_v2/executors/position_executor/test_position_executor.py b/test/hummingbot/strategy_v2/executors/position_executor/test_position_executor.py index cd8e172f55..b4e5574c2f 100644 --- a/test/hummingbot/strategy_v2/executors/position_executor/test_position_executor.py +++ b/test/hummingbot/strategy_v2/executors/position_executor/test_position_executor.py @@ -2,7 +2,7 @@ from test.isolated_asyncio_wrapper_test_case import IsolatedAsyncioWrapperTestCase from unittest.mock import MagicMock, PropertyMock, patch -from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.connector.exchange_py_base import ExchangePyBase from hummingbot.connector.trading_rule import TradingRule from 
hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState, TradeUpdate @@ -36,36 +36,44 @@ def create_mock_strategy(self): strategy.sell.side_effect = ["OID-SELL-1", "OID-SELL-2", "OID-SELL-3"] strategy.cancel.return_value = None strategy.connectors = { - "binance": MagicMock(spec=ConnectorBase), + "binance": MagicMock(spec=ExchangePyBase), } return strategy def get_position_config_market_long(self): - return PositionExecutorConfig(id="test", timestamp=1234567890, trading_pair="ETH-USDT", connector_name="binance", + return PositionExecutorConfig(id="test", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", side=TradeType.BUY, entry_price=Decimal("100"), amount=Decimal("1"), triple_barrier_config=TripleBarrierConfig( stop_loss=Decimal("0.05"), take_profit=Decimal("0.1"), time_limit=60, - take_profit_order_type=OrderType.LIMIT, stop_loss_order_type=OrderType.MARKET)) + take_profit_order_type=OrderType.LIMIT, + stop_loss_order_type=OrderType.MARKET)) def get_position_config_market_long_tp_market(self): - return PositionExecutorConfig(id="test-1", timestamp=1234567890, trading_pair="ETH-USDT", connector_name="binance", + return PositionExecutorConfig(id="test-1", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", side=TradeType.BUY, entry_price=Decimal("100"), amount=Decimal("1"), triple_barrier_config=TripleBarrierConfig( stop_loss=Decimal("0.05"), take_profit=Decimal("0.1"), time_limit=60, - take_profit_order_type=OrderType.MARKET, stop_loss_order_type=OrderType.MARKET)) + take_profit_order_type=OrderType.MARKET, + stop_loss_order_type=OrderType.MARKET)) def get_position_config_market_short(self): - return PositionExecutorConfig(id="test-2", timestamp=1234567890, trading_pair="ETH-USDT", connector_name="binance", + return PositionExecutorConfig(id="test-2", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", side=TradeType.SELL, entry_price=Decimal("100"), amount=Decimal("1"), triple_barrier_config=TripleBarrierConfig( stop_loss=Decimal("0.05"), take_profit=Decimal("0.1"), time_limit=60, - take_profit_order_type=OrderType.LIMIT, stop_loss_order_type=OrderType.MARKET)) + take_profit_order_type=OrderType.LIMIT, + stop_loss_order_type=OrderType.MARKET)) def get_incomplete_position_config(self): - return PositionExecutorConfig(id="test-3", timestamp=1234567890, trading_pair="ETH-USDT", connector_name="binance", + return PositionExecutorConfig(id="test-3", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", side=TradeType.SELL, entry_price=Decimal("100"), amount=Decimal("1"), triple_barrier_config=TripleBarrierConfig( - take_profit_order_type=OrderType.LIMIT, stop_loss_order_type=OrderType.MARKET)) + take_profit_order_type=OrderType.LIMIT, + stop_loss_order_type=OrderType.MARKET)) def test_properties(self): position_config = self.get_position_config_market_short() @@ -92,9 +100,13 @@ def get_position_executor_running_from_config(self, position_config): position_executor._status = RunnableStatus.RUNNING return position_executor + @patch.object(PositionExecutor, "get_trading_rules") @patch.object(PositionExecutor, "get_price") - async def test_control_position_create_open_order(self, mock_price): + async def test_control_position_create_open_order(self, mock_price, trading_rules_mock): mock_price.return_value = Decimal("100") + trading_rules = MagicMock(spec=TradingRule) + trading_rules.min_order_size = Decimal("0.1") + 
trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_short() position_executor = self.get_position_executor_running_from_config(position_config) await position_executor.control_task() @@ -118,11 +130,12 @@ async def test_control_position_not_started_expired(self, mock_price, trading_ru @patch.object(PositionExecutor, "get_trading_rules") async def test_control_open_order_expiration(self, trading_rules_mock): - position_config = self.get_position_config_market_short() - position_executor = self.get_position_executor_running_from_config(position_config) trading_rules = MagicMock(spec=TradingRule) trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") trading_rules_mock.return_value = trading_rules + position_config = self.get_position_config_market_short() + position_executor = self.get_position_executor_running_from_config(position_config) type(self.strategy).current_timestamp = PropertyMock(return_value=1234567890 + 61) position_executor._open_order = TrackedOrder(order_id="OID-SELL-1") position_executor._open_order.order = InFlightOrder( @@ -143,15 +156,26 @@ async def test_control_open_order_expiration(self, trading_rules_mock): order_id="OID-SELL-1") self.assertEqual(position_executor.trade_pnl_pct, Decimal("0")) - async def test_control_position_order_placed_not_cancel_open_order(self): + @patch.object(PositionExecutor, "get_trading_rules") + async def test_control_position_order_placed_not_cancel_open_order(self, trading_rules_mock): + trading_rules = MagicMock(spec=TradingRule) + trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") + trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_short() position_executor = self.get_position_executor_running_from_config(position_config) position_executor._open_order = TrackedOrder(order_id="OID-SELL-1") await position_executor.control_task() position_executor._strategy.cancel.assert_not_called() - @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("101")) - async def test_control_position_active_position_create_take_profit(self, _): + @patch.object(PositionExecutor, "get_trading_rules") + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("101")) + async def test_control_position_active_position_create_take_profit(self, _, trading_rules_mock): + trading_rules = MagicMock(spec=TradingRule) + trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") + trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_short() position_executor = self.get_position_executor_running_from_config(position_config) position_executor._open_order = TrackedOrder(order_id="OID-SELL-1") @@ -189,6 +213,7 @@ async def test_control_position_active_position_create_take_profit(self, _): async def test_control_position_active_position_close_by_take_profit_market(self, _, trading_rules_mock): trading_rules = MagicMock(spec=TradingRule) trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_long_tp_market() position_executor = self.get_position_executor_running_from_config(position_config) @@ -224,11 +249,13 @@ async def 
test_control_position_active_position_close_by_take_profit_market(self self.assertEqual(position_executor.trade_pnl_pct, Decimal("0.2")) @patch.object(PositionExecutor, "get_trading_rules") - @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("70")) + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("70")) async def test_control_position_active_position_close_by_stop_loss(self, _, trading_rules_mock): position_config = self.get_position_config_market_long() trading_rules = MagicMock(spec=TradingRule) trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") trading_rules_mock.return_value = trading_rules position_executor = self.get_position_executor_running_from_config(position_config) position_executor._open_order = TrackedOrder(order_id="OID-BUY-1") @@ -263,10 +290,12 @@ async def test_control_position_active_position_close_by_stop_loss(self, _, trad self.assertEqual(position_executor.trade_pnl_pct, Decimal("-0.3")) @patch.object(PositionExecutor, "get_trading_rules") - @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("100")) + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("100")) async def test_control_position_active_position_close_by_time_limit(self, _, trading_rules_mock): trading_rules = MagicMock(spec=TradingRule) trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_long() type(self.strategy).current_timestamp = PropertyMock(return_value=1234567890 + 61) @@ -303,10 +332,12 @@ async def test_control_position_active_position_close_by_time_limit(self, _, tra self.assertEqual(position_executor.trade_pnl_pct, Decimal("0.0")) @patch.object(PositionExecutor, "get_trading_rules") - @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("70")) + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("70")) async def test_control_position_close_placed_stop_loss_failed(self, _, trading_rules_mock): trading_rules = MagicMock(spec=TradingRule) trading_rules.min_order_size = Decimal("0.1") + trading_rules.min_notional_size = Decimal("1") trading_rules_mock.return_value = trading_rules position_config = self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) @@ -349,7 +380,19 @@ async def test_control_position_close_placed_stop_loss_failed(self, _, trading_r self.assertEqual(position_executor._close_order.order_id, "OID-SELL-1") self.assertEqual(position_executor.close_type, CloseType.STOP_LOSS) - def test_process_order_completed_event_open_order(self): + @patch.object(PositionExecutor, "get_in_flight_order") + def test_process_order_completed_event_open_order(self, in_flight_order_mock): + order = InFlightOrder( + client_order_id="OID-BUY-1", + exchange_order_id="EOID4", + trading_pair="ETH-USDT", + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1"), + price=Decimal("100"), + creation_timestamp=1640001112.223, + ) + in_flight_order_mock.return_value = order position_config = 
self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) position_executor._open_order = TrackedOrder("OID-BUY-1") @@ -365,8 +408,21 @@ def test_process_order_completed_event_open_order(self): ) market = MagicMock() position_executor.process_order_completed_event("102", market, event) + self.assertEqual(position_executor._open_order.order, order) - def test_process_order_completed_event_close_order(self): + @patch.object(PositionExecutor, "get_in_flight_order") + def test_process_order_completed_event_close_order(self, mock_in_flight_order): + order = InFlightOrder( + client_order_id="OID-BUY-1", + exchange_order_id="EOID4", + trading_pair="ETH-USDT", + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1"), + price=Decimal("100"), + creation_timestamp=1640001112.223, + ) + mock_in_flight_order.return_value = order position_config = self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) position_executor._close_order = TrackedOrder("OID-BUY-1") @@ -383,10 +439,22 @@ def test_process_order_completed_event_close_order(self): ) market = MagicMock() position_executor.process_order_completed_event("102", market, event) - self.assertEqual(position_executor.close_timestamp, 1234567890) self.assertEqual(position_executor.close_type, CloseType.STOP_LOSS) + self.assertEqual(position_executor._close_order.order, order) - def test_process_order_completed_event_take_profit_order(self): + @patch.object(PositionExecutor, "get_in_flight_order") + def test_process_order_completed_event_take_profit_order(self, in_flight_order_mock): + order = InFlightOrder( + client_order_id="OID-BUY-1", + exchange_order_id="EOID4", + trading_pair="ETH-USDT", + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1"), + price=Decimal("100"), + creation_timestamp=1640001112.223, + ) + in_flight_order_mock.return_value = order position_config = self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) position_executor._take_profit_limit_order = TrackedOrder("OID-BUY-1") @@ -402,8 +470,8 @@ def test_process_order_completed_event_take_profit_order(self): ) market = MagicMock() position_executor.process_order_completed_event("102", market, event) - self.assertEqual(position_executor.close_timestamp, 1234567890) self.assertEqual(position_executor.close_type, CloseType.TAKE_PROFIT) + self.assertEqual(position_executor._take_profit_limit_order.order, order) def test_process_order_canceled_event(self): position_config = self.get_position_config_market_long() @@ -417,7 +485,8 @@ def test_process_order_canceled_event(self): position_executor.process_order_canceled_event(102, market, event) self.assertEqual(position_executor._close_order, None) - @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("101")) + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("101")) def test_to_format_status(self, _): position_config = self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) @@ -450,7 +519,8 @@ def test_to_format_status(self, _): self.assertIn("Trading Pair: ETH-USDT", status[0]) self.assertIn("PNL (%): 0.80%", status[0]) - 
@patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", return_value=Decimal("101")) + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("101")) def test_to_format_status_is_closed(self, _): position_config = self.get_position_config_market_long() position_executor = self.get_position_executor_running_from_config(position_config) @@ -488,7 +558,8 @@ def test_to_format_status_is_closed(self, _): @patch.object(PositionExecutor, 'adjust_order_candidates') def test_validate_sufficient_balance(self, mock_adjust_order_candidates, mock_get_trading_rules): # Mock trading rules - trading_rules = TradingRule(trading_pair="ETH-USDT", min_order_size=Decimal("0.1"), min_price_increment=Decimal("0.1"), min_base_amount_increment=Decimal("0.1")) + trading_rules = TradingRule(trading_pair="ETH-USDT", min_order_size=Decimal("0.1"), + min_price_increment=Decimal("0.1"), min_base_amount_increment=Decimal("0.1")) mock_get_trading_rules.return_value = trading_rules executor = PositionExecutor(self.strategy, self.get_position_config_market_long()) # Mock order candidate @@ -535,3 +606,79 @@ def test_cancel_close_order_and_process_cancel_event(self): market = MagicMock() position_executor.process_order_canceled_event("102", market, event) self.assertEqual(position_executor.close_type, None) + + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("101")) + def test_position_executor_created_without_entry_price(self, _): + config = PositionExecutorConfig(id="test", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", + side=TradeType.BUY, amount=Decimal("1"), + triple_barrier_config=TripleBarrierConfig( + stop_loss=Decimal("0.05"), take_profit=Decimal("0.1"), time_limit=60, + take_profit_order_type=OrderType.LIMIT, + stop_loss_order_type=OrderType.MARKET)) + + executor = PositionExecutor(self.strategy, config) + self.assertEqual(executor.entry_price, Decimal("101")) + + @patch("hummingbot.strategy_v2.executors.position_executor.position_executor.PositionExecutor.get_price", + return_value=Decimal("101")) + def test_position_executor_entry_price_updated_with_limit_maker(self, _): + config = PositionExecutorConfig(id="test", timestamp=1234567890, trading_pair="ETH-USDT", + connector_name="binance", + side=TradeType.BUY, amount=Decimal("1"), + entry_price=Decimal("102"), + triple_barrier_config=TripleBarrierConfig( + open_order_type=OrderType.LIMIT_MAKER, + stop_loss=Decimal("0.05"), take_profit=Decimal("0.1"), time_limit=60, + take_profit_order_type=OrderType.LIMIT, + stop_loss_order_type=OrderType.MARKET)) + + executor = PositionExecutor(self.strategy, config) + self.assertEqual(executor.entry_price, Decimal("101")) + + @patch.object(PositionExecutor, "place_close_order_and_cancel_open_orders") + async def test_control_shutdown_process(self, place_order_mock): + position_config = self.get_position_config_market_long() + position_executor = self.get_position_executor_running_from_config(position_config) + position_executor._open_order = TrackedOrder("OID-BUY-1") + position_executor._open_order.order = InFlightOrder( + client_order_id="OID-BUY-1", + exchange_order_id="EOID4", + trading_pair=position_config.trading_pair, + order_type=position_config.triple_barrier_config.open_order_type, + trade_type=TradeType.BUY, + amount=position_config.amount, + price=position_config.entry_price, + 
creation_timestamp=1640001112.223, + initial_state=OrderState.FILLED + ) + position_executor._open_order.order.update_with_trade_update( + TradeUpdate( + trade_id="1", + client_order_id="OID-BUY-1", + exchange_order_id="EOID4", + trading_pair=position_config.trading_pair, + fill_price=position_config.entry_price, + fill_base_amount=position_config.amount, + fill_quote_amount=position_config.amount * position_config.entry_price, + fee=AddedToCostTradeFee(flat_fees=[TokenAmount(token="USDT", amount=Decimal("0.2"))]), + fill_timestamp=10, + ) + ) + position_executor._status = RunnableStatus.SHUTTING_DOWN + await position_executor.control_task() + place_order_mock.assert_called_once() + position_executor._close_order = TrackedOrder("OID-SELL-1") + position_executor._close_order.order = InFlightOrder( + client_order_id="OID-SELL-1", + exchange_order_id="EOID4", + trading_pair=position_config.trading_pair, + order_type=position_config.triple_barrier_config.open_order_type, + trade_type=TradeType.SELL, + amount=position_config.amount, + price=position_config.entry_price, + creation_timestamp=1640001112.223, + initial_state=OrderState.OPEN + ) + await position_executor.control_task() diff --git a/test/hummingbot/strategy_v2/executors/test_executor_base.py b/test/hummingbot/strategy_v2/executors/test_executor_base.py index cb3bec9b85..db8edc9420 100644 --- a/test/hummingbot/strategy_v2/executors/test_executor_base.py +++ b/test/hummingbot/strategy_v2/executors/test_executor_base.py @@ -3,6 +3,7 @@ from test.logger_mixin_for_test import LoggerMixinForTest from unittest.mock import MagicMock, PropertyMock, patch +from hummingbot.connector.client_order_tracker import ClientOrderTracker from hummingbot.connector.exchange_py_base import ExchangePyBase from hummingbot.core.data_type.common import OrderType, PriceType, TradeType from hummingbot.core.data_type.order_book import OrderBook @@ -44,6 +45,8 @@ def create_mock_strategy(self): connector.get_order_book.return_value = OrderBook() connector.get_balance.return_value = Decimal("0.0") connector.get_available_balance.return_value = Decimal("0.0") + connector._order_tracker = MagicMock(spec=ClientOrderTracker) + connector._order_tracker.fetch_order.return_value = None strategy.connectors = { "connector1": connector, } @@ -172,3 +175,7 @@ def test_get_total_and_available_balance(self): self.assertEqual(balance, Decimal("0.0")) available_balance = self.component.get_available_balance("connector1", "ETH") self.assertEqual(available_balance, Decimal("0.0")) + + def test_get_in_flight_order(self): + in_flight_orders = self.component.get_in_flight_order("connector1", "OID-BUY-1") + self.assertEqual(in_flight_orders, None)