diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 573a21acb1c6..4a7cd8e7e494 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -32,6 +32,39 @@ repos:
         types_or: [python, cython, rst, markdown]
         args: ["-L", "ot,zar,warmup"]
 
+  ##############################################################################
+  # Rust formatting and linting
+  ##############################################################################
+  - repo: local
+    hooks:
+      - id: fmt
+        name: cargo fmt
+        description: Format files with cargo fmt.
+        entry: cargo fmt
+        language: system
+        types: [rust]
+        args: ["--manifest-path", "nautilus_core/Cargo.toml", "--all"]
+        files: \.rs$
+        pass_filenames: false
+      - id: cargo-clippy
+        name: cargo clippy
+        description: Run the Clippy linter on the package.
+        entry: cargo clippy
+        language: system
+        types: [rust]
+        args: ["--manifest-path", "nautilus_core/Cargo.toml", "--", "-D", "warnings"]
+        files: \.rs$
+        pass_filenames: false
+      - id: cargo-check
+        name: cargo check
+        description: Check the package for errors.
+        entry: cargo check
+        language: system
+        types: [rust]
+        args: ["--manifest-path", "nautilus_core/Cargo.toml"]
+        files: \.rs$
+        pass_filenames: false
+
   ##############################################################################
   # Python/Cython formatting and linting
   ##############################################################################
@@ -64,7 +97,7 @@ repos:
         args: ["--settings-file", "pyproject.toml", "."]
 
   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.1.0
    hooks:
      - id: black
        types_or: [python, pyi]
@@ -73,7 +106,7 @@ repos:
        exclude: "docs/_pygments/monokai.py"
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.991
+    rev: v1.0.0
    hooks:
      - id: mypy
        args: [
@@ -107,7 +140,7 @@ repos:
        files: ^nautilus_trader/
        exclude: "nautilus_trader/test_kit"
        args:
-          - "--ignore=D100,D102,D103,D104,D107,D105,D200,D203,D205,D212,D400,D413,D415"
+          - "--ignore=D100,D102,D103,D104,D107,D105,D200,D203,D205,D212,D400,D413,D415,D416"
        additional_dependencies:
          - toml
@@ -129,3 +162,4 @@ repos:
 # D400: First line should end with a period (not always a first line)
 # D413: Missing blank line after last section ('Parameters')
 # D415: First line should end with a period, question mark, or exclamation point (not always a first line)
+# D416: Section name should end with a colon ('Warnings:', not 'Warnings') (incorrect?)
diff --git a/Makefile b/Makefile
index 5e80fa12958e..d87b019a19ce 100644
--- a/Makefile
+++ b/Makefile
@@ -28,7 +28,6 @@ format:
 	(cd nautilus_core && cargo fmt)
 
 pre-commit: format
-	(cd nautilus_core && cargo fmt --all -- --check && cargo check -q && cargo clippy --all-targets --all-features -- -D warnings)
 	pre-commit run --all-files
 
 update:
diff --git a/README.md b/README.md
index 690fd5e4b74b..9a55ebf09fa9 100644
--- a/README.md
+++ b/README.md
@@ -15,9 +15,9 @@
 
 | Platform          | Rust      | Python |
 |:------------------|:----------|:-------|
-| Linux (x86\_64)   | `1.66.1+` | `3.9+` |
-| macOS (x86\_64)   | `1.66.1+` | `3.9+` |
-| Windows (x86\_64) | `1.66.1+` | `3.9+` |
+| Linux (x86\_64)   | `1.67.1+` | `3.9+` |
+| macOS (x86\_64)   | `1.67.1+` | `3.9+` |
+| Windows (x86\_64) | `1.67.1+` | `3.9+` |
 
 - **Website:** https://nautilustrader.io
 - **Docs:** https://docs.nautilustrader.io
@@ -97,7 +97,7 @@ optional C-inspired syntax.
 
 The project heavily utilizes Cython to provide static type safety and increased performance
 for Python through [C extension modules](https://docs.python.org/3/extending/extending.html).
 The vast majority of the production code is actually
-written in Cython, however the libraries can be accessed from both pure Python and Cython.
+written in Cython, however the libraries can be accessed from both Python and Cython.
 
 ## What is Rust?
@@ -112,7 +112,7 @@ eliminating many classes of bugs at compile-time.
 
 The project increasingly utilizes Rust for core performance-critical components. Python language binding is handled through
 Cython, with static libraries linked at compile-time before the wheel binaries are packaged, so a user
 does not need to have Rust installed to run NautilusTrader. In the future as more Rust code is introduced,
-[PyO3](https://pyo3.rs/latest/) will be leveraged for easier Python bindings.
+[PyO3](https://pyo3.rs/latest) will be leveraged for easier Python bindings.
 
 ## Architecture (data flow)
@@ -276,7 +276,7 @@ class EMACross(Strategy):
     Cancels all orders and closes all positions on stop.
     """
 
-    def __init__(self, config: EMACrossConfig):
+    def __init__(self, config: EMACrossConfig) -> None:
         super().__init__(config)
 
         # Configuration
@@ -290,7 +290,7 @@ class EMACross(Strategy):
 
         self.instrument: Optional[Instrument] = None  # Initialized in on_start
 
-    def on_start(self):
+    def on_start(self) -> None:
         """Actions to be performed on strategy start."""
         # Get instrument
         self.instrument = self.cache.instrument(self.instrument_id)
@@ -305,7 +305,7 @@ class EMACross(Strategy):
         # Subscribe to live data
         self.subscribe_bars(self.bar_type)
 
-    def on_bar(self, bar: Bar):
+    def on_bar(self, bar: Bar) -> None:
         """Actions to be performed when the strategy receives a bar."""
         # BUY LOGIC
         if self.fast_ema.value >= self.slow_ema.value:
@@ -322,7 +322,7 @@ class EMACross(Strategy):
             self.close_all_positions(self.instrument_id)
             self.sell()
 
-    def buy(self):
+    def buy(self) -> None:
         """Users simple buy method (example)."""
         order: MarketOrder = self.order_factory.market(
             instrument_id=self.instrument_id,
@@ -332,7 +332,7 @@ class EMACross(Strategy):
 
         self.submit_order(order)
 
-    def sell(self):
+    def sell(self) -> None:
         """Users simple sell method (example)."""
         order: MarketOrder = self.order_factory.market(
             instrument_id=self.instrument_id,
@@ -342,7 +342,7 @@ class EMACross(Strategy):
 
         self.submit_order(order)
 
-    def on_stop(self):
+    def on_stop(self) -> None:
         """Actions to be performed when the strategy is stopped."""
         # Cleanup orders and positions
         self.cancel_all_orders(self.instrument_id)
@@ -351,7 +351,7 @@ class EMACross(Strategy):
         # Unsubscribe from data
         self.unsubscribe_bars(self.bar_type)
 
-    def on_reset(self):
+    def on_reset(self) -> None:
         """Actions to be performed when the strategy is reset."""
        # Reset indicators here
        self.fast_ema.reset()
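A note on the `-> None` annotations threaded through the README example above (and paired with the mypy bump to v1.0.0): by default mypy skips the body of any function whose signature carries no annotations, so even a zero-argument `__init__` needs an explicit return annotation to opt in to type checking. A minimal self-contained illustration (not NautilusTrader code):

```python
class Counter:
    def __init__(self) -> None:  # without `-> None`, mypy would skip this body
        self.count = 0  # now checked: `count` is inferred as `int`

    def increment(self) -> None:
        self.count += 1


counter = Counter()
counter.increment()
print(counter.count)  # 1
```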
diff --git a/RELEASES.md b/RELEASES.md
index 4ce3b56d4a2a..05d5e329e791 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -1,3 +1,35 @@
+# NautilusTrader 1.169.0 Beta
+
+Released on 18th February 2023 (UTC).
+
+### Breaking Changes
+- `NautilusConfig` objects are now _pseudo-immutable_ due to the new msgspec 0.13.0
+- Renamed `OrderFactory.bracket` param `post_only_entry` -> `entry_post_only` (consistency with other params)
+- Renamed `OrderFactory.bracket` param `post_only_tp` -> `tp_post_only` (consistency with other params)
+- Renamed `build_time_bars_with_no_updates` -> `time_bars_build_with_no_updates` (consistency with new param)
+- Renamed `OrderFactory.set_order_count()` -> `set_client_order_id_count()` (clarity)
+- Renamed `TradingNode.start()` to `TradingNode.run()`
+
+### Enhancements
+- Completed overhaul and improvements to the Binance adapter(s), thanks @poshcoe
+- Added Binance aggregated trades functionality with `use_agg_trade_ticks`, thanks @poshcoe
+- Added `time_bars_timestamp_on_close` option for configurable bar timestamping (True by default)
+- Added `OrderFactory.generate_client_order_id()` (calls internal generator)
+- Added `OrderFactory.generate_order_list_id()` (calls internal generator)
+- Added `OrderFactory.create_list(...)` as an easier method for creating order lists
+- Added `__len__` implementation for `OrderList` (returns length of orders)
+- Implemented optimized logger using a Rust MPSC channel and separate thread
+- Exposed and improved the `MatchingEngine` public API for custom functionality
+- Exposed `TradingNode.run_async()` for easier running from an async context
+- Exposed `TradingNode.stop_async()` for easier stopping from an async context
+
+### Fixes
+- Fixed registration of `SimulationModule` (and refined `Actor` base registration)
+- Fixed loading of previously emulated and transformed orders (handles transforming `OrderInitialized` event)
+- Fixed handling of `MARKET_TO_LIMIT` orders in matching and risk engines, thanks for reporting @martinsaip
+
+---
+
 # NautilusTrader 1.168.0 Beta
 
 Released on 29th January 2023 (UTC).
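The "optimized logger using a Rust MPSC channel and separate thread" noted above is a classic producer/consumer design: hot paths only enqueue records, while a dedicated consumer thread does the slow I/O. This is not the Rust implementation — just a minimal pure-Python sketch of the same pattern, with a `queue.Queue` standing in for the MPSC channel:

```python
import queue
import threading

log_queue: "queue.Queue[str | None]" = queue.Queue()


def _writer() -> None:
    # Consumer thread: drains the channel and performs the slow I/O
    while True:
        record = log_queue.get()
        if record is None:  # sentinel: shut down cleanly
            break
        print(record)  # stand-in for formatting + file/stdout writes


thread = threading.Thread(target=_writer, daemon=True)
thread.start()

log_queue.put("INFO: engine started")  # producers just enqueue (cheap, non-blocking)
log_queue.put(None)
thread.join()
```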
diff --git a/build.py b/build.py
index 26580b46b70c..3f4edffcdda9 100644
--- a/build.py
+++ b/build.py
@@ -234,14 +234,9 @@ def _get_clang_version() -> str:
         return output
     except subprocess.CalledProcessError as e:
         raise RuntimeError(
+            "You are installing from source which requires the Clang compiler to be installed.\n"
             f"Error running clang: {e.stderr.decode()}",
         ) from e
-    except FileNotFoundError as e:
-        if "clang" in e.strerror:
-            raise RuntimeError(
-                "You are installing from source which requires the Clang compiler to be installed.",
-            ) from e
-        raise
 
 
 def _get_rustc_version() -> str:
@@ -256,15 +251,10 @@ def _get_rustc_version() -> str:
         return output
     except subprocess.CalledProcessError as e:
         raise RuntimeError(
+            "You are installing from source which requires the Rust compiler to "
+            "be installed.\nFind more information at https://www.rust-lang.org/tools/install\n"
             f"Error running rustc: {e.stderr.decode()}",
         ) from e
-    except FileNotFoundError as e:
-        if "rustc" in e.strerror:
-            raise RuntimeError(
-                "You are installing from source which requires the Rust compiler to "
-                "be installed. Find more information at https://www.rust-lang.org/tools/install",
-            ) from e
-        raise
 
 
 def build(pyo3_only=False) -> None:
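The change above folds the "install the compiler" hint into the `CalledProcessError` path instead of a separate `FileNotFoundError` branch. A runnable, generic illustration of the same pattern — the `get_tool_version` helper is hypothetical, not part of build.py:

```python
import subprocess


def get_tool_version(cmd: list[str], hint: str) -> str:
    # Run a compiler/tool, surfacing a helpful install hint on failure
    try:
        return subprocess.check_output(cmd, stderr=subprocess.PIPE).decode().strip()
    except subprocess.CalledProcessError as e:
        raise RuntimeError(f"{hint}\nError running {cmd[0]}: {e.stderr.decode()}") from e


# Example usage (assumes rustc is on PATH):
# print(get_tool_version(["rustc", "--version"], "Install Rust: https://www.rust-lang.org/tools/install"))
```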
diff --git a/docs/getting_started/installation.md b/docs/getting_started/installation.md
index fe4a99ac3a5d..88244ebf3134 100644
--- a/docs/getting_started/installation.md
+++ b/docs/getting_started/installation.md
@@ -30,7 +30,7 @@ For MacBook Pro M1/M2, make sure your Python installed using pyenv is configured
 
     PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install
 
-See https://pyo3.rs/v0.17.3/getting_started#virtualenvs.
+See https://pyo3.rs/latest/getting_started#virtualenvs.
 
 It's possible to install from source using `pip` if you first install the build dependencies
 as specified in the `pyproject.toml`. However, we highly recommend installing using [poetry](https://python-poetry.org/) as below.
diff --git a/docs/getting_started/quick_start.md b/docs/getting_started/quick_start.md
index 9d44bbbdaae6..010d141a2891 100644
--- a/docs/getting_started/quick_start.md
+++ b/docs/getting_started/quick_start.md
@@ -65,6 +65,7 @@ registering indicators to receive certain data types, however in this example we
 
 ```python
 from typing import Optional
 
+from nautilus_trader.core.message import Event
 from nautilus_trader.trading.strategy import Strategy, StrategyConfig
 from nautilus_trader.indicators.macd import MovingAverageConvergenceDivergence
 from nautilus_trader.model.data.tick import QuoteTick
@@ -85,7 +86,7 @@ class MACDConfig(StrategyConfig):
 
 
 class MACDStrategy(Strategy):
-    def __init__(self, config: MACDConfig):
+    def __init__(self, config: MACDConfig) -> None:
         super().__init__(config=config)
         # Our "trading signal"
         self.macd = MovingAverageConvergenceDivergence(
@@ -99,13 +100,13 @@ class MACDStrategy(Strategy):
         # Convenience
         self.position: Optional[Position] = None
 
-    def on_start(self):
+    def on_start(self) -> None:
         self.subscribe_quote_ticks(instrument_id=self.instrument_id)
 
-    def on_stop(self):
+    def on_stop(self) -> None:
         self.unsubscribe_quote_ticks(instrument_id=self.instrument_id)
 
-    def on_quote_tick(self, tick: QuoteTick):
+    def on_quote_tick(self, tick: QuoteTick) -> None:
         # Update our MACD
         self.macd.handle_quote_tick(tick)
         if self.macd.value:
@@ -115,11 +116,11 @@ class MACDStrategy(Strategy):
         if self.position:
             assert self.position.quantity <= 1000
 
-    def on_event(self, event):
+    def on_event(self, event: Event) -> None:
         if isinstance(event, PositionEvent):
             self.position = self.cache.position(event.position_id)
 
-    def check_for_entry(self):
+    def check_for_entry(self) -> None:
         if self.cache.positions():
             # If we have a position, do not enter again
             return
@@ -136,7 +137,7 @@ class MACDStrategy(Strategy):
         )
         self.submit_order(order)
 
-    def check_for_exit(self):
+    def check_for_exit(self) -> None:
         if not self.cache.positions():
             # If we don't have a position, return early
             return
@@ -154,7 +155,7 @@ class MACDStrategy(Strategy):
         )
         self.submit_order(order)
 
-    def on_dispose(self):
+    def on_dispose(self) -> None:
         pass  # Do nothing else
 ```
diff --git a/docs/index.md b/docs/index.md
index d412ab9c6f0f..857fd0c0586a 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -73,7 +73,7 @@ optional additional C-inspired syntax.
 
 The project heavily utilizes Cython to provide static type safety and increased performance
 for Python through [C extension modules](https://docs.python.org/3/extending/extending.html).
 The vast majority of the production code is actually
-written in Cython, however the libraries can be accessed from both pure Python and Cython.
+written in Cython, however the libraries can be accessed from both Python and Cython.
 
 ## What is Rust?
@@ -88,7 +88,7 @@ eliminating many classes of bugs at compile-time.
 
 The project increasingly utilizes Rust for core performance-critical components. Python language binding is handled through
 Cython, with static libraries linked at compile-time before the wheel binaries are packaged, so a user
 does not need to have Rust installed to run NautilusTrader. In the future as more Rust code is introduced,
-[PyO3](https://pyo3.rs/v0.15.1/) will be leveraged for easier Python bindings.
+[PyO3](https://pyo3.rs/latest) will be leveraged for easier Python bindings.
 
 ## Architecture Quality Attributes
diff --git a/docs/integrations/binance.md b/docs/integrations/binance.md
index 760699ec432e..a57286837936 100644
--- a/docs/integrations/binance.md
+++ b/docs/integrations/binance.md
@@ -1,7 +1,7 @@
 # Binance
 
-Founded in 2017, Binance is one of the largest cryptocurrency exchanges in terms 
-of daily trading volume, and open interest of crypto assets and crypto 
+Founded in 2017, Binance is one of the largest cryptocurrency exchanges in terms
+of daily trading volume, and open interest of crypto assets and crypto
 derivative products. This integration supports live market data ingest and order
 execution with Binance.
@@ -11,20 +11,20 @@ unstable beta phase and exercise caution.
 ```
 
 ## Overview
-The following documentation assumes a trader is setting up for both live market 
-data feeds, and trade execution. The full Binance integration consists of an assortment of components, 
+The following documentation assumes a trader is setting up for both live market
+data feeds, and trade execution. The full Binance integration consists of an assortment of components,
 which can be used together or separately depending on the users needs.
 
 - `BinanceHttpClient` provides low-level HTTP API connectivity
 - `BinanceWebSocketClient` provides low-level WebSocket API connectivity
 - `BinanceInstrumentProvider` provides instrument parsing and loading functionality
-- `BinanceDataClient` provides a market data feed manager
-- `BinanceExecutionClient` provides an account management and trade execution gateway
+- `BinanceSpotDataClient`/`BinanceFuturesDataClient` provide market data feed managers
+- `BinanceSpotExecutionClient`/`BinanceFuturesExecutionClient` provide account management and trade execution gateways
 - `BinanceLiveDataClientFactory` creation factory for Binance data clients (used by the trading node builder)
 - `BinanceLiveExecClientFactory` creation factory for Binance execution clients (used by the trading node builder)
 
 ```{note}
-Most users will simply define a configuration for a live trading node (as below), 
+Most users will simply define a configuration for a live trading node (as below),
 and won't need to necessarily work with these lower level components individually.
 ```
@@ -70,7 +70,7 @@ You must also have at least *one* of the following:
 - You have subscribed to trade ticks for the instrument you're submitting the order for (used to infer activation price)
 
 ## Configuration
-The most common use case is to configure a live `TradingNode` to include Binance 
+The most common use case is to configure a live `TradingNode` to include Binance
 data and execution clients. To achieve this, add a `BINANCE` section to your client
 configuration(s):
@@ -117,9 +117,9 @@ node.build()
 ### API credentials
 There are two options for supplying your credentials to the Binance clients.
 Either pass the corresponding `api_key` and `api_secret` values to the configuration objects, or
-set the following environment variables: 
+set the following environment variables:
 
-For Binance Spot/Margin live clients, you can set: 
+For Binance Spot/Margin live clients, you can set:
 - `BINANCE_API_KEY`
 - `BINANCE_API_SECRET`
@@ -142,13 +142,14 @@ credentials are valid and have trading permissions.
 All the Binance account types will be supported for live trading.
 Set the `account_type` using the `BinanceAccountType` enum.
 The account type options are:
 - `SPOT`
-- `MARGIN`
+- `MARGIN_CROSS` (margin shared between open positions)
+- `MARGIN_ISOLATED` (margin assigned to a single position)
 - `FUTURES_USDT` (USDT or BUSD stablecoins as collateral)
 - `FUTURES_COIN` (other cryptocurrency as collateral)
 
 ### Base URL overrides
 It's possible to override the default base URLs for both HTTP Rest and
-WebSocket APIs. This is useful for configuring API clusters for performance reasons, 
+WebSocket APIs. This is useful for configuring API clusters for performance reasons,
 or when Binance has provided you with specialized endpoints.
 
 ### Binance US
@@ -182,10 +183,18 @@ config = TradingNodeConfig(
 )
 ```
 
+### Aggregated Trades
+Binance provides aggregated trade data endpoints as an alternative source of trade ticks.
+In comparison to the default trade endpoints, aggregated trade data endpoints can return all
+ticks between a `start_time` and `end_time`.
+
+To use aggregated trades and the endpoint features, set the `use_agg_trade_ticks` option
+to `True` (`False` by default).
+
 ### Parser warnings
-Some Binance instruments are unable to be parsed into Nautilus objects if they 
-contain enormous field values beyond what can be handled by the platform. 
-In these cases, a _warn and continue_ approach is taken (the instrument will not 
+Some Binance instruments are unable to be parsed into Nautilus objects if they
+contain enormous field values beyond what can be handled by the platform.
+In these cases, a _warn and continue_ approach is taken (the instrument will not
 be available).
 
 These warnings may cause unnecessary log noise, and so it's possible to
@@ -194,7 +203,7 @@ example below:
 
 ```python
 instrument_provider=InstrumentProviderConfig(
-    load_all=True, 
+    load_all=True,
     log_warnings=False,
 )
 ```
@@ -210,7 +219,7 @@ methods may eventually become first-class (not requiring custom/generic subscriptions)
 ```
 
 ### BinanceFuturesMarkPriceUpdate
-You can subscribe to `BinanceFuturesMarkPriceUpdate` (included funding rating info) 
+You can subscribe to `BinanceFuturesMarkPriceUpdate` (includes funding rate info)
 data streams by subscribing in the following way from your actor or strategy:
 
 ```python
@@ -221,8 +230,8 @@ self.subscribe_data(
 )
 ```
 
-This will result in your actor/strategy passing these received `BinanceFuturesMarkPriceUpdate` 
-objects to your `on_data` method. You will need to check the type, as this 
+This will result in your actor/strategy passing these received `BinanceFuturesMarkPriceUpdate`
+objects to your `on_data` method. You will need to check the type, as this
 method acts as a flexible handler for all custom/generic data.
 
 ```python
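A hedged sketch of enabling the aggregated-trades endpoints described above on a Binance data client config — the `use_agg_trade_ticks` and `account_type` fields are per the docs, while the import paths are assumed from the adapter's layout and may differ by version:

```python
from nautilus_trader.adapters.binance.common.enums import BinanceAccountType
from nautilus_trader.adapters.binance.config import BinanceDataClientConfig

config = BinanceDataClientConfig(
    account_type=BinanceAccountType.FUTURES_USDT,
    use_agg_trade_ticks=True,  # False by default
)
```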
diff --git a/docs/user_guide/advanced/data.md b/docs/user_guide/advanced/data.md
index e1619286a15d..63cb483117c5 100644
--- a/docs/user_guide/advanced/data.md
+++ b/docs/user_guide/advanced/data.md
@@ -21,7 +21,7 @@ class MyDataPoint(Data):
         z: int,
         ts_event: int,
         ts_init: int,
-    ):
+    ) -> None:
         super().__init__(ts_event, ts_init)
 
         self.label = label
@@ -57,7 +57,7 @@ objects to your `on_data` method. You will need to check the type, as this
 method acts as a flexible handler for all custom/generic data.
 
 ```python
-def on_data(self, data: Data):
+def on_data(self, data: Data) -> None:
     # First check the type of data
     if isinstance(data, MyDataPoint):
         # Do something with the data
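For completeness, a hedged sketch of how a custom type like `MyDataPoint` above would be subscribed to, modeled on the `subscribe_data` call shown in the Binance section — the `DataType` import path and metadata keys are assumptions, not confirmed by this patch:

```python
from nautilus_trader.model.data.base import DataType

# Inside an actor/strategy, e.g. in on_start():
self.subscribe_data(
    data_type=DataType(MyDataPoint, metadata={"label": "example"}),
)
```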
diff --git a/docs/user_guide/advanced/emulated_orders.md b/docs/user_guide/advanced/emulated_orders.md
index ab12632dcf14..020641b67925 100644
--- a/docs/user_guide/advanced/emulated_orders.md
+++ b/docs/user_guide/advanced/emulated_orders.md
@@ -5,10 +5,9 @@ of whether the type is supported on a trading venue. The logic and code paths for
 order emulation are exactly the same for all environment contexts (backtest, sandbox, live),
 and utilize a common `OrderEmulator` component.
 
-## Limitations
+```{note}
 There is no limitation on the number of emulated orders you can have per running instance.
-Currently only individual orders can be emulated, so it is not possible to submit contingency order lists
-for emulation (this may be supported in a future version).
+```
 
 ## Submitting for emulation
 The only requirement to emulate an order is to pass a `TriggerType` to the `emulation_trigger`
@@ -29,18 +28,18 @@ An emulated order will retain its original client order ID throughout its entire
 
 ## Life cycle
 An emulated order will progress through the following stages:
 - Submitted by a `Strategy` through the `submit_order` method
-- Then sent to the `RiskEngine` for pre-trade risk checks (if may be denied at this point)
+- Then sent to the `RiskEngine` for pre-trade risk checks (it may be denied at this point)
 - Then sent to the `OrderEmulator` where it is _held_ / emulated
 
 ### Held emulated orders
-The following will occur for an emulated order now inside the `OrderEmulator` component:
+The following will occur for an emulated order now _held_ by the `OrderEmulator` component:
 - The original `SubmitOrder` command will be cached
-- The emulated order will be held inside a local `MatchingCore` component
+- The emulated order will be processed inside a local `MatchingCore` component
 - The `OrderEmulator` will subscribe to any needed market data (if not already) to update the matching core
-- The emulated order will be modified (by the trader) and updated (by the market) until _released_ or canceled
+- The emulated order can be modified (by the trader) and updated (by the market) until _released_ or canceled
 
 ### Released emulated orders
-Once an emulated order is triggered / matched locally based on a data feed, the following
+Once an emulated order is triggered / matched locally based on the arrival of data, the following
 _release_ actions will occur:
 - The order will be transformed to either a `MARKET` or `LIMIT` order (see below table) through an additional `OrderInitialized` event
 - The orders `emulation_trigger` will be set to `NONE` (it will no longer be treated as an emulated order by any component)
@@ -73,7 +72,7 @@ It's possible to query for emulated orders through the following `Cache` methods:
 
 See the full [API reference](../../api_reference/cache) for additional details.
 
-You can also query order objects directly in pure Python:
+You can also query order objects directly in Python:
 - `order.is_emulated`
 
 Or through the C API if in Cython:
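Per the doc above, the only requirement to emulate an order is passing a `TriggerType` to `emulation_trigger`. A hedged sketch from inside a strategy method — the `order_factory.limit` keyword arguments and enum member names follow the docs but may vary across versions:

```python
from nautilus_trader.model.enums import OrderSide, TriggerType

# Inside a strategy, e.g. in on_bar():
order = self.order_factory.limit(
    instrument_id=self.instrument_id,
    order_side=OrderSide.BUY,
    quantity=self.instrument.make_qty(100_000),
    price=self.instrument.make_price(1.0000),
    emulation_trigger=TriggerType.BID_ASK,  # held locally until triggered, then released
)
self.submit_order(order)
```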
diff --git a/docs/user_guide/architecture.md b/docs/user_guide/architecture.md
index bc6b1b09e18e..f8c254d946e6 100644
--- a/docs/user_guide/architecture.md
+++ b/docs/user_guide/architecture.md
@@ -33,7 +33,7 @@ when making design and architectural decisions, roughly in order of 'weighting'.
 
 ## System architecture
 The NautilusTrader codebase is actually both a framework for composing trading
-systems, and a set of default system applications which can operate in various
+systems, and a set of default system implementations which can operate in various
 environment contexts.
 
 ### Environment contexts
@@ -44,7 +44,7 @@ environment contexts.
 ### Common core
 The platform has been designed to share as much common code between backtest, sandbox and live trading systems as possible.
 This is formalized in the `system` subpackage, where you will find the `NautilusKernel` class,
-providing a common core system kernel.
+providing a common core system 'kernel'.
 
 A _ports and adapters_ architectural style allows modular components to be 'plugged into' the
 core system, providing many hooks for user defined / custom component implementations.
@@ -100,7 +100,7 @@ for each of these subpackages from the left nav menu.
 ## Code structure
 The foundation of the codebase is the `nautilus_core` directory, containing a collection of core Rust libraries including a C API interface generated by `cbindgen`.
 
-The bulk of the production code resides in the `nautilus_trader` directory, which contains a collection of pure Python and Cython modules.
+The bulk of the production code resides in the `nautilus_trader` directory, which contains a collection of Python and Cython modules.
 
 Python bindings for the Rust core are achieved by statically linking the Rust libraries to the C extension modules generated by Cython at compile time (effectively extending the CPython API).
diff --git a/docs/user_guide/core_concepts.md b/docs/user_guide/core_concepts.md
index 2a6bbb907df8..891a4208b383 100644
--- a/docs/user_guide/core_concepts.md
+++ b/docs/user_guide/core_concepts.md
@@ -4,7 +4,7 @@ There are three main use cases for this software package:
 
 - Backtesting trading systems with historical data (`backtest`)
 - Testing trading systems with real-time data and simulated execution (`sandbox`)
-- Deploying trading systems with real-time data and executing on venues with real accounts (`live`)
+- Deploying trading systems with real-time data and executing on venues with real (or paper) accounts (`live`)
 
 The projects codebase provides a framework for implementing the software layer of systems which achieve the above. You will find
 the default `backtest` and `live` system implementations in their respectively named subpackages. A `sandbox` environment can
diff --git a/docs/user_guide/strategies.md b/docs/user_guide/strategies.md
index 8e06c1d01980..b7b540ebc973 100644
--- a/docs/user_guide/strategies.md
+++ b/docs/user_guide/strategies.md
@@ -4,7 +4,7 @@ The heart of the NautilusTrader user experience is in writing and working with
 trading strategies. Defining a trading strategy is achieved by inheriting the `Strategy` class,
 and implementing the methods required by the strategy.
 
-Using the basic building blocks of data ingest and order management (which we will discuss
+Using the basic building blocks of data ingest, event handling, and order management (which we will discuss
 below), it's possible to implement any type of trading strategy including directional, momentum, re-balancing,
 pairs, market making etc.
 
@@ -25,7 +25,7 @@ a constructor where you can handle initialization.
 Minimally the base/super class needs to be initialized:
 
 ```python
 class MyStrategy(Strategy):
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()  # <-- the super class must be called to initialize the strategy
 ```
 
@@ -61,7 +61,7 @@ class MyStrategyConfig(StrategyConfig):
 # parameterize the instrument the strategy will trade.
 
 class MyStrategy(Strategy):
-    def __init__(self, config: MyStrategyConfig):
+    def __init__(self, config: MyStrategyConfig) -> None:
         super().__init__(config)
 
         # Configuration
diff --git a/examples/backtest/betfair_backtest_orderbook_imbalance.py b/examples/backtest/betfair_backtest_orderbook_imbalance.py
index 518105c93ab9..1d245db814d0 100644
--- a/examples/backtest/betfair_backtest_orderbook_imbalance.py
+++ b/examples/backtest/betfair_backtest_orderbook_imbalance.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
+
 import pandas as pd
 
 from nautilus_trader.adapters.betfair.common import BETFAIR_VENUE
@@ -84,6 +86,7 @@
     ]
     engine.add_strategies(strategies)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/crypto_ema_cross_ethusdt_trade_ticks.py b/examples/backtest/crypto_ema_cross_ethusdt_trade_ticks.py
index d334295f266a..e13a5449bd54 100644
--- a/examples/backtest/crypto_ema_cross_ethusdt_trade_ticks.py
+++ b/examples/backtest/crypto_ema_cross_ethusdt_trade_ticks.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -72,6 +73,7 @@
     strategy = EMACross(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/crypto_ema_cross_ethusdt_trailing_stop.py b/examples/backtest/crypto_ema_cross_ethusdt_trailing_stop.py
index 3b1b899faf02..8d321d0a8ee9 100644
--- a/examples/backtest/crypto_ema_cross_ethusdt_trailing_stop.py
+++ b/examples/backtest/crypto_ema_cross_ethusdt_trailing_stop.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -76,6 +77,7 @@
     strategy = EMACrossTrailingStop(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
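The `time.sleep(0.1)` added before each `input(...)` above (and repeated in the remaining backtest examples below) plausibly gives the new threaded logger — see the release notes — a moment to flush queued log lines before the main thread blocks. The pattern in isolation, as a runnable sketch:

```python
import time

print("engine configured")  # stands in for engine/log output
time.sleep(0.1)             # give a background log thread time to flush
input("Press Enter to continue...")
```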
diff --git a/examples/backtest/fx_ema_cross_audusd_bars_from_ticks.py b/examples/backtest/fx_ema_cross_audusd_bars_from_ticks.py
index d0799c97679c..2456320bf2aa 100644
--- a/examples/backtest/fx_ema_cross_audusd_bars_from_ticks.py
+++ b/examples/backtest/fx_ema_cross_audusd_bars_from_ticks.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -82,6 +83,7 @@
     strategy = EMACross(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/fx_ema_cross_audusd_ticks.py b/examples/backtest/fx_ema_cross_audusd_ticks.py
index 9d6655eb8193..88d823dfa046 100644
--- a/examples/backtest/fx_ema_cross_audusd_ticks.py
+++ b/examples/backtest/fx_ema_cross_audusd_ticks.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -93,6 +94,7 @@
     strategy = EMACross(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_external.py b/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_external.py
index 7435497e6326..71c5f09bd010 100644
--- a/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_external.py
+++ b/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_external.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -114,6 +115,7 @@
     strategy = EMACrossBracket(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_internal.py b/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_internal.py
index a453a5116a39..e092c2b3969e 100644
--- a/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_internal.py
+++ b/examples/backtest/fx_ema_cross_bracket_gbpusd_bars_internal.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from decimal import Decimal
 
 import pandas as pd
@@ -101,6 +102,7 @@
     strategy = EMACrossBracket(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/backtest/fx_market_maker_gbpusd_bars.py b/examples/backtest/fx_market_maker_gbpusd_bars.py
index 0b8bf668e99f..656852ab0735 100644
--- a/examples/backtest/fx_market_maker_gbpusd_bars.py
+++ b/examples/backtest/fx_market_maker_gbpusd_bars.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 # -------------------------------------------------------------------------------------------------
 
+import time
 from datetime import datetime
 from decimal import Decimal
 
@@ -97,6 +98,7 @@
     strategy = VolatilityMarketMaker(config=config)
     engine.add_strategy(strategy=strategy)
 
+    time.sleep(0.1)
     input("Press Enter to continue...")  # noqa (always Python 3)
 
     # Run the engine (from start to end of data)
diff --git a/examples/indicators/ema_python.py b/examples/indicators/ema_python.py
index 809e8a8a932c..da85adf8461f 100644
--- a/examples/indicators/ema_python.py
+++ b/examples/indicators/ema_python.py
@@ -23,7 +23,7 @@
 
 # It's generally recommended to code indicators in Cython as per the built-in
 # indicators found in the `indicators` subpackage. However this is an example
-# demonstrating an equivalent EMA indicator written in pure Python.
+# demonstrating an equivalent EMA indicator written in Python.
 
 # Note: The `MovingAverage` base class has not being used in this example to
 # provide more clarity on how to implement custom indicators. Basically you need
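For reference, the arithmetic behind an EMA indicator like the one in the example file above — a self-contained sketch independent of the NautilusTrader base classes, using the standard smoothing factor `alpha = 2 / (period + 1)` and seeding with the first observation:

```python
class EMA:
    def __init__(self, period: int) -> None:
        self.alpha = 2.0 / (period + 1.0)  # standard EMA smoothing factor
        self.value = 0.0
        self.initialized = False

    def update(self, price: float) -> None:
        if not self.initialized:
            self.value = price  # seed with the first observation
            self.initialized = True
        else:
            self.value = self.alpha * price + (1.0 - self.alpha) * self.value


ema = EMA(period=10)
for px in (100.0, 101.0, 102.0, 101.5):
    ema.update(px)
print(round(ema.value, 4))
```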
"YOUR_BETFAIR_CERT_DIR", + # market_filter=market_filter, + # ), }, ) strategies = [ @@ -95,6 +95,7 @@ async def main(market_id: str): instrument_id=instrument.id.value, max_trade_size=5, order_id_tag=instrument.selection_id, + subscribe_ticker=True, ), ) for instrument in instruments @@ -110,7 +111,7 @@ async def main(market_id: str): node.build() try: - node.start() + node.run() await asyncio.gather(*asyncio.all_tasks()) except Exception as e: print(e) diff --git a/examples/live/betfair_sandbox.py b/examples/live/betfair_sandbox.py index 2b00a6a2b094..f224b01bc517 100644 --- a/examples/live/betfair_sandbox.py +++ b/examples/live/betfair_sandbox.py @@ -22,7 +22,7 @@ from nautilus_trader.adapters.sandbox.execution import SandboxExecutionClient from nautilus_trader.adapters.sandbox.factory import SandboxLiveExecClientFactory from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.config import CacheDatabaseConfig from nautilus_trader.config import TradingNodeConfig from nautilus_trader.examples.strategies.orderbook_imbalance import OrderBookImbalance @@ -37,7 +37,7 @@ async def main(market_id: str): # Connect to Betfair client early to load instruments and account currency loop = asyncio.get_event_loop() - logger = LiveLogger(loop=loop, clock=LiveClock()) + logger = Logger(clock=LiveClock()) client = get_cached_betfair_client( username=None, # Pass here or will source from the `BETFAIR_USERNAME` env var password=None, # Pass here or will source from the `BETFAIR_PASSWORD` env var @@ -96,7 +96,7 @@ async def main(market_id: str): SandboxExecutionClient.INSTRUMENTS = instruments node.build() - node.start() + node.run() # try: # node.start() # except Exception as ex: diff --git a/examples/live/binance_futures_market_maker.py b/examples/live/binance_futures_market_maker.py index a6733e78e092..34a5c60a5498 100644 --- a/examples/live/binance_futures_market_maker.py +++ b/examples/live/binance_futures_market_maker.py @@ -61,10 +61,10 @@ instrument_provider=InstrumentProviderConfig(load_all=True), ), }, - timeout_connection=5.0, - timeout_reconciliation=5.0, - timeout_portfolio=5.0, - timeout_disconnection=5.0, + timeout_connection=10.0, + timeout_reconciliation=10.0, + timeout_portfolio=10.0, + timeout_disconnection=10.0, timeout_post_stop=2.0, ) # Instantiate the node with a configuration @@ -93,6 +93,6 @@ # Stop and dispose of the node with SIGINT/CTRL+C if __name__ == "__main__": try: - node.start() + node.run() finally: node.dispose() diff --git a/examples/live/binance_futures_testnet_ema_cross.py b/examples/live/binance_futures_testnet_ema_cross.py index db709f4396ee..21c22d025690 100644 --- a/examples/live/binance_futures_testnet_ema_cross.py +++ b/examples/live/binance_futures_testnet_ema_cross.py @@ -69,10 +69,10 @@ instrument_provider=InstrumentProviderConfig(load_all=True), ), }, - timeout_connection=5.0, - timeout_reconciliation=5.0, - timeout_portfolio=5.0, - timeout_disconnection=5.0, + timeout_connection=10.0, + timeout_reconciliation=10.0, + timeout_portfolio=10.0, + timeout_disconnection=10.0, timeout_post_stop=2.0, ) # Instantiate the node with a configuration @@ -102,6 +102,6 @@ # Stop and dispose of the node with SIGINT/CTRL+C if __name__ == "__main__": try: - node.start() + node.run() finally: node.dispose() diff --git a/examples/live/binance_futures_testnet_ema_cross_bracket.py b/examples/live/binance_futures_testnet_ema_cross_bracket.py index 
diff --git a/examples/live/binance_futures_market_maker.py b/examples/live/binance_futures_market_maker.py
index a6733e78e092..34a5c60a5498 100644
--- a/examples/live/binance_futures_market_maker.py
+++ b/examples/live/binance_futures_market_maker.py
@@ -61,10 +61,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -93,6 +93,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_futures_testnet_ema_cross.py b/examples/live/binance_futures_testnet_ema_cross.py
index db709f4396ee..21c22d025690 100644
--- a/examples/live/binance_futures_testnet_ema_cross.py
+++ b/examples/live/binance_futures_testnet_ema_cross.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -102,6 +102,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_futures_testnet_ema_cross_bracket.py b/examples/live/binance_futures_testnet_ema_cross_bracket.py
index c437159d5e21..ea9184d3d3ba 100644
--- a/examples/live/binance_futures_testnet_ema_cross_bracket.py
+++ b/examples/live/binance_futures_testnet_ema_cross_bracket.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -104,6 +104,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_futures_testnet_ema_cross_with_trailing_stop.py b/examples/live/binance_futures_testnet_ema_cross_with_trailing_stop.py
index fd4c6f6f4002..420a31718bbe 100644
--- a/examples/live/binance_futures_testnet_ema_cross_with_trailing_stop.py
+++ b/examples/live/binance_futures_testnet_ema_cross_with_trailing_stop.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -105,6 +105,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_futures_testnet_market_maker.py b/examples/live/binance_futures_testnet_market_maker.py
index 299013d2fb9a..bfd74feb761d 100644
--- a/examples/live/binance_futures_testnet_market_maker.py
+++ b/examples/live/binance_futures_testnet_market_maker.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -101,6 +101,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_spot_ema_cross.py b/examples/live/binance_spot_ema_cross.py
index 30fecec36a37..e0f7c41a41ce 100644
--- a/examples/live/binance_spot_ema_cross.py
+++ b/examples/live/binance_spot_ema_cross.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -102,6 +102,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_spot_market_maker.py b/examples/live/binance_spot_market_maker.py
index 7979eb77df30..45a3547f3965 100644
--- a/examples/live/binance_spot_market_maker.py
+++ b/examples/live/binance_spot_market_maker.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -101,6 +101,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/binance_spot_testnet_ema_cross.py b/examples/live/binance_spot_testnet_ema_cross.py
index 5325446aa3bd..24ff6e0ca10c 100644
--- a/examples/live/binance_spot_testnet_ema_cross.py
+++ b/examples/live/binance_spot_testnet_ema_cross.py
@@ -69,10 +69,10 @@
             instrument_provider=InstrumentProviderConfig(load_all=True),
         ),
     },
-    timeout_connection=5.0,
-    timeout_reconciliation=5.0,
-    timeout_portfolio=5.0,
-    timeout_disconnection=5.0,
+    timeout_connection=10.0,
+    timeout_reconciliation=10.0,
+    timeout_portfolio=10.0,
+    timeout_disconnection=10.0,
     timeout_post_stop=2.0,
 )
 # Instantiate the node with a configuration
@@ -102,6 +102,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/interactive_brokers_book_imbalance.py b/examples/live/interactive_brokers_book_imbalance.py
index 9e10433edd8a..709fede2f627 100644
--- a/examples/live/interactive_brokers_book_imbalance.py
+++ b/examples/live/interactive_brokers_book_imbalance.py
@@ -60,6 +60,7 @@
         "IB": InteractiveBrokersDataClientConfig(
             instrument_provider=provider_config,
             read_only_api=False,
+            start_gateway=False,
         ),
     },
     exec_clients={
@@ -67,6 +68,7 @@
             routing=RoutingConfig(default=True, venues={"IDEALPRO"}),
             instrument_provider=provider_config,
             read_only_api=False,
+            start_gateway=False,
         ),
     },
     timeout_connection=90.0,
@@ -100,6 +102,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
diff --git a/examples/live/interactive_brokers_example.py b/examples/live/interactive_brokers_example.py
index a9dce570ab00..1ac766486c21 100644
--- a/examples/live/interactive_brokers_example.py
+++ b/examples/live/interactive_brokers_example.py
@@ -57,15 +57,17 @@
     log_level="DEBUG",
     data_clients={
         "IB": InteractiveBrokersDataClientConfig(
-            gateway_host="127.0.0.1",
             instrument_provider=InstrumentProviderConfig(
                 load_all=True,
                 filters=msgspec.json.encode(instrument_filters),
             ),
+            start_gateway=False,
         ),
     },
    exec_clients={
-        "IB": InteractiveBrokersExecClientConfig(),
+        "IB": InteractiveBrokersExecClientConfig(
+            start_gateway=False,
+        ),
     },
     timeout_connection=90.0,
     timeout_reconciliation=5.0,
@@ -100,6 +102,6 @@
 # Stop and dispose of the node with SIGINT/CTRL+C
 if __name__ == "__main__":
     try:
-        node.start()
+        node.run()
     finally:
         node.dispose()
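A hedged sketch consolidating the repeated changes across the live examples above: client timeouts are doubled from 5s to 10s, and the node is now started via `run()` (renamed from `start()` per the release notes). Data/exec client sections are omitted here for brevity:

```python
from nautilus_trader.config import TradingNodeConfig

config = TradingNodeConfig(
    timeout_connection=10.0,       # was 5.0
    timeout_reconciliation=10.0,   # was 5.0
    timeout_portfolio=10.0,        # was 5.0
    timeout_disconnection=10.0,    # was 5.0
    timeout_post_stop=2.0,
)
```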
diff --git a/nautilus_core/Cargo.lock b/nautilus_core/Cargo.lock
index e1425f082c24..0e7af44e1bf2 100644
--- a/nautilus_core/Cargo.lock
+++ b/nautilus_core/Cargo.lock
@@ -97,9 +97,9 @@ dependencies = [
 
 [[package]]
 name = "async-trait"
-version = "0.1.63"
+version = "0.1.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eff18d764974428cf3a9328e23fc5c986f5fbed46e6cd4cdf42544df5d297ec1"
+checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -135,18 +135,6 @@ version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
-[[package]]
-name = "bstr"
-version = "0.2.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
-dependencies = [
- "lazy_static",
- "memchr",
- "regex-automata",
- "serde",
-]
-
 [[package]]
 name = "bumpalo"
 version = "3.12.0"
@@ -401,13 +389,12 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
 
 [[package]]
 name = "csv"
-version = "1.1.6"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1"
+checksum = "af91f40b7355f82b0a891f50e70399475945bb0b0da4f1700ce60761c9d3e359"
 dependencies = [
- "bstr",
  "csv-core",
- "itoa 0.4.8",
+ "itoa",
  "ryu",
  "serde",
 ]
@@ -423,9 +410,9 @@ dependencies = [
 
 [[package]]
 name = "cxx"
-version = "1.0.88"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "322296e2f2e5af4270b54df9e85a02ff037e271af20ba3e7fe1575515dc840b8"
+checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
 dependencies = [
  "cc",
  "cxxbridge-flags",
@@ -435,9 +422,9 @@ dependencies = [
 
 [[package]]
 name = "cxx-build"
-version = "1.0.88"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "017a1385b05d631e7875b1f151c9f012d37b53491e2a87f65bff5c262b2111d8"
+checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
 dependencies = [
  "cc",
  "codespan-reporting",
@@ -450,15 +437,15 @@ dependencies = [
 
 [[package]]
 name = "cxxbridge-flags"
-version = "1.0.88"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c26bbb078acf09bc1ecda02d4223f03bdd28bd4874edcb0379138efc499ce971"
+checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
 
 [[package]]
 name = "cxxbridge-macro"
-version = "1.0.88"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "357f40d1f06a24b60ae1fe122542c1fb05d28d32acb2aed064e84bc2ad1e252e"
+checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -467,9 +454,9 @@ dependencies = [
 
 [[package]]
 name = "darling"
-version = "0.14.2"
+version = "0.14.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa"
+checksum = "c0808e1bd8671fb44a113a14e13497557533369847788fa2ae912b6ebfce9fa8"
 dependencies = [
  "darling_core",
  "darling_macro",
 ]
 
 [[package]]
 name = "darling_core"
-version = "0.14.2"
+version = "0.14.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f"
+checksum = "001d80444f28e193f30c2f293455da62dcf9a6b29918a4253152ae2b1de592cb"
 dependencies = [
  "fnv",
  "ident_case",
@@ -491,9 +478,9 @@ dependencies = [
 
 [[package]]
 name = "darling_macro"
-version = "0.14.2"
+version = "0.14.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e"
+checksum = "b36230598a2d5de7ec1c6f51f72d8a99a9208daff41de2084d06e3fd3ea56685"
 dependencies = [
  "darling_core",
  "quote",
@@ -557,9 +544,9 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
 
 [[package]]
 name = "fastrand"
-version = "1.8.0"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
 dependencies = [
  "instant",
 ]
@@ -578,9 +565,9 @@ checksum = "ee1b05cbd864bcaecbd3455d6d967862d446e4ebfc3c2e5e5b9841e53cba6673"
 
 [[package]]
 name = "futures"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0"
+checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84"
 dependencies = [
  "futures-channel",
  "futures-core",
@@ -593,9 +580,9 @@ dependencies = [
 
 [[package]]
 name = "futures-channel"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed"
+checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5"
 dependencies = [
  "futures-core",
  "futures-sink",
 ]
@@ -603,15 +590,15 @@ dependencies = [
 
 [[package]]
 name = "futures-core"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac"
+checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608"
 
 [[package]]
 name = "futures-executor"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2"
+checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -620,15 +607,15 @@ dependencies = [
 
 [[package]]
 name = "futures-io"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb"
+checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531"
 
 [[package]]
 name = "futures-macro"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
+checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -637,15 +624,15 @@ dependencies = [
 
 [[package]]
 name = "futures-sink"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9"
+checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364"
 
 [[package]]
 name = "futures-task"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
+checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366"
 
 [[package]]
 name = "futures-timer"
@@ -655,9 +642,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c"
 
 [[package]]
 name = "futures-util"
-version = "0.3.25"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6"
+checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1"
 dependencies = [
  "futures-channel",
  "futures-core",
@@ -704,9 +691,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 
 [[package]]
 name = "heck"
-version = "0.4.0"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
 
 [[package]]
 name = "hermit-abi"
@@ -774,9 +761,9 @@ dependencies = [
 
 [[package]]
 name = "indoc"
-version = "1.0.8"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da2d6f23ffea9d7e76c53eee25dfb67bcd8fde7f1198b0855350698c9f07c780"
+checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306"
 
 [[package]]
 name = "instant"
@@ -796,12 +783,6 @@ dependencies = [
  "either",
 ]
 
-[[package]]
-name = "itoa"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
-
 [[package]]
 name = "itoa"
 version = "1.0.5"
@@ -810,9 +791,9 @@ checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
 
 [[package]]
 name = "js-sys"
-version = "0.3.60"
+version = "0.3.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
+checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730"
 dependencies = [
  "wasm-bindgen",
 ]
@@ -1039,9 +1020,9 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.17.0"
+version = "1.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
+checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
 
 [[package]]
 name = "oorandom"
@@ -1067,9 +1048,9 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.9.6"
+version = "0.9.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
+checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
 dependencies = [
  "cfg-if",
  "libc",
@@ -1164,18 +1145,18 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.50"
+version = "1.0.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
+checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "pyo3"
-version = "0.18.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ccd4149c8c3975099622b4e1962dac27565cf5663b76452c3e2b66e0b6824277"
+checksum = "06a3d8e8a46ab2738109347433cb7b96dffda2e4a218b03ef27090238886b147"
 dependencies = [
  "cfg-if",
  "indoc",
@@ -1190,9 +1171,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.18.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cd09fe469834db21ee60e0051030339e5d361293d8cb5ec02facf7fdcf52dbf"
+checksum = "75439f995d07ddfad42b192dfcf3bc66a7ecfd8b4a1f5f6f046aa5c2c5d7677d"
 dependencies = [
  "once_cell",
  "target-lexicon",
 ]
@@ -1200,9 +1181,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-ffi"
-version = "0.18.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c427c9a96b9c5b12156dbc11f76b14f49e9aae8905ca783ea87c249044ef137"
+checksum = "839526a5c07a17ff44823679b68add4a58004de00512a95b6c1c98a6dcac0ee5"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -1210,9 +1191,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros"
-version = "0.18.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16b822bbba9d60630a44d2109bc410489bb2f439b33e3a14ddeb8a40b378a7c4"
+checksum = "bd44cf207476c6a9760c4653559be4f206efafb924d3e4cbf2721475fc0d6cc5"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -1222,9 +1203,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.18.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84ae898104f7c99db06231160770f3e40dad6eb9021daddc0fedfa3e41dff10a"
+checksum = "dc1f43d8e30460f36350d18631ccf85ded64c059829208fe680904c65bcd0a4c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1310,12 +1291,6 @@ dependencies = [
  "regex-syntax",
 ]
 
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-
 [[package]]
 name = "regex-syntax"
 version = "0.6.28"
@@ -1452,11 +1427,11 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.91"
+version = "1.0.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
+checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
 dependencies = [
- "itoa 1.0.5",
+ "itoa",
  "ryu",
  "serde",
 ]
@@ -1544,9 +1519,9 @@ dependencies = [
 
 [[package]]
 name = "target-lexicon"
-version = "0.12.5"
+version = "0.12.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9410d0f6853b1d94f0e519fb95df60f29d2c1eff2d921ffdf01a4c8a3b54f12d"
+checksum = "8ae9980cab1db3fceee2f6c6f643d5d8de2997c58ee8d25fb0cc8a9e9e7348e5"
 
 [[package]]
 name = "tempfile"
@@ -1656,9 +1631,9 @@ checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
 
 [[package]]
 name = "uuid"
-version = "1.2.2"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
+checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79"
 dependencies = [
  "getrandom",
 ]
@@ -1694,9 +1669,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.83"
+version = "0.2.84"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
+checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
 ]
@@ -1704,9 +1679,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.83"
+version = "0.2.84"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
+checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
 dependencies = [
  "bumpalo",
  "log",
@@ -1719,9 +1694,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.83"
+version = "0.2.84"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1729,9 +1704,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" dependencies = [ "proc-macro2", "quote", @@ -1742,15 +1717,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.83" +version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" [[package]] name = "web-sys" -version = "0.3.60" +version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" dependencies = [ "js-sys", "wasm-bindgen", @@ -1789,9 +1764,18 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" -version = "0.42.0" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", diff --git a/nautilus_core/Cargo.toml b/nautilus_core/Cargo.toml index 9339eb16c94e..91513c118756 100644 --- a/nautilus_core/Cargo.toml +++ b/nautilus_core/Cargo.toml @@ -8,7 +8,7 @@ members = [ ] [workspace.package] -rust-version = "1.66.1" +rust-version = "1.67.1" version = "0.2.0" edition = "2021" authors = ["Nautech Systems "] @@ -17,7 +17,7 @@ documentation = "https://docs.nautilustrader.io" [workspace.dependencies] chrono = "0.4.22" -pyo3 = "0.18.0" +pyo3 = "0.18.1" rand = "0.8.5" rust-fsm = "0.6.1" strum = { version = "0.24.1", features = ["derive"] } diff --git a/nautilus_core/common/src/clock.rs b/nautilus_core/common/src/clock.rs index 9e5c7213e6e1..b70f4046d609 100644 --- a/nautilus_core/common/src/clock.rs +++ b/nautilus_core/common/src/clock.rs @@ -356,6 +356,9 @@ pub unsafe extern "C" fn test_clock_advance_time( } } +// TODO: This struct implementation potentially leaks memory +// TODO: Skip clippy check for now since it requires large modification +#[allow(clippy::drop_non_drop)] #[no_mangle] pub extern "C" fn vec_time_events_drop(v: Vec_TimeEvent) { drop(v); // Memory freed here diff --git a/nautilus_core/common/src/logging.rs b/nautilus_core/common/src/logging.rs index 9cfab9668e33..ea70b4af2a6c 100644 --- a/nautilus_core/common/src/logging.rs +++ b/nautilus_core/common/src/logging.rs @@ -14,141 +14,229 @@ // ------------------------------------------------------------------------------------------------- use std::ffi::c_char; +use std::io::{Stderr, Stdout}; +use std::sync::mpsc::{channel, Receiver, SendError, Sender}; +use std::time::{Duration, Instant}; use std::{ - io::{self, BufWriter, 
Stderr, Stdout, Write}, + io::{self, BufWriter, Write}, ops::{Deref, DerefMut}, + thread, }; use nautilus_core::datetime::unix_nanos_to_iso8601; use nautilus_core::string::{cstr_to_string, string_to_cstr}; +use nautilus_core::time::UnixNanos; use nautilus_core::uuid::UUID4; use nautilus_model::identifiers::trader_id::TraderId; use crate::enums::{LogColor, LogLevel}; pub struct Logger { + /// The trader ID for the logger. pub trader_id: TraderId, + /// The machine ID for the logger. pub machine_id: String, + /// The instance ID for the logger. pub instance_id: UUID4, + /// The maximum log level to write to stdout. pub level_stdout: LogLevel, + /// The maximum messages per second which can be flushed to stdout or stderr. + pub rate_limit: usize, + /// If logging is bypassed. pub is_bypassed: bool, - log_template: String, - out: BufWriter<Stdout>, - err: BufWriter<Stderr>, + tx: Sender<LogMessage>, } +#[derive(Clone, Debug)] +pub struct LogMessage { + timestamp_ns: UnixNanos, + level: LogLevel, + color: LogColor, + component: String, + msg: String, +} + +/// Provides a high-performance logger utilizing an MPSC channel under the hood. +/// +/// A separate thread is spawned at initialization which receives `LogMessage` structs over the +/// channel. Rate limiting is implemented using a simple token bucket algorithm (maximum messages +/// per second). impl Logger { fn new( trader_id: TraderId, machine_id: String, instance_id: UUID4, level_stdout: LogLevel, + rate_limit: usize, is_bypassed: bool, ) -> Self { + let trader_id_clone = trader_id.value.to_string(); + let (tx, rx) = channel::<LogMessage>(); + + thread::spawn(move || { + Self::handle_messages(&trader_id_clone, level_stdout, rate_limit, rx) + }); + Logger { trader_id, machine_id, instance_id, level_stdout, + rate_limit, is_bypassed, - log_template: String::from( - "\x1b[1m{ts}\x1b[0m {color}[{level}] {trader_id}.{component}: {msg}\x1b[0m\n", - ), - out: BufWriter::new(io::stdout()), - err: BufWriter::new(io::stderr()), + tx, + } + } + + fn handle_messages( + trader_id: &str, + level_stdout: LogLevel, + rate_limit: usize, + rx: Receiver<LogMessage>, + ) { + let mut out = BufWriter::new(io::stdout()); + let mut err = BufWriter::new(io::stderr()); + + let log_template = String::from( + "\x1b[1m{ts}\x1b[0m {color}[{level}] {trader_id}.{component}: {msg}\x1b[0m\n", + ); + + let mut msg_count = 0; + let mut bucket_time = Instant::now(); + + // Continue to receive and handle log messages until the channel is hung up + while let Ok(log_msg) = rx.recv() { + if log_msg.level < level_stdout { + continue; + } + + while msg_count >= rate_limit { + if bucket_time.elapsed().as_secs() >= 1 { + msg_count = 0; + bucket_time = Instant::now(); + } else { + thread::sleep(Duration::from_millis(10)); + } + } + + let fmt_line = log_template + .replace("{ts}", &unix_nanos_to_iso8601(log_msg.timestamp_ns)) + .replace("{color}", &log_msg.color.to_string()) + .replace("{level}", &log_msg.level.to_string()) + .replace("{trader_id}", trader_id) + .replace("{component}", &log_msg.component) + .replace("{msg}", &log_msg.msg); + + if log_msg.level >= LogLevel::Error { + Self::write_stderr(&mut err, fmt_line); + Self::flush_stderr(&mut err); + } else { + Self::write_stdout(&mut out, fmt_line); + Self::flush_stdout(&mut out); + } + + msg_count += 1; } + + // Finally ensure remaining buffers are flushed + Self::flush_stderr(&mut err); + Self::flush_stdout(&mut out); } - #[inline] - fn log( + fn write_stdout(out: &mut BufWriter<Stdout>, line: String) { + match out.write_all(line.as_bytes()) { + Ok(_) => {} + Err(e) => eprintln!("Error
writing to stdout: {e:?}"), + } + } + + fn flush_stdout(out: &mut BufWriter) { + match out.flush() { + Ok(_) => {} + Err(e) => eprintln!("Error flushing stdout: {e:?}"), + } + } + + fn write_stderr(err: &mut BufWriter, line: String) { + match err.write_all(line.as_bytes()) { + Ok(_) => {} + Err(e) => eprintln!("Error writing to stderr: {e:?}"), + } + } + + fn flush_stderr(err: &mut BufWriter) { + match err.flush() { + Ok(_) => {} + Err(e) => eprintln!("Error flushing stderr: {e:?}"), + } + } + + fn send( &mut self, timestamp_ns: u64, level: LogLevel, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - if level < self.level_stdout { - return Ok(()); - } - - let fmt_line = self - .log_template - .replace("{ts}", &unix_nanos_to_iso8601(timestamp_ns)) - .replace("{color}", &color.to_string()) - .replace("{level}", &level.to_string()) - .replace("{trader_id}", &self.trader_id.to_string()) - .replace("{component}", component) - .replace("{msg}", msg); - - if level >= LogLevel::Error { - self.err.write_all(fmt_line.as_bytes())?; - self.err.flush() - } else { - self.out.write_all(fmt_line.as_bytes())?; - self.out.flush() - } + component: String, + msg: String, + ) -> Result<(), SendError> { + let log_message = LogMessage { + timestamp_ns, + level, + color, + component, + msg, + }; + self.tx.send(log_message) } - #[inline] pub fn debug( &mut self, timestamp_ns: u64, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - self.log(timestamp_ns, LogLevel::Debug, color, component, msg) + component: String, + msg: String, + ) -> Result<(), SendError> { + self.send(timestamp_ns, LogLevel::Debug, color, component, msg) } - #[inline] pub fn info( &mut self, timestamp_ns: u64, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - self.log(timestamp_ns, LogLevel::Info, color, component, msg) + component: String, + msg: String, + ) -> Result<(), SendError> { + self.send(timestamp_ns, LogLevel::Info, color, component, msg) } - #[inline] pub fn warn( &mut self, timestamp_ns: u64, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - self.log(timestamp_ns, LogLevel::Warning, color, component, msg) + component: String, + msg: String, + ) -> Result<(), SendError> { + self.send(timestamp_ns, LogLevel::Warning, color, component, msg) } - #[inline] pub fn error( &mut self, timestamp_ns: u64, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - self.log(timestamp_ns, LogLevel::Error, color, component, msg) + component: String, + msg: String, + ) -> Result<(), SendError> { + self.send(timestamp_ns, LogLevel::Error, color, component, msg) } - #[inline] pub fn critical( &mut self, timestamp_ns: u64, color: LogColor, - component: &str, - msg: &str, - ) -> Result<(), io::Error> { - self.log(timestamp_ns, LogLevel::Critical, color, component, msg) - } - - #[inline] - fn flush(&mut self) -> Result<(), io::Error> { - self.out.flush()?; - self.err.flush() + component: String, + msg: String, + ) -> Result<(), SendError> { + self.send(timestamp_ns, LogLevel::Critical, color, component, msg) } } @@ -187,6 +275,7 @@ pub unsafe extern "C" fn logger_new( machine_id_ptr: *const c_char, instance_id_ptr: *const c_char, level_stdout: LogLevel, + rate_limit: usize, is_bypassed: u8, ) -> CLogger { CLogger(Box::new(Logger::new( @@ -194,21 +283,16 @@ pub unsafe extern "C" fn logger_new( String::from(&cstr_to_string(machine_id_ptr)), UUID4::from(cstr_to_string(instance_id_ptr).as_str()), level_stdout, 
+ rate_limit, is_bypassed != 0, ))) } #[no_mangle] -pub extern "C" fn logger_free(mut logger: CLogger) { - let _ = logger.flush(); // ignore flushing error if any +pub extern "C" fn logger_free(logger: CLogger) { drop(logger); // Memory freed here } -#[no_mangle] -pub extern "C" fn flush(logger: &mut CLogger) { - let _ = logger.flush(); -} - #[no_mangle] pub extern "C" fn logger_get_trader_id_cstr(logger: &CLogger) -> *const c_char { string_to_cstr(&logger.trader_id.to_string()) @@ -245,7 +329,7 @@ pub unsafe extern "C" fn logger_log( ) { let component = cstr_to_string(component_ptr); let msg = cstr_to_string(msg_ptr); - let _ = logger.log(timestamp_ns, level, color, &component, &msg); + let _ = logger.send(timestamp_ns, level, color, component, msg); } //////////////////////////////////////////////////////////////////////////////// @@ -253,11 +337,10 @@ pub unsafe extern "C" fn logger_log( //////////////////////////////////////////////////////////////////////////////// #[cfg(test)] mod tests { + use crate::logging::{LogColor, LogLevel, Logger}; use nautilus_core::uuid::UUID4; use nautilus_model::identifiers::trader_id::TraderId; - use crate::logging::{LogColor, LogLevel, Logger}; - #[test] fn test_new_logger() { let logger = Logger::new( @@ -265,6 +348,7 @@ mod tests { String::from("user-01"), UUID4::new(), LogLevel::Debug, + 100_000, false, ); assert_eq!(logger.trader_id, TraderId::new("TRADER-000")); @@ -278,6 +362,7 @@ mod tests { String::from("user-01"), UUID4::new(), LogLevel::Info, + 100_000, false, ); @@ -285,8 +370,8 @@ mod tests { .info( 1650000000000000, LogColor::Normal, - "RiskEngine", - "This is a test.", + String::from("RiskEngine"), + String::from("This is a test."), ) .expect("Error while logging"); } diff --git a/nautilus_core/common/src/timer.rs b/nautilus_core/common/src/timer.rs index 6298359a8af4..2028fe037efa 100644 --- a/nautilus_core/common/src/timer.rs +++ b/nautilus_core/common/src/timer.rs @@ -122,6 +122,7 @@ pub struct TestTimer { } impl TestTimer { + #[must_use] pub fn new( name: String, interval_ns: u64, diff --git a/nautilus_core/core/src/cvec.rs b/nautilus_core/core/src/cvec.rs index 43b49eb12e34..2d35825a87c1 100644 --- a/nautilus_core/core/src/cvec.rs +++ b/nautilus_core/core/src/cvec.rs @@ -15,7 +15,7 @@ use std::{ffi::c_void, ptr::null}; -/// CVec is a C compatible struct that stores an opaque pointer to a block of +/// `CVec` is a C compatible struct that stores an opaque pointer to a block of /// memory, its length and the capacity of the vector it was allocated from.
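Before the `CVec` notes continue, the logging refactor above merits a standalone illustration. The new `Logger` stops writing synchronously on the caller's thread: messages go into an MPSC channel and a dedicated sink thread drains them, with a token bucket capping how many are flushed per second. A minimal, self-contained sketch of that channel-plus-token-bucket pattern, using invented names (`Msg`, `spawn_rate_limited_sink`) rather than the crate's actual API:

use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread::{self, JoinHandle};
use std::time::{Duration, Instant};

// Hypothetical stand-in for the crate's `LogMessage`.
struct Msg(String);

fn spawn_rate_limited_sink(rate_limit: usize) -> (Sender<Msg>, JoinHandle<()>) {
    let (tx, rx): (Sender<Msg>, Receiver<Msg>) = channel();
    let handle = thread::spawn(move || {
        let mut msg_count = 0usize;
        let mut bucket_time = Instant::now();
        // Receive until every sender is dropped and the channel hangs up
        while let Ok(Msg(line)) = rx.recv() {
            // Token bucket: refill the budget once per second, stall otherwise
            while msg_count >= rate_limit {
                if bucket_time.elapsed().as_secs() >= 1 {
                    msg_count = 0;
                    bucket_time = Instant::now();
                } else {
                    thread::sleep(Duration::from_millis(10));
                }
            }
            println!("{line}");
            msg_count += 1;
        }
    });
    (tx, handle)
}

fn main() {
    let (tx, sink) = spawn_rate_limited_sink(2);
    for i in 0..5 {
        tx.send(Msg(format!("message {i}"))).unwrap();
    }
    drop(tx); // Hang up the channel so the sink drains and exits
    sink.join().unwrap();
}

With a budget of two per second, the first two messages print immediately and the rest are released as the bucket refills each second; the senders themselves never block, which is what makes the refactored logger cheap on the hot path.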
/// /// NOTE: Changing the values here may lead to undefined behaviour when the @@ -57,12 +57,12 @@ impl CVec { impl<T> From<Vec<T>> for CVec { fn from(data: Vec<T>) -> Self { if data.is_empty() { - CVec::empty() + Self::empty() } else { let len = data.len(); let cap = data.capacity(); - CVec { - ptr: &mut data.leak()[0] as *mut T as *mut c_void, + Self { + ptr: (&mut data.leak()[0] as *mut T).cast::<c_void>(), len, cap, } @@ -77,7 +77,7 @@ impl<T> From<Vec<T>> for CVec { pub extern "C" fn cvec_drop(cvec: CVec) { let CVec { ptr, len, cap } = cvec; let data: Vec<u8> = unsafe { Vec::from_raw_parts(ptr.cast::<u8>(), len, cap) }; - drop(data) // Memory freed here + drop(data); // Memory freed here } #[no_mangle] diff --git a/nautilus_core/core/src/datetime.rs b/nautilus_core/core/src/datetime.rs index cec1999cd26b..3b8fd79a5add 100644 --- a/nautilus_core/core/src/datetime.rs +++ b/nautilus_core/core/src/datetime.rs @@ -73,6 +73,7 @@ pub extern "C" fn nanos_to_micros(nanos: u64) -> u64 { } #[inline] +#[must_use] pub fn unix_nanos_to_iso8601(timestamp_ns: u64) -> String { let dt = DateTime::<Utc>::from(UNIX_EPOCH + Duration::from_nanos(timestamp_ns)); dt.to_rfc3339_opts(SecondsFormat::Nanos, true) diff --git a/nautilus_core/core/src/string.rs b/nautilus_core/core/src/string.rs index 983cf09b760d..7dd4b5ac46f7 100644 --- a/nautilus_core/core/src/string.rs +++ b/nautilus_core/core/src/string.rs @@ -24,6 +24,7 @@ use pyo3::{ffi, FromPyPointer, Python}; /// - Assumes `ptr` is borrowed from a valid Python UTF-8 `str`. /// # Panics /// - If `ptr` is null. +#[must_use] pub unsafe fn pystr_to_string(ptr: *mut ffi::PyObject) -> String { assert!(!ptr.is_null(), "`ptr` was NULL"); Python::with_gil(|py| PyString::from_borrowed_ptr(py, ptr).to_string()) @@ -35,6 +36,7 @@ pub unsafe fn pystr_to_string(ptr: *mut ffi::PyObject) -> String { /// - Assumes `ptr` is a valid C string pointer. /// # Panics /// - If `ptr` is null. +#[must_use] pub unsafe fn cstr_to_string(ptr: *const c_char) -> String { assert!(!ptr.is_null(), "`ptr` was NULL"); CStr::from_ptr(ptr) @@ -44,6 +46,7 @@ pub unsafe fn cstr_to_string(ptr: *const c_char) -> String { } /// Create a C string pointer to newly allocated memory from a [&str]. +#[must_use] pub fn string_to_cstr(s: &str) -> *const c_char { CString::new(s).expect("CString::new failed").into_raw() } diff --git a/nautilus_core/core/src/uuid.rs b/nautilus_core/core/src/uuid.rs index cd8c0d2ae2ac..12cb1262b3a2 100644 --- a/nautilus_core/core/src/uuid.rs +++ b/nautilus_core/core/src/uuid.rs @@ -32,6 +32,7 @@ pub struct UUID4 { } impl UUID4 { + #[must_use] pub fn new() -> Self { let uuid = Uuid::new_v4(); UUID4 { @@ -83,6 +84,8 @@ pub extern "C" fn uuid4_free(uuid4: UUID4) { /// /// # Safety /// - Assumes `ptr` is a valid C string pointer. +/// # Panics +/// - If `ptr` cannot be cast to a valid C string.
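To make the `CVec` ownership contract above concrete: `From<Vec<T>>` leaks the vector so the allocation outlives Rust's scope-based drop, and `cvec_drop` later rebuilds the vector from its raw parts so the memory is freed exactly once. A condensed sketch of that round trip, with illustrative names (`RawVec`, `leak_vec`, `drop_vec`) standing in for the crate's FFI surface and a concrete `u64` element type assumed for the example:

use std::ffi::c_void;
use std::mem;

// Illustrative stand-in for `CVec`: opaque pointer, length, capacity.
struct RawVec {
    ptr: *mut c_void,
    len: usize,
    cap: usize,
}

// Leak a (non-empty) Vec; the crate special-cases empty vectors.
fn leak_vec(mut data: Vec<u64>) -> RawVec {
    let len = data.len();
    let cap = data.capacity();
    let ptr = data.as_mut_ptr().cast::<c_void>();
    mem::forget(data); // Ownership now lives behind `ptr`
    RawVec { ptr, len, cap }
}

// Rebuild the Vec from its raw parts; dropping it frees the memory.
// Safety: `raw` must come from `leak_vec` and be dropped only once.
unsafe fn drop_vec(raw: RawVec) {
    let data = Vec::from_raw_parts(raw.ptr.cast::<u64>(), raw.len, raw.cap);
    drop(data); // Memory freed here
}

fn main() {
    let raw = leak_vec(vec![1u64, 2, 3]);
    // ... hand `raw.ptr`, `raw.len`, `raw.cap` across the C boundary ...
    unsafe { drop_vec(raw) };
}

All three fields must round-trip unchanged for `Vec::from_raw_parts` to be sound, which is exactly why the doc comment above warns that mutating them from the C side leads to undefined behaviour.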
#[no_mangle] pub unsafe extern "C" fn uuid4_from_cstr(ptr: *const c_char) -> UUID4 { UUID4::from( @@ -131,7 +134,7 @@ mod tests { #[test] fn test_uuid4_default() { - let uuid: UUID4 = Default::default(); + let uuid: UUID4 = UUID4::default(); let uuid_string = uuid.value.to_string(); let uuid_parsed = Uuid::parse_str(&uuid_string).expect("Uuid::parse_str failed"); assert_eq!(uuid_parsed.get_version().unwrap(), uuid::Version::Random); diff --git a/nautilus_core/model/src/data/bar.rs b/nautilus_core/model/src/data/bar.rs index aef6673617ae..97ab833c6322 100644 --- a/nautilus_core/model/src/data/bar.rs +++ b/nautilus_core/model/src/data/bar.rs @@ -69,11 +69,6 @@ pub extern "C" fn bar_specification_to_cstr(bar_spec: &BarSpecification) -> *con string_to_cstr(&bar_spec.to_string()) } -#[no_mangle] -pub extern "C" fn bar_specification_free(bar_spec: BarSpecification) { - drop(bar_spec); // Memory freed here -} - #[no_mangle] pub extern "C" fn bar_specification_hash(bar_spec: &BarSpecification) -> u64 { let mut h = DefaultHasher::new(); diff --git a/nautilus_core/model/src/data/tick.rs b/nautilus_core/model/src/data/tick.rs index f4dde5bcd95f..0679090cb23f 100644 --- a/nautilus_core/model/src/data/tick.rs +++ b/nautilus_core/model/src/data/tick.rs @@ -40,6 +40,7 @@ pub struct QuoteTick { } impl QuoteTick { + #[must_use] pub fn new( instrument_id: InstrumentId, bid: Price, @@ -48,7 +49,7 @@ impl QuoteTick { ask_size: Quantity, ts_event: UnixNanos, ts_init: UnixNanos, - ) -> QuoteTick { + ) -> Self { correctness::u8_equal( bid.precision, ask.precision, @@ -97,6 +98,7 @@ pub struct TradeTick { } impl TradeTick { + #[must_use] pub fn new( instrument_id: InstrumentId, price: Price, @@ -105,7 +107,7 @@ impl TradeTick { trade_id: TradeId, ts_event: UnixNanos, ts_init: UnixNanos, - ) -> TradeTick { + ) -> Self { TradeTick { instrument_id, price, diff --git a/nautilus_core/model/src/identifiers/account_id.rs b/nautilus_core/model/src/identifiers/account_id.rs index 41cd74fc4805..59a61da90baf 100644 --- a/nautilus_core/model/src/identifiers/account_id.rs +++ b/nautilus_core/model/src/identifiers/account_id.rs @@ -37,7 +37,8 @@ impl Display for AccountId { } impl AccountId { - pub fn new(s: &str) -> AccountId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`AccountId` value"); correctness::string_contains(s, "-", "`TraderId` value"); diff --git a/nautilus_core/model/src/identifiers/client_id.rs b/nautilus_core/model/src/identifiers/client_id.rs index 0465bf1e4734..20d78021b176 100644 --- a/nautilus_core/model/src/identifiers/client_id.rs +++ b/nautilus_core/model/src/identifiers/client_id.rs @@ -37,7 +37,8 @@ impl Display for ClientId { } impl ClientId { - pub fn new(s: &str) -> ClientId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`ClientId` value"); ClientId { diff --git a/nautilus_core/model/src/identifiers/client_order_id.rs b/nautilus_core/model/src/identifiers/client_order_id.rs index db2d3f101bb2..9bdc7d34c616 100644 --- a/nautilus_core/model/src/identifiers/client_order_id.rs +++ b/nautilus_core/model/src/identifiers/client_order_id.rs @@ -37,7 +37,8 @@ impl Display for ClientOrderId { } impl ClientOrderId { - pub fn new(s: &str) -> ClientOrderId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`ClientOrderId` value"); ClientOrderId { diff --git a/nautilus_core/model/src/identifiers/component_id.rs b/nautilus_core/model/src/identifiers/component_id.rs index 5f8ed9df7757..4fc2bb9af16c 100644 --- 
a/nautilus_core/model/src/identifiers/component_id.rs +++ b/nautilus_core/model/src/identifiers/component_id.rs @@ -37,7 +37,8 @@ impl Display for ComponentId { } impl ComponentId { - pub fn new(s: &str) -> ComponentId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`ComponentId` value"); ComponentId { diff --git a/nautilus_core/model/src/identifiers/exec_algorithm_id.rs b/nautilus_core/model/src/identifiers/exec_algorithm_id.rs index 71d5bb1ca033..42b717a7d4a9 100644 --- a/nautilus_core/model/src/identifiers/exec_algorithm_id.rs +++ b/nautilus_core/model/src/identifiers/exec_algorithm_id.rs @@ -37,7 +37,8 @@ impl Display for ExecAlgorithmId { } impl ExecAlgorithmId { - pub fn new(s: &str) -> ExecAlgorithmId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`ExecAlgorithmId` value"); ExecAlgorithmId { diff --git a/nautilus_core/model/src/identifiers/instrument_id.rs b/nautilus_core/model/src/identifiers/instrument_id.rs index 8f60cb6a16d3..b3c8cd109866 100644 --- a/nautilus_core/model/src/identifiers/instrument_id.rs +++ b/nautilus_core/model/src/identifiers/instrument_id.rs @@ -48,7 +48,8 @@ impl Display for InstrumentId { } impl InstrumentId { - pub fn new(symbol: Symbol, venue: Venue) -> InstrumentId { + #[must_use] + pub fn new(symbol: Symbol, venue: Venue) -> Self { InstrumentId { symbol, venue } } } diff --git a/nautilus_core/model/src/identifiers/order_list_id.rs b/nautilus_core/model/src/identifiers/order_list_id.rs index a6d0927fb59f..79f842697074 100644 --- a/nautilus_core/model/src/identifiers/order_list_id.rs +++ b/nautilus_core/model/src/identifiers/order_list_id.rs @@ -37,7 +37,8 @@ impl Display for OrderListId { } impl OrderListId { - pub fn new(s: &str) -> OrderListId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`OrderListId` value"); OrderListId { diff --git a/nautilus_core/model/src/identifiers/position_id.rs b/nautilus_core/model/src/identifiers/position_id.rs index 7e05d3ce4afa..1cffcd56f459 100644 --- a/nautilus_core/model/src/identifiers/position_id.rs +++ b/nautilus_core/model/src/identifiers/position_id.rs @@ -37,7 +37,8 @@ impl Display for PositionId { } impl PositionId { - pub fn new(s: &str) -> PositionId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`PositionId` value"); PositionId { diff --git a/nautilus_core/model/src/identifiers/strategy_id.rs b/nautilus_core/model/src/identifiers/strategy_id.rs index 9a757c71c3c9..ee7ef9cf1758 100644 --- a/nautilus_core/model/src/identifiers/strategy_id.rs +++ b/nautilus_core/model/src/identifiers/strategy_id.rs @@ -35,7 +35,8 @@ impl Display for StrategyId { } impl StrategyId { - pub fn new(s: &str) -> StrategyId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`StrategyId` value"); if s != "EXTERNAL" { correctness::string_contains(s, "-", "`StrategyId` value"); diff --git a/nautilus_core/model/src/identifiers/symbol.rs b/nautilus_core/model/src/identifiers/symbol.rs index 22c01bdf18a1..5cd76c58c765 100644 --- a/nautilus_core/model/src/identifiers/symbol.rs +++ b/nautilus_core/model/src/identifiers/symbol.rs @@ -37,7 +37,8 @@ impl Display for Symbol { } impl Symbol { - pub fn new(s: &str) -> Symbol { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`Symbol` value"); Symbol { diff --git a/nautilus_core/model/src/identifiers/trade_id.rs b/nautilus_core/model/src/identifiers/trade_id.rs index 37d974f5d288..fb7ac9013687 100644 --- 
a/nautilus_core/model/src/identifiers/trade_id.rs +++ b/nautilus_core/model/src/identifiers/trade_id.rs @@ -37,7 +37,8 @@ impl Display for TradeId { } impl TradeId { - pub fn new(s: &str) -> TradeId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`TradeId` value"); TradeId { diff --git a/nautilus_core/model/src/identifiers/trader_id.rs b/nautilus_core/model/src/identifiers/trader_id.rs index da6c53ea9850..6b1fc28f4a7a 100644 --- a/nautilus_core/model/src/identifiers/trader_id.rs +++ b/nautilus_core/model/src/identifiers/trader_id.rs @@ -35,7 +35,8 @@ impl Display for TraderId { } impl TraderId { - pub fn new(s: &str) -> TraderId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`TraderId` value"); correctness::string_contains(s, "-", "`TraderId` value"); diff --git a/nautilus_core/model/src/identifiers/venue.rs b/nautilus_core/model/src/identifiers/venue.rs index 2dada22c45a2..edc78ea47c15 100644 --- a/nautilus_core/model/src/identifiers/venue.rs +++ b/nautilus_core/model/src/identifiers/venue.rs @@ -37,7 +37,8 @@ impl Display for Venue { } impl Venue { - pub fn new(s: &str) -> Venue { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`Venue` value"); Venue { diff --git a/nautilus_core/model/src/identifiers/venue_order_id.rs b/nautilus_core/model/src/identifiers/venue_order_id.rs index 10464399340f..bc7b24fa40eb 100644 --- a/nautilus_core/model/src/identifiers/venue_order_id.rs +++ b/nautilus_core/model/src/identifiers/venue_order_id.rs @@ -37,7 +37,8 @@ impl Display for VenueOrderId { } impl VenueOrderId { - pub fn new(s: &str) -> VenueOrderId { + #[must_use] + pub fn new(s: &str) -> Self { correctness::valid_string(s, "`VenueOrderId` value"); VenueOrderId { diff --git a/nautilus_core/model/src/orderbook/book.rs b/nautilus_core/model/src/orderbook/book.rs index 45199f41e031..ee47a4167d75 100644 --- a/nautilus_core/model/src/orderbook/book.rs +++ b/nautilus_core/model/src/orderbook/book.rs @@ -29,6 +29,7 @@ pub struct OrderBook { } impl OrderBook { + #[must_use] pub fn new(instrument_id: InstrumentId, book_level: BookType) -> Self { OrderBook { bids: Ladder::new(OrderSide::Buy), diff --git a/nautilus_core/model/src/orderbook/ladder.rs b/nautilus_core/model/src/orderbook/ladder.rs index 798fee44cff9..c67da9505fdf 100644 --- a/nautilus_core/model/src/orderbook/ladder.rs +++ b/nautilus_core/model/src/orderbook/ladder.rs @@ -29,6 +29,7 @@ pub struct BookPrice { } impl BookPrice { + #[must_use] pub fn new(value: Price, side: OrderSide) -> Self { BookPrice { value, side } } @@ -69,6 +70,7 @@ pub struct Ladder { } impl Ladder { + #[must_use] pub fn new(side: OrderSide) -> Self { Ladder { side, diff --git a/nautilus_core/model/src/orderbook/level.rs b/nautilus_core/model/src/orderbook/level.rs index 0c8c87169873..309f0df7773f 100644 --- a/nautilus_core/model/src/orderbook/level.rs +++ b/nautilus_core/model/src/orderbook/level.rs @@ -27,6 +27,7 @@ pub struct Level { } impl Level { + #[must_use] pub fn new(price: BookPrice) -> Self { Level { price, diff --git a/nautilus_core/model/src/orderbook/order.rs b/nautilus_core/model/src/orderbook/order.rs index faf3461dab1b..75448402983e 100644 --- a/nautilus_core/model/src/orderbook/order.rs +++ b/nautilus_core/model/src/orderbook/order.rs @@ -28,6 +28,7 @@ pub struct BookOrder { } impl BookOrder { + #[must_use] pub fn new(price: Price, size: Quantity, side: OrderSide, order_id: u64) -> Self { BookOrder { price, diff --git a/nautilus_core/model/src/types/currency.rs 
b/nautilus_core/model/src/types/currency.rs index 93801d80cc2e..2212ea4ba914 100644 --- a/nautilus_core/model/src/types/currency.rs +++ b/nautilus_core/model/src/types/currency.rs @@ -35,6 +35,7 @@ pub struct Currency { } impl Currency { + #[must_use] pub fn new( code: &str, precision: u8, @@ -46,7 +47,7 @@ impl Currency { correctness::valid_string(name, "`Currency` name"); correctness::u8_in_range_inclusive(precision, 0, 9, "`Currency` precision"); - Self { + Currency { code: Box::new(Rc::new(code.to_string())), precision, iso4217, diff --git a/nautilus_core/model/src/types/money.rs b/nautilus_core/model/src/types/money.rs index a2e35ec72ca1..ff3c6de28067 100644 --- a/nautilus_core/model/src/types/money.rs +++ b/nautilus_core/model/src/types/money.rs @@ -34,10 +34,11 @@ pub struct Money { } impl Money { - pub fn new(amount: f64, currency: Currency) -> Money { + #[must_use] + pub fn new(amount: f64, currency: Currency) -> Self { correctness::f64_in_range_inclusive(amount, MONEY_MIN, MONEY_MAX, "`Money` amount"); - Self { + Money { raw: f64_to_fixed_i64(amount, currency.precision), currency, } diff --git a/nautilus_core/model/src/types/price.rs b/nautilus_core/model/src/types/price.rs index 1fe0071c7676..25e94efd800a 100644 --- a/nautilus_core/model/src/types/price.rs +++ b/nautilus_core/model/src/types/price.rs @@ -34,17 +34,18 @@ pub struct Price { } impl Price { + #[must_use] pub fn new(value: f64, precision: u8) -> Self { correctness::f64_in_range_inclusive(value, PRICE_MIN, PRICE_MAX, "`Price` value"); - Self { + Price { raw: f64_to_fixed_i64(value, precision), precision, } } pub fn from_raw(raw: i64, precision: u8) -> Self { - Self { raw, precision } + Price { raw, precision } } pub fn is_zero(&self) -> bool { @@ -219,11 +220,6 @@ pub extern "C" fn price_from_raw(raw: i64, precision: u8) -> Price { Price::from_raw(raw, precision) } -#[no_mangle] -pub extern "C" fn price_free(price: Price) { - drop(price); // Memory freed here -} - #[no_mangle] pub extern "C" fn price_as_f64(price: &Price) -> f64 { price.as_f64() diff --git a/nautilus_core/model/src/types/quantity.rs b/nautilus_core/model/src/types/quantity.rs index 8fb59996f58e..552e163d76a8 100644 --- a/nautilus_core/model/src/types/quantity.rs +++ b/nautilus_core/model/src/types/quantity.rs @@ -34,17 +34,18 @@ pub struct Quantity { } impl Quantity { + #[must_use] pub fn new(value: f64, precision: u8) -> Self { correctness::f64_in_range_inclusive(value, QUANTITY_MIN, QUANTITY_MAX, "`Quantity` value"); - Self { + Quantity { raw: f64_to_fixed_u64(value, precision), precision, } } pub fn from_raw(raw: u64, precision: u8) -> Self { - Self { raw, precision } + Quantity { raw, precision } } pub fn is_zero(&self) -> bool { @@ -205,11 +206,6 @@ pub extern "C" fn quantity_from_raw(raw: u64, precision: u8) -> Quantity { Quantity::from_raw(raw, precision) } -#[no_mangle] -pub extern "C" fn quantity_free(qty: Quantity) { - drop(qty); // Memory freed here -} - #[no_mangle] pub extern "C" fn quantity_as_f64(qty: &Quantity) -> f64 { qty.as_f64() diff --git a/nautilus_core/persistence/src/parquet/reader.rs b/nautilus_core/persistence/src/parquet/reader.rs index 318998d8b685..24d332ebda24 100644 --- a/nautilus_core/persistence/src/parquet/reader.rs +++ b/nautilus_core/persistence/src/parquet/reader.rs @@ -153,6 +153,7 @@ impl ParquetReader where R: Read + Seek, { + #[must_use] pub fn new(mut reader: R, chunk_size: usize, filter_arg: GroupFilterArg) -> Self { let metadata = read::read_metadata(&mut reader).expect("Unable to read metadata"); let schema 
= read::infer_schema(&metadata).expect("Unable to infer schema"); diff --git a/nautilus_core/persistence/src/parquet/writer.rs b/nautilus_core/persistence/src/parquet/writer.rs index 73f9bfea6240..98f048eb2b68 100644 --- a/nautilus_core/persistence/src/parquet/writer.rs +++ b/nautilus_core/persistence/src/parquet/writer.rs @@ -41,7 +41,8 @@ where A: EncodeToChunk + 'a + Sized, W: Write, { - pub fn new(w: W, schema: Schema) -> ParquetWriter { + #[must_use] + pub fn new(w: W, schema: Schema) -> Self { let options = WriteOptions { write_statistics: true, compression: CompressionOptions::Uncompressed, @@ -59,6 +60,7 @@ where } } + #[must_use] pub fn new_buffer_writer(schema: Schema) -> ParquetWriter> { ParquetWriter::new(Vec::new(), schema) } diff --git a/nautilus_core/rust-toolchain.toml b/nautilus_core/rust-toolchain.toml index 3ac3ddd03d48..31445fb68022 100644 --- a/nautilus_core/rust-toolchain.toml +++ b/nautilus_core/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -version = "1.66.1" +version = "1.67.1" channel = "stable" diff --git a/nautilus_trader/accounting/manager.pyx b/nautilus_trader/accounting/manager.pyx index 3a7814924baf..fbdc17822ab2 100644 --- a/nautilus_trader/accounting/manager.pyx +++ b/nautilus_trader/accounting/manager.pyx @@ -200,9 +200,6 @@ cdef class AccountsManager: assert order.is_open_c() if not order.has_price_c() and not order.has_trigger_price_c(): - self._log.warning( - "Cannot update account without initial trigger price.", - ) continue # Calculate balance locked @@ -299,9 +296,6 @@ cdef class AccountsManager: assert order.is_open_c() if not order.has_price_c() and not order.has_trigger_price_c(): - self._log.warning( - "Cannot update account without initial trigger price.", - ) continue # Calculate initial margin diff --git a/nautilus_trader/adapters/betfair/data.py b/nautilus_trader/adapters/betfair/data.py index 3f57b10d556e..39ed90a6ae7a 100644 --- a/nautilus_trader/adapters/betfair/data.py +++ b/nautilus_trader/adapters/betfair/data.py @@ -25,6 +25,7 @@ from nautilus_trader.adapters.betfair.client.core import BetfairClient from nautilus_trader.adapters.betfair.common import BETFAIR_VENUE from nautilus_trader.adapters.betfair.data_types import BetfairStartingPrice +from nautilus_trader.adapters.betfair.data_types import BSPOrderBookDeltas from nautilus_trader.adapters.betfair.data_types import InstrumentSearch from nautilus_trader.adapters.betfair.data_types import SubscriptionStatus from nautilus_trader.adapters.betfair.parsing.streaming import BetfairParser @@ -235,35 +236,32 @@ async def delayed_subscribe(self, delay=0): await self._stream.send_subscription_message(market_ids=list(self._subscribed_market_ids)) self._log.info(f"Added market_ids {self._subscribed_market_ids} for data.") - def subscribe_trade_ticks(self, instrument_id: InstrumentId): + async def _subscribe_ticker(self, instrument_id: InstrumentId) -> None: pass # Subscribed as part of orderbook - def subscribe_instrument(self, instrument_id: InstrumentId): + async def _subscribe_instrument(self, instrument_id: InstrumentId): for instrument in self._instrument_provider.list_all(): self._handle_data(data=instrument) - def subscribe_instrument_status_updates(self, instrument_id: InstrumentId): + async def _subscribe_instrument_status_updates(self, instrument_id: InstrumentId): pass # Subscribed as part of orderbook - def subscribe_instrument_close(self, instrument_id: InstrumentId): + async def _subscribe_instrument_close(self, instrument_id: InstrumentId): pass # Subscribed as part of 
orderbook - def unsubscribe_order_book_snapshots(self, instrument_id: InstrumentId): + async def _unsubscribe_order_book_snapshots(self, instrument_id: InstrumentId): # TODO - this could be done by removing the market from self.__subscribed_market_ids and resending the # subscription message - when we have a use case self._log.warning("Betfair does not support unsubscribing from instruments") - def unsubscribe_order_book_deltas(self, instrument_id: InstrumentId): + async def _unsubscribe_order_book_deltas(self, instrument_id: InstrumentId): # TODO - this could be done by removing the market from self.__subscribed_market_ids and resending the # subscription message - when we have a use case self._log.warning("Betfair does not support unsubscribing from instruments") # -- INTERNAL --------------------------------------------------------------------------------- - def _log_betfair_error(self, ex: Exception, method_name: str): - self._log.warning(f"{type(ex).__name__}: {ex} in {method_name}") - def handle_data(self, data: Data): self._handle_data(data=data) @@ -284,10 +282,10 @@ def _on_market_update(self, mcm: MCM): updates = self.parser.parse(mcm=mcm) for data in updates: self._log.debug(f"{data}") - if isinstance(data, BetfairStartingPrice): + if isinstance(data, (BetfairStartingPrice, BSPOrderBookDeltas)): # Not a regular data type generic_data = GenericData( - DataType(BetfairStartingPrice, metadata={"instrument_id": data.instrument_id}), + DataType(data.__class__, metadata={"instrument_id": data.instrument_id}), data, ) self._handle_data(generic_data) diff --git a/nautilus_trader/adapters/betfair/execution.py b/nautilus_trader/adapters/betfair/execution.py index ad5678d11ec3..511327b2758d 100644 --- a/nautilus_trader/adapters/betfair/execution.py +++ b/nautilus_trader/adapters/betfair/execution.py @@ -180,7 +180,7 @@ async def _disconnect(self) -> None: async def watch_stream(self): """Ensure socket stream is connected""" - while self.stream.is_running: + while not self.stream.is_stopping: if not self.stream.is_connected: await self.stream.connect() await asyncio.sleep(1) diff --git a/nautilus_trader/adapters/betfair/factories.py b/nautilus_trader/adapters/betfair/factories.py index 586c4b12bcb4..e34e726270a0 100644 --- a/nautilus_trader/adapters/betfair/factories.py +++ b/nautilus_trader/adapters/betfair/factories.py @@ -26,7 +26,6 @@ from nautilus_trader.adapters.betfair.providers import BetfairInstrumentProvider from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.live.factories import LiveDataClientFactory @@ -153,7 +152,7 @@ def create( # type: ignore msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ): """ Create a new Betfair data client. @@ -172,7 +171,7 @@ def create( # type: ignore The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. Returns @@ -223,7 +222,7 @@ def create( # type: ignore msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ): """ Create a new Betfair execution client. @@ -242,7 +241,7 @@ def create( # type: ignore The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. 
Returns diff --git a/nautilus_trader/adapters/betfair/parsing/streaming.py b/nautilus_trader/adapters/betfair/parsing/streaming.py index 1356b8cb3e0b..f09898975ae9 100644 --- a/nautilus_trader/adapters/betfair/parsing/streaming.py +++ b/nautilus_trader/adapters/betfair/parsing/streaming.py @@ -46,7 +46,7 @@ from nautilus_trader.adapters.betfair.parsing.constants import STRICT_MARKET_DATA_HANDLING from nautilus_trader.adapters.betfair.parsing.requests import parse_handicap from nautilus_trader.adapters.betfair.util import hash_market_trade -from nautilus_trader.adapters.betfair.util import one +from nautilus_trader.common.functions import one from nautilus_trader.core.datetime import millis_to_nanos from nautilus_trader.execution.reports import TradeReport from nautilus_trader.model.data.tick import TradeTick @@ -340,7 +340,6 @@ def runner_change_all_depth_to_order_book_snapshot( ts_event: int, ts_init: int, ) -> Optional[OrderBookSnapshot]: - # Bids are available to lay (atl) if rc.atl: bids = [ diff --git a/nautilus_trader/adapters/betfair/util.py b/nautilus_trader/adapters/betfair/util.py index e32bd4db7f39..b0cba583488f 100644 --- a/nautilus_trader/adapters/betfair/util.py +++ b/nautilus_trader/adapters/betfair/util.py @@ -16,6 +16,7 @@ from typing import Optional import msgspec +from betfair_parser.spec.streaming import MCM from betfair_parser.spec.streaming import STREAM_DECODER from nautilus_trader.common.providers import InstrumentProvider @@ -70,28 +71,7 @@ def hash_market_trade(timestamp: int, price: float, volume: float): return f"{str(timestamp)[:-6]}{price}{str(volume)}" -def one(iterable): - it = iter(iterable) - - try: - first_value = next(it) - except StopIteration as e: - raise (ValueError("too few items in iterable (expected 1)")) from e - - try: - second_value = next(it) - except StopIteration: - pass - else: - msg = f"Expected exactly one item in iterable, but got {first_value}, {second_value}, and perhaps more." - raise ValueError(msg) - - return first_value - - def historical_instrument_provider_loader(instrument_provider, line): - from betfair_parser.spec.streaming import MCM - from nautilus_trader.adapters.betfair.providers import make_instruments if instrument_provider is None: diff --git a/nautilus_trader/adapters/binance/common/data.py b/nautilus_trader/adapters/binance/common/data.py new file mode 100644 index 000000000000..5dfc72fceecf --- /dev/null +++ b/nautilus_trader/adapters/binance/common/data.py @@ -0,0 +1,659 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +import asyncio +from typing import Optional + +import msgspec +import pandas as pd + +from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from nautilus_trader.adapters.binance.common.enums import BinanceKlineInterval +from nautilus_trader.adapters.binance.common.schemas.market import BinanceAggregatedTradeMsg +from nautilus_trader.adapters.binance.common.schemas.market import BinanceCandlestickMsg +from nautilus_trader.adapters.binance.common.schemas.market import BinanceDataMsgWrapper +from nautilus_trader.adapters.binance.common.schemas.market import BinanceOrderBookMsg +from nautilus_trader.adapters.binance.common.schemas.market import BinanceQuoteMsg +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTickerMsg +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.types import BinanceBar +from nautilus_trader.adapters.binance.common.types import BinanceTicker +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.market import BinanceMarketHttpAPI +from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient +from nautilus_trader.cache.cache import Cache +from nautilus_trader.common.clock import LiveClock +from nautilus_trader.common.enums import LogColor +from nautilus_trader.common.logging import Logger +from nautilus_trader.common.providers import InstrumentProvider +from nautilus_trader.core.correctness import PyCondition +from nautilus_trader.core.datetime import secs_to_millis +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.live.data_client import LiveMarketDataClient +from nautilus_trader.model.data.bar import BarType +from nautilus_trader.model.data.base import DataType +from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.enums import BookType +from nautilus_trader.model.enums import PriceType +from nautilus_trader.model.identifiers import ClientId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import Symbol +from nautilus_trader.model.instruments.base import Instrument +from nautilus_trader.model.orderbook.data import OrderBookData +from nautilus_trader.model.orderbook.data import OrderBookDeltas +from nautilus_trader.model.orderbook.data import OrderBookSnapshot +from nautilus_trader.msgbus.bus import MessageBus + + +class BinanceCommonDataClient(LiveMarketDataClient): + """ + Provides a data client of common methods for the `Binance` exchange. + + Parameters + ---------- + loop : asyncio.AbstractEventLoop + The event loop for the client. + client : BinanceHttpClient + The binance HTTP client. + market : BinanceMarketHttpAPI + The binance Market HTTP API. + enum_parser : BinanceEnumParser + The parser for Binance enums. + msgbus : MessageBus + The message bus for the client. + cache : Cache + The cache for the client. + clock : LiveClock + The clock for the client. + logger : Logger + The logger for the client. + instrument_provider : InstrumentProvider + The instrument provider. + account_type : BinanceAccountType + The account type for the client. 
+ base_url_ws : str, optional + The base URL for the WebSocket client. + use_agg_trade_ticks : bool, default False + Whether to use aggregated trade tick endpoints instead of raw trade ticks. + TradeId of ticks will be the Aggregate tradeId returned by Binance. + + Warnings + -------- + This class should not be used directly, but through a concrete subclass. + """ + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + client: BinanceHttpClient, + market: BinanceMarketHttpAPI, + enum_parser: BinanceEnumParser, + msgbus: MessageBus, + cache: Cache, + clock: LiveClock, + logger: Logger, + instrument_provider: InstrumentProvider, + account_type: BinanceAccountType, + base_url_ws: Optional[str] = None, + use_agg_trade_ticks: bool = False, + ): + super().__init__( + loop=loop, + client_id=ClientId(BINANCE_VENUE.value), + venue=BINANCE_VENUE, + instrument_provider=instrument_provider, + msgbus=msgbus, + cache=cache, + clock=clock, + logger=logger, + ) + + self._binance_account_type = account_type + self._use_agg_trade_ticks = use_agg_trade_ticks + self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE) + + self._update_instrument_interval: int = 60 * 60 # Once per hour (hardcode) + self._update_instruments_task: Optional[asyncio.Task] = None + + self._connect_websockets_interval: int = 4 # Retry websocket connection every 4 seconds + self._connect_websockets_task: Optional[asyncio.Task] = None + + # HTTP API + self._http_client = client + self._http_market = market + + # Enum parser + self._enum_parser = enum_parser + + # WebSocket API + self._ws_client = BinanceWebSocketClient( + loop=loop, + clock=clock, + logger=logger, + handler=self._handle_ws_message, + base_url=base_url_ws, + ) + + # Hot caches + self._instrument_ids: dict[str, InstrumentId] = {} + self._book_buffer: dict[InstrumentId, list[OrderBookData]] = {} + + self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) + self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) + + # Register common WebSocket message handlers + self._ws_handlers = { + "@bookTicker": self._handle_book_ticker, + "@ticker": self._handle_ticker, + "@kline": self._handle_kline, + "@trade": self._handle_trade, + "@aggTrade": self._handle_agg_trade, + "@depth@": self._handle_book_diff_update, + "@depth5": self._handle_book_partial_update, + "@depth10": self._handle_book_partial_update, + "@depth20": self._handle_book_partial_update, + } + + # WebSocket msgspec decoders + self._decoder_data_msg_wrapper = msgspec.json.Decoder(BinanceDataMsgWrapper) + self._decoder_order_book_msg = msgspec.json.Decoder(BinanceOrderBookMsg) + self._decoder_quote_msg = msgspec.json.Decoder(BinanceQuoteMsg) + self._decoder_ticker_msg = msgspec.json.Decoder(BinanceTickerMsg) + self._decoder_candlestick_msg = msgspec.json.Decoder(BinanceCandlestickMsg) + self._decoder_agg_trade_msg = msgspec.json.Decoder(BinanceAggregatedTradeMsg) + + async def _connect(self) -> None: + # Connect HTTP client + self._log.info("Connecting client...") + if not self._http_client.connected: + await self._http_client.connect() + + self._log.info("Initialising instruments...") + await self._instrument_provider.initialize() + + self._send_all_instruments_to_data_engine() + self._update_instruments_task = self.create_task(self._update_instruments()) + + # Connect WebSocket clients + self._connect_websockets_task = self.create_task(self._connect_websockets()) + + async def _connect_websockets(self) -> None: + try: + while not 
self._ws_client.is_connected: + self._log.debug( + f"Scheduled `connect_websockets` to run in " + f"{self._connect_websockets_interval}s.", + ) + await asyncio.sleep(self._connect_websockets_interval) + if self._ws_client.has_subscriptions: + await self._ws_client.connect() + else: + self._log.info("Awaiting subscriptions...") + except asyncio.CancelledError: + self._log.debug("`connect_websockets` task was canceled.") + + async def _update_instruments(self) -> None: + try: + while True: + self._log.debug( + f"Scheduled `update_instruments` to run in " + f"{self._update_instrument_interval}s.", + ) + await asyncio.sleep(self._update_instrument_interval) + await self._instrument_provider.load_all_async() + self._send_all_instruments_to_data_engine() + except asyncio.CancelledError: + self._log.debug("`update_instruments` task was canceled.") + + async def _disconnect(self) -> None: + # Cancel update instruments task + if self._update_instruments_task: + self._log.debug("Canceling `update_instruments` task...") + self._update_instruments_task.cancel() + self._update_instruments_task.done() + + # Cancel WebSocket connect task + if self._connect_websockets_task: + self._log.debug("Canceling `connect_websockets` task...") + self._connect_websockets_task.cancel() + self._connect_websockets_task.done() + # Disconnect WebSocket client + if self._ws_client.is_connected: + await self._ws_client.disconnect() + + # Disconnect HTTP client + if self._http_client.connected: + await self._http_client.disconnect() + + # -- SUBSCRIPTIONS ---------------------------------------------------------------------------- + + async def _subscribe(self, data_type: DataType) -> None: + # Replace method in child class, for exchange specific data types. + raise NotImplementedError("Cannot subscribe to {data_type.type} (not implemented).") + + async def _subscribe_instruments(self) -> None: + pass # Do nothing further + + async def _subscribe_instrument(self, instrument_id: InstrumentId) -> None: + pass # Do nothing further + + async def _subscribe_order_book_deltas( + self, + instrument_id: InstrumentId, + book_type: BookType, + depth: Optional[int] = None, + kwargs: Optional[dict] = None, + ) -> None: + update_speed = None + if kwargs is not None: + update_speed = kwargs.get("update_speed") + await self._subscribe_order_book( + instrument_id=instrument_id, + book_type=book_type, + update_speed=update_speed, + depth=depth, + ) + + async def _subscribe_order_book_snapshots( + self, + instrument_id: InstrumentId, + book_type: BookType, + depth: Optional[int] = None, + kwargs: Optional[dict] = None, + ) -> None: + update_speed = None + if kwargs is not None: + update_speed = kwargs.get("update_speed") + await self._subscribe_order_book( + instrument_id=instrument_id, + book_type=book_type, + update_speed=update_speed, + depth=depth, + ) + + async def _subscribe_order_book( # noqa (too complex) + self, + instrument_id: InstrumentId, + book_type: BookType, + update_speed: Optional[int] = None, + depth: Optional[int] = None, + ) -> None: + if book_type == BookType.L3_MBO: + self._log.error( + "Cannot subscribe to order book deltas: " + "L3_MBO data is not published by Binance. " + "Valid book types are L1_TBBO, L2_MBP.", + ) + return + + valid_speeds = [100, 1000] + if self._binance_account_type.is_futures: + if update_speed is None: + update_speed = 0 # default 0 ms for futures. + valid_speeds = [0, 100, 250, 500] # 0ms option for futures exists but not documented? 
+ elif update_speed is None: + update_speed = 100 # default 100ms for spot + if update_speed not in valid_speeds: + self._log.error( + "Cannot subscribe to order book:" + f"invalid `update_speed`, was {update_speed}. " + f"Valid update speeds are {valid_speeds} ms.", + ) + return + + if depth is None: + depth = 0 + + # Add delta stream buffer + self._book_buffer[instrument_id] = [] + + if 0 < depth <= 20: + if depth not in (5, 10, 20): + self._log.error( + "Cannot subscribe to order book snapshots: " + f"invalid `depth`, was {depth}. " + "Valid depths are 5, 10 or 20.", + ) + return + self._ws_client.subscribe_partial_book_depth( + symbol=instrument_id.symbol.value, + depth=depth, + speed=update_speed, + ) + + while not self._ws_client.is_connected: + await asyncio.sleep(self._connect_websockets_interval) + + snapshot: OrderBookSnapshot = await self._http_market.request_order_book_snapshot( + instrument_id=instrument_id, + limit=depth, + ts_init=self._clock.timestamp_ns(), + ) + self._handle_data(snapshot) + else: + self._ws_client.subscribe_diff_book_depth( + symbol=instrument_id.symbol.value, + speed=update_speed, + ) + + book_buffer = self._book_buffer.pop(instrument_id, []) + for deltas in book_buffer: + if deltas.sequence <= snapshot.sequence: + continue + self._handle_data(deltas) + + async def _subscribe_ticker(self, instrument_id: InstrumentId) -> None: + self._ws_client.subscribe_ticker(instrument_id.symbol.value) + + async def _subscribe_quote_ticks(self, instrument_id: InstrumentId) -> None: + self._ws_client.subscribe_book_ticker(instrument_id.symbol.value) + + async def _subscribe_trade_ticks(self, instrument_id: InstrumentId) -> None: + if self._use_agg_trade_ticks: + self._ws_client.subscribe_agg_trades(instrument_id.symbol.value) + else: + self._ws_client.subscribe_trades(instrument_id.symbol.value) + + async def _subscribe_bars(self, bar_type: BarType) -> None: + PyCondition.true(bar_type.is_externally_aggregated(), "aggregation_source is not EXTERNAL") + + if not bar_type.spec.is_time_aggregated(): + self._log.error( + f"Cannot subscribe to {bar_type}: only time bars are aggregated by Binance.", + ) + return + + resolution = self._enum_parser.parse_internal_bar_agg(bar_type.spec.aggregation) + if self._binance_account_type.is_futures and resolution == "s": + self._log.error( + f"Cannot subscribe to {bar_type}. ", + "Second interval bars are not aggregated by Binance Futures.", + ) + try: + interval = BinanceKlineInterval(f"{bar_type.spec.step}{resolution}") + except ValueError: + self._log.error( + f"Bar interval {bar_type.spec.step}{resolution} not supported by Binance.", + ) + + self._ws_client.subscribe_bars( + symbol=bar_type.instrument_id.symbol.value, + interval=interval.value, + ) + self._add_subscription_bars(bar_type) + + async def _unsubscribe(self, data_type: DataType): + # Replace method in child class, for exchange specific data types. 
+
+    async def _unsubscribe(self, data_type: DataType) -> None:
+        # Replace method in child class, for exchange specific data types.
+        raise NotImplementedError(f"Cannot unsubscribe from {data_type.type} (not implemented).")
+
+    async def _unsubscribe_instruments(self) -> None:
+        pass  # Do nothing further
+
+    async def _unsubscribe_instrument(self, instrument_id: InstrumentId) -> None:
+        pass  # Do nothing further
+
+    async def _unsubscribe_order_book_deltas(self, instrument_id: InstrumentId) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    async def _unsubscribe_order_book_snapshots(self, instrument_id: InstrumentId) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    async def _unsubscribe_ticker(self, instrument_id: InstrumentId) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    async def _unsubscribe_quote_ticks(self, instrument_id: InstrumentId) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    async def _unsubscribe_trade_ticks(self, instrument_id: InstrumentId) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    async def _unsubscribe_bars(self, bar_type: BarType) -> None:
+        pass  # TODO: Unsubscribe from Binance if no other subscriptions
+
+    # -- REQUESTS ---------------------------------------------------------------------------------
+
+    async def _request_instrument(self, instrument_id: InstrumentId, correlation_id: UUID4) -> None:
+        instrument: Optional[Instrument] = self._instrument_provider.find(instrument_id)
+        if instrument is None:
+            self._log.error(f"Cannot find instrument for {instrument_id}.")
+            return
+
+        data_type = DataType(
+            type=Instrument,
+            metadata={"instrument_id": instrument_id},
+        )
+
+        self._handle_data_response(
+            data_type=data_type,
+            data=[instrument],  # Data engine handles lists of instruments
+            correlation_id=correlation_id,
+        )
+
+    async def _request_quote_ticks(
+        self,
+        instrument_id: InstrumentId,  # noqa
+        limit: int,  # noqa
+        correlation_id: UUID4,  # noqa
+        from_datetime: Optional[pd.Timestamp] = None,  # noqa
+        to_datetime: Optional[pd.Timestamp] = None,  # noqa
+    ) -> None:
+        self._log.error(
+            "Cannot request historical quote ticks: not published by Binance.",
+        )
+
+    async def _request_trade_ticks(
+        self,
+        instrument_id: InstrumentId,
+        limit: int,
+        correlation_id: UUID4,
+        from_datetime: Optional[pd.Timestamp] = None,
+        to_datetime: Optional[pd.Timestamp] = None,
+    ) -> None:
+        if limit == 0 or limit > 1000:
+            limit = 1000
+
+        if not self._use_agg_trade_ticks:
+            if from_datetime is not None or to_datetime is not None:
+                self._log.warning(
+                    "Trade ticks have been requested with a from/to time range, "
+                    f"however, the request will be for the most recent {limit}. "
+                    "Consider using aggregated trade ticks (`use_agg_trade_ticks`).",
+                )
" + "Consider using aggregated trade ticks (`use_agg_trade_ticks`).", + ) + ticks = await self._http_market.request_trade_ticks( + instrument_id=instrument_id, + limit=limit, + ts_init=self._clock.timestamp_ns(), + ) + else: + # Convert from timestamps to milliseconds + start_time_ms = None + end_time_ms = None + if from_datetime: + start_time_ms = str(int(from_datetime.timestamp() * 1000)) + if to_datetime: + end_time_ms = str(int(to_datetime.timestamp() * 1000)) + ticks = await self._http_market.request_agg_trade_ticks( + instrument_id=instrument_id, + limit=limit, + start_time=start_time_ms, + end_time=end_time_ms, + ts_init=self._clock.timestamp_ns(), + ) + + self._handle_trade_ticks(instrument_id, ticks, correlation_id) + + async def _request_bars( # noqa (too complex) + self, + bar_type: BarType, + limit: int, + correlation_id: UUID4, + from_datetime: Optional[pd.Timestamp] = None, + to_datetime: Optional[pd.Timestamp] = None, + ) -> None: + if limit == 0 or limit > 1000: + limit = 1000 + + if bar_type.is_internally_aggregated(): + self._log.error( + f"Cannot request {bar_type}: " + f"only historical bars with EXTERNAL aggregation available from Binance.", + ) + return + + if not bar_type.spec.is_time_aggregated(): + self._log.error( + f"Cannot request {bar_type}: only time bars are aggregated by Binance.", + ) + return + + resolution = self._enum_parser.parse_internal_bar_agg(bar_type.spec.aggregation) + if not self._binance_account_type.is_spot_or_margin and resolution == "s": + self._log.error( + f"Cannot request {bar_type}: ", + "second interval bars are not aggregated by Binance Futures.", + ) + try: + interval = BinanceKlineInterval(f"{bar_type.spec.step}{resolution}") + except ValueError: + self._log.error( + f"Cannot create Binance Kline interval. 
+
+        if bar_type.spec.price_type != PriceType.LAST:
+            self._log.error(
+                f"Cannot request {bar_type}: "
+                f"only historical bars for LAST price type available from Binance.",
+            )
+            return
+
+        start_time_ms = None
+        if from_datetime is not None:
+            start_time_ms = secs_to_millis(from_datetime.timestamp())
+
+        end_time_ms = None
+        if to_datetime is not None:
+            end_time_ms = secs_to_millis(to_datetime.timestamp())
+
+        bars = await self._http_market.request_binance_bars(
+            bar_type=bar_type,
+            interval=interval,
+            start_time=start_time_ms,
+            end_time=end_time_ms,
+            limit=limit,
+            ts_init=self._clock.timestamp_ns(),
+        )
+
+        if not bars:
+            self._log.warning(f"No bars received for {bar_type}.")
+            return
+        # The most recent bar is still forming, so pass it separately as a partial
+        partial: BinanceBar = bars.pop()
+        self._handle_bars(bar_type, bars, partial, correlation_id)
+
+    def _send_all_instruments_to_data_engine(self) -> None:
+        for instrument in self._instrument_provider.get_all().values():
+            self._handle_data(instrument)
+
+        for currency in self._instrument_provider.currencies().values():
+            self._cache.add_currency(currency)
+
+    def _get_cached_instrument_id(self, symbol: str) -> InstrumentId:
+        # Parse instrument ID
+        nautilus_symbol: str = BinanceSymbol(symbol).parse_binance_to_internal(
+            self._binance_account_type,
+        )
+        instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol)
+        if not instrument_id:
+            instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE)
+            self._instrument_ids[nautilus_symbol] = instrument_id
+        return instrument_id
+
+    # -- WEBSOCKET HANDLERS -----------------------------------------------------------------------
+
+    def _handle_ws_message(self, raw: bytes) -> None:
+        # TODO(cs): Uncomment for development
+        # self._log.info(str(raw), LogColor.CYAN)
+
+        wrapper = self._decoder_data_msg_wrapper.decode(raw)
+        try:
+            handled = False
+            for handler in self._ws_handlers:
+                if handler in wrapper.stream:
+                    self._ws_handlers[handler](raw)
+                    handled = True
+            if not handled:
+                self._log.error(
+                    f"Unrecognized websocket message type: {wrapper.stream}",
+                )
+        except Exception as e:
+            self._log.error(f"Error handling websocket message: {e}")
+
+    def _handle_book_diff_update(self, raw: bytes) -> None:
+        msg = self._decoder_order_book_msg.decode(raw)
+        instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s)
+        book_deltas: OrderBookDeltas = msg.data.parse_to_order_book_deltas(
+            instrument_id=instrument_id,
+            ts_init=self._clock.timestamp_ns(),
+        )
+        book_buffer: Optional[list[OrderBookData]] = self._book_buffer.get(instrument_id)
+        if book_buffer is not None:
+            book_buffer.append(book_deltas)
+        else:
+            self._handle_data(book_deltas)
+
+    def _handle_book_ticker(self, raw: bytes) -> None:
+        msg = self._decoder_quote_msg.decode(raw)
+        instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s)
+        quote_tick: QuoteTick = msg.data.parse_to_quote_tick(
+            instrument_id=instrument_id,
+            ts_init=self._clock.timestamp_ns(),
+        )
+        self._handle_data(quote_tick)
+
+    def _handle_ticker(self, raw: bytes) -> None:
+        msg = self._decoder_ticker_msg.decode(raw)
+        instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s)
+        ticker: BinanceTicker = msg.data.parse_to_binance_ticker(
+            instrument_id=instrument_id,
+            ts_init=self._clock.timestamp_ns(),
+        )
+        self._handle_data(ticker)
+
+    def _handle_kline(self, raw: bytes) -> None:
+        msg = self._decoder_candlestick_msg.decode(raw)
+        if not msg.data.k.x:
+            return  # Not closed yet
+        instrument_id = self._get_cached_instrument_id(msg.data.s)
+        bar: BinanceBar =
msg.data.k.parse_to_binance_bar( + instrument_id=instrument_id, + enum_parser=self._enum_parser, + ts_init=self._clock.timestamp_ns(), + ) + self._handle_data(bar) + + def _handle_book_partial_update(self, raw: bytes) -> None: + raise NotImplementedError("Please implement book partial update handling in child class.") + + def _handle_trade(self, raw: bytes) -> None: + raise NotImplementedError("Please implement trade handling in child class.") + + def _handle_agg_trade(self, raw: bytes) -> None: + msg = self._decoder_agg_trade_msg.decode(raw) + instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) + trade_tick: TradeTick = msg.data.parse_to_trade_tick( + instrument_id=instrument_id, + ts_init=self._clock.timestamp_ns(), + ) + self._handle_data(trade_tick) diff --git a/nautilus_trader/adapters/binance/common/enums.py b/nautilus_trader/adapters/binance/common/enums.py index e386b1150a4c..0440e6268f01 100644 --- a/nautilus_trader/adapters/binance/common/enums.py +++ b/nautilus_trader/adapters/binance/common/enums.py @@ -16,6 +16,17 @@ from enum import Enum from enum import unique +from nautilus_trader.model.data.bar import BarSpecification +from nautilus_trader.model.enums import BarAggregation +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.enums import OrderStatus +from nautilus_trader.model.enums import OrderType +from nautilus_trader.model.enums import PriceType +from nautilus_trader.model.enums import TimeInForce +from nautilus_trader.model.enums import TriggerType +from nautilus_trader.model.enums import bar_aggregation_to_str +from nautilus_trader.model.orders.base import Order + """ Defines `Binance` common enums. @@ -46,6 +57,28 @@ class BinanceRateLimitInterval(Enum): DAY = "DAY" +@unique +class BinanceKlineInterval(Enum): + """Represents a `Binance` kline chart interval.""" + + SECOND_1 = "1s" + MINUTE_1 = "1m" + MINUTE_3 = "3m" + MINUTE_5 = "5m" + MINUTE_15 = "15m" + MINUTE_30 = "30m" + HOUR_1 = "1h" + HOUR_2 = "2h" + HOUR_4 = "4h" + HOUR_6 = "6h" + HOUR_8 = "8h" + HOUR_12 = "12h" + DAY_1 = "1d" + DAY_3 = "3d" + WEEK_1 = "1w" + MONTH_1 = "1M" + + @unique class BinanceExchangeFilterType(Enum): """Represents a `Binance` exchange filter type.""" @@ -77,7 +110,8 @@ class BinanceAccountType(Enum): """Represents a `Binance` account type.""" SPOT = "SPOT" - MARGIN = "MARGIN" + MARGIN_CROSS = "MARGIN_CROSS" + MARGIN_ISOLATED = "MARGIN_ISOLATED" FUTURES_USDT = "FUTURES_USDT" FUTURES_COIN = "FUTURES_COIN" @@ -87,11 +121,25 @@ def is_spot(self): @property def is_margin(self): - return self == BinanceAccountType.MARGIN + return self in ( + BinanceAccountType.MARGIN_CROSS, + BinanceAccountType.MARGIN_ISOLATED, + ) + + @property + def is_spot_or_margin(self): + return self in ( + BinanceAccountType.SPOT, + BinanceAccountType.MARGIN_CROSS, + BinanceAccountType.MARGIN_ISOLATED, + ) @property def is_futures(self) -> bool: - return self in (BinanceAccountType.FUTURES_USDT, BinanceAccountType.FUTURES_COIN) + return self in ( + BinanceAccountType.FUTURES_USDT, + BinanceAccountType.FUTURES_COIN, + ) @unique @@ -127,3 +175,197 @@ class BinanceOrderStatus(Enum): EXPIRED = "EXPIRED" NEW_INSURANCE = "NEW_INSURANCE" # Liquidation with Insurance Fund NEW_ADL = "NEW_ADL" # Counterparty Liquidation + + +@unique +class BinanceTimeInForce(Enum): + """Represents a `Binance` order time in force.""" + + GTC = "GTC" + IOC = "IOC" + FOK = "FOK" + GTX = "GTX" # FUTURES only, Good Till Crossing (Post Only) + + +@unique +class BinanceOrderType(Enum): + """Represents a 
`Binance` order type.""" + + LIMIT = "LIMIT" + MARKET = "MARKET" + STOP = "STOP" # FUTURES only + STOP_LOSS = "STOP_LOSS" # SPOT/MARGIN only + STOP_LOSS_LIMIT = "STOP_LOSS_LIMIT" # SPOT/MARGIN only + TAKE_PROFIT = "TAKE_PROFIT" + TAKE_PROFIT_LIMIT = "TAKE_PROFIT_LIMIT" # SPOT/MARGIN only + LIMIT_MAKER = "LIMIT_MAKER" # SPOT/MARGIN only + STOP_MARKET = "STOP_MARKET" # FUTURES only + TAKE_PROFIT_MARKET = "TAKE_PROFIT_MARKET" # FUTURES only + TRAILING_STOP_MARKET = "TRAILING_STOP_MARKET" # FUTURES only + + +@unique +class BinanceSecurityType(Enum): + """Represents a `Binance` endpoint security type.""" + + NONE = "NONE" + TRADE = "TRADE" + MARGIN = "MARGIN" # SPOT/MARGIN only + USER_DATA = "USER_DATA" + USER_STREAM = "USER_STREAM" + MARKET_DATA = "MARKET_DATA" + + +@unique +class BinanceMethodType(Enum): + """Represents a `Binance` endpoint method type.""" + + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + + +@unique +class BinanceNewOrderRespType(Enum): + """ + Represents a `Binance` newOrderRespType. + """ + + ACK = "ACK" + RESULT = "RESULT" + FULL = "FULL" + + +class BinanceEnumParser: + """ + Provides common parsing methods for enums used by the 'Binance' exchange. + + Warnings + -------- + This class should not be used directly, but through a concrete subclass. + """ + + def __init__(self) -> None: + # Construct dictionary hashmaps + self.ext_to_int_status = { + BinanceOrderStatus.NEW: OrderStatus.ACCEPTED, + BinanceOrderStatus.CANCELED: OrderStatus.CANCELED, + BinanceOrderStatus.PARTIALLY_FILLED: OrderStatus.PARTIALLY_FILLED, + BinanceOrderStatus.FILLED: OrderStatus.FILLED, + BinanceOrderStatus.NEW_ADL: OrderStatus.FILLED, + BinanceOrderStatus.NEW_INSURANCE: OrderStatus.FILLED, + BinanceOrderStatus.EXPIRED: OrderStatus.EXPIRED, + } + + self.ext_to_int_order_side = { + BinanceOrderSide.BUY: OrderSide.BUY, + BinanceOrderSide.SELL: OrderSide.SELL, + } + self.int_to_ext_order_side = {b: a for a, b in self.ext_to_int_order_side.items()} + + self.ext_to_int_bar_agg = { + "s": BarAggregation.SECOND, + "m": BarAggregation.MINUTE, + "h": BarAggregation.HOUR, + "d": BarAggregation.DAY, + "w": BarAggregation.WEEK, + "M": BarAggregation.MONTH, + } + self.int_to_ext_bar_agg = {b: a for a, b in self.ext_to_int_bar_agg.items()} + + self.ext_to_int_time_in_force = { + BinanceTimeInForce.FOK: TimeInForce.FOK, + BinanceTimeInForce.GTC: TimeInForce.GTC, + BinanceTimeInForce.GTX: TimeInForce.GTC, # Convert GTX to GTC + BinanceTimeInForce.IOC: TimeInForce.IOC, + } + self.int_to_ext_time_in_force = { + TimeInForce.GTC: BinanceTimeInForce.GTC, + TimeInForce.GTD: BinanceTimeInForce.GTC, # Convert GTD to GTC + TimeInForce.FOK: BinanceTimeInForce.FOK, + TimeInForce.IOC: BinanceTimeInForce.IOC, + } + + def parse_binance_order_side(self, order_side: BinanceOrderSide) -> OrderSide: + try: + return self.ext_to_int_order_side[order_side] + except KeyError: + raise RuntimeError( # pragma: no cover (design-time error) + f"unrecognized Binance order side, was {order_side}", # pragma: no cover + ) + + def parse_internal_order_side(self, order_side: OrderSide) -> BinanceOrderSide: + try: + return self.int_to_ext_order_side[order_side] + except KeyError: + raise RuntimeError( # pragma: no cover (design-time error) + f"unrecognized Nautilus order side, was {order_side}", # pragma: no cover + ) + + def parse_binance_time_in_force(self, time_in_force: BinanceTimeInForce) -> TimeInForce: + try: + return self.ext_to_int_time_in_force[time_in_force] + except KeyError: + raise RuntimeError( # pragma: no cover 
(design-time error)
+                f"unrecognized Binance time in force, was {time_in_force}",  # pragma: no cover
+            )
+
+    def parse_internal_time_in_force(self, time_in_force: TimeInForce) -> BinanceTimeInForce:
+        try:
+            return self.int_to_ext_time_in_force[time_in_force]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Nautilus time in force, was {time_in_force}",  # pragma: no cover
+            )
+
+    def parse_binance_order_status(self, order_status: BinanceOrderStatus) -> OrderStatus:
+        try:
+            return self.ext_to_int_status[order_status]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Binance order status, was {order_status}",  # pragma: no cover
+            )
+
+    def parse_binance_order_type(self, order_type: BinanceOrderType) -> OrderType:
+        # Implement in child class
+        raise NotImplementedError
+
+    def parse_internal_order_type(self, order: Order) -> BinanceOrderType:
+        # Implement in child class
+        raise NotImplementedError
+
+    def parse_binance_bar_agg(self, bar_agg: str) -> BarAggregation:
+        try:
+            return self.ext_to_int_bar_agg[bar_agg]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Binance kline resolution, was {bar_agg}",
+            )
+
+    def parse_internal_bar_agg(self, bar_agg: BarAggregation) -> str:
+        try:
+            return self.int_to_ext_bar_agg[bar_agg]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                "unrecognized or unsupported Nautilus BarAggregation, "
+                f"was {bar_aggregation_to_str(bar_agg)}",  # pragma: no cover
+            )
+
+    def parse_binance_kline_interval_to_bar_spec(
+        self,
+        kline_interval: BinanceKlineInterval,
+    ) -> BarSpecification:
+        step = kline_interval.value[:-1]
+        binance_bar_agg = kline_interval.value[-1]
+        return BarSpecification(
+            step=int(step),
+            aggregation=self.parse_binance_bar_agg(binance_bar_agg),
+            price_type=PriceType.LAST,
+        )
+
+    def parse_binance_trigger_type(self, trigger_type: str) -> TriggerType:
+        # Replace method in child class, if compatible
+        raise NotImplementedError(  # pragma: no cover (design-time error)
+            "Cannot parse Binance trigger type (not implemented).",  # pragma: no cover
+        )
diff --git a/nautilus_trader/adapters/binance/common/execution.py b/nautilus_trader/adapters/binance/common/execution.py
new file mode 100644
index 000000000000..93618ee665c4
--- /dev/null
+++ b/nautilus_trader/adapters/binance/common/execution.py
@@ -0,0 +1,790 @@
+# -------------------------------------------------------------------------------------------------
+# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved.
+# https://nautechsystems.io
+#
+# Licensed under the GNU Lesser General Public License Version 3.0 (the "License");
+# You may not use this file except in compliance with the License.
+# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------------------------------- + +import asyncio +from typing import Optional + +import pandas as pd + +from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.schemas.account import BinanceOrder +from nautilus_trader.adapters.binance.common.schemas.account import BinanceUserTrade +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.user import BinanceListenKey +from nautilus_trader.adapters.binance.http.account import BinanceAccountHttpAPI +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.error import BinanceError +from nautilus_trader.adapters.binance.http.market import BinanceMarketHttpAPI +from nautilus_trader.adapters.binance.http.user import BinanceUserDataHttpAPI +from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient +from nautilus_trader.cache.cache import Cache +from nautilus_trader.common.clock import LiveClock +from nautilus_trader.common.enums import LogColor +from nautilus_trader.common.logging import Logger +from nautilus_trader.common.providers import InstrumentProvider +from nautilus_trader.core.correctness import PyCondition +from nautilus_trader.core.datetime import secs_to_millis +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.execution.messages import CancelAllOrders +from nautilus_trader.execution.messages import CancelOrder +from nautilus_trader.execution.messages import ModifyOrder +from nautilus_trader.execution.messages import SubmitOrder +from nautilus_trader.execution.messages import SubmitOrderList +from nautilus_trader.execution.reports import OrderStatusReport +from nautilus_trader.execution.reports import PositionStatusReport +from nautilus_trader.execution.reports import TradeReport +from nautilus_trader.live.execution_client import LiveExecutionClient +from nautilus_trader.model.enums import AccountType +from nautilus_trader.model.enums import OmsType +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.enums import OrderType +from nautilus_trader.model.enums import TimeInForce +from nautilus_trader.model.enums import TrailingOffsetType +from nautilus_trader.model.enums import TriggerType +from nautilus_trader.model.enums import trailing_offset_type_to_str +from nautilus_trader.model.enums import trigger_type_to_str +from nautilus_trader.model.identifiers import AccountId +from nautilus_trader.model.identifiers import ClientId +from nautilus_trader.model.identifiers import ClientOrderId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import Symbol +from nautilus_trader.model.identifiers import VenueOrderId +from nautilus_trader.model.objects import Price +from nautilus_trader.model.orders.base import Order +from nautilus_trader.model.orders.limit import LimitOrder +from nautilus_trader.model.orders.market import MarketOrder +from nautilus_trader.model.orders.stop_limit import StopLimitOrder +from nautilus_trader.model.orders.stop_market import StopMarketOrder +from nautilus_trader.model.orders.trailing_stop_market 
import TrailingStopMarketOrder
+from nautilus_trader.model.position import Position
+from nautilus_trader.msgbus.bus import MessageBus
+
+
+class BinanceCommonExecutionClient(LiveExecutionClient):
+    """
+    Execution client providing common functionality for the `Binance` exchanges.
+
+    Parameters
+    ----------
+    loop : asyncio.AbstractEventLoop
+        The event loop for the client.
+    client : BinanceHttpClient
+        The Binance HTTP client.
+    account : BinanceAccountHttpAPI
+        The Binance Account HTTP API.
+    market : BinanceMarketHttpAPI
+        The Binance Market HTTP API.
+    user : BinanceUserDataHttpAPI
+        The Binance User Data HTTP API.
+    enum_parser : BinanceEnumParser
+        The parser for Binance enums.
+    msgbus : MessageBus
+        The message bus for the client.
+    cache : Cache
+        The cache for the client.
+    clock : LiveClock
+        The clock for the client.
+    logger : Logger
+        The logger for the client.
+    instrument_provider : InstrumentProvider
+        The instrument provider.
+    account_type : BinanceAccountType
+        The account type for the client.
+    base_url_ws : str, optional
+        The base URL for the WebSocket client.
+    clock_sync_interval_secs : int, default 0
+        The interval (seconds) between syncing the Nautilus clock with the Binance server(s) clock.
+        If zero, then no syncing is performed.
+    warn_gtd_to_gtc : bool, default True
+        If a warning should be logged when a GTD `time_in_force` is transformed to GTC.
+
+    Warnings
+    --------
+    This class should not be used directly, but through a concrete subclass.
+    """
+
+    def __init__(
+        self,
+        loop: asyncio.AbstractEventLoop,
+        client: BinanceHttpClient,
+        account: BinanceAccountHttpAPI,
+        market: BinanceMarketHttpAPI,
+        user: BinanceUserDataHttpAPI,
+        enum_parser: BinanceEnumParser,
+        msgbus: MessageBus,
+        cache: Cache,
+        clock: LiveClock,
+        logger: Logger,
+        instrument_provider: InstrumentProvider,
+        account_type: BinanceAccountType,
+        base_url_ws: Optional[str] = None,
+        clock_sync_interval_secs: int = 0,
+        warn_gtd_to_gtc: bool = True,
+    ) -> None:
+        super().__init__(
+            loop=loop,
+            client_id=ClientId(BINANCE_VENUE.value),
+            venue=BINANCE_VENUE,
+            oms_type=OmsType.HEDGING if account_type.is_futures else OmsType.NETTING,
+            instrument_provider=instrument_provider,
+            account_type=AccountType.CASH if account_type.is_spot else AccountType.MARGIN,
+            base_currency=None,
+            msgbus=msgbus,
+            cache=cache,
+            clock=clock,
+            logger=logger,
+        )
+
+        self._binance_account_type = account_type
+        self._warn_gtd_to_gtc = warn_gtd_to_gtc
+        self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE)
+
+        self._set_account_id(AccountId(f"{BINANCE_VENUE.value}-spot-master"))
+
+        # Clock sync
+        self._clock_sync_interval_secs = clock_sync_interval_secs
+
+        # Tasks
+        self._task_clock_sync: Optional[asyncio.Task] = None
+
+        # Enum parser
+        self._enum_parser = enum_parser
+
+        # HTTP API
+        self._http_client = client
+        self._http_account = account
+        self._http_market = market
+        self._http_user = user
+
+        # Listen keys
+        self._ping_listen_keys_interval: int = 60 * 5  # Once every 5 mins (hardcoded)
+        self._ping_listen_keys_task: Optional[asyncio.Task] = None
+        self._listen_key: Optional[str] = None
+
+        # WebSocket API
+        self._ws_client = BinanceWebSocketClient(
+            loop=loop,
+            clock=clock,
+            logger=logger,
+            handler=self._handle_user_ws_message,
+            base_url=base_url_ws,
+        )
+
+        # Hot caches
+        self._instrument_ids: dict[str, InstrumentId] = {}
+
+        # Order submission method hashmap
+        self._submit_order_method = {
+            OrderType.MARKET: self._submit_market_order,
+            OrderType.LIMIT: self._submit_limit_order,
OrderType.STOP_LIMIT: self._submit_stop_limit_order, + OrderType.LIMIT_IF_TOUCHED: self._submit_stop_limit_order, + OrderType.STOP_MARKET: self._submit_stop_market_order, + OrderType.MARKET_IF_TOUCHED: self._submit_stop_market_order, + OrderType.TRAILING_STOP_MARKET: self._submit_trailing_stop_market_order, + } + + self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) + self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) + + async def _connect(self) -> None: + # Connect HTTP client + if not self._http_client.connected: + await self._http_client.connect() + try: + # Initialize instrument provider + await self._instrument_provider.initialize() + # Authenticate API key and update account(s) + await self._update_account_state() + # Get listen keys + response: BinanceListenKey = await self._http_user.create_listen_key() + except BinanceError as e: + self._log.exception(f"Error on connect: {e.message}", e) + return + self._listen_key = response.listenKey + self._log.info(f"Listen key {self._listen_key}") + self._ping_listen_keys_task = self.create_task(self._ping_listen_keys()) + + # Setup clock sync + if self._clock_sync_interval_secs > 0: + self._task_clock_sync = self.create_task(self._sync_clock_with_binance_server()) + + # Connect WebSocket client + self._ws_client.subscribe(key=self._listen_key) + await self._ws_client.connect() + + async def _update_account_state(self) -> None: + # Replace method in child class + raise NotImplementedError + + async def _ping_listen_keys(self) -> None: + try: + while True: + self._log.debug( + f"Scheduled `ping_listen_keys` to run in " + f"{self._ping_listen_keys_interval}s.", + ) + await asyncio.sleep(self._ping_listen_keys_interval) + if self._listen_key: + self._log.debug(f"Pinging WebSocket listen key {self._listen_key}...") + await self._http_user.keepalive_listen_key(listen_key=self._listen_key) + except asyncio.CancelledError: + self._log.debug("`ping_listen_keys` task was canceled.") + + async def _sync_clock_with_binance_server(self) -> None: + try: + while True: + # self._log.info( + # f"Syncing Nautilus clock with Binance server...", + # ) + server_time = await self._http_market.request_server_time() + self._log.info(f"Binance server time {server_time} UNIX (ms).") + + nautilus_time = self._clock.timestamp_ms() + self._log.info(f"Nautilus clock time {nautilus_time} UNIX (ms).") + + # offset_ns = millis_to_nanos(nautilus_time - server_time) + # self._log.info(f"Setting Nautilus clock offset {offset_ns} (ns).") + # self._clock.set_offset(offset_ns) + + await asyncio.sleep(self._clock_sync_interval_secs) + except asyncio.CancelledError: + self._log.debug("`sync_clock_with_binance_server` task was canceled.") + + async def _disconnect(self) -> None: + # Cancel tasks + if self._ping_listen_keys_task: + self._log.debug("Canceling `ping_listen_keys` task...") + self._ping_listen_keys_task.cancel() + self._ping_listen_keys_task.done() + + if self._task_clock_sync: + self._log.debug("Canceling `task_clock_sync` task...") + self._task_clock_sync.cancel() + self._task_clock_sync.done() + + # Disconnect WebSocket clients + if self._ws_client.is_connected: + await self._ws_client.disconnect() + + # Disconnect HTTP client + if self._http_client.connected: + await self._http_client.disconnect() + + # -- EXECUTION REPORTS ------------------------------------------------------------------------ + + async def generate_order_status_report( + self, + instrument_id: InstrumentId, + client_order_id: Optional[ClientOrderId] = 
None,
+        venue_order_id: Optional[VenueOrderId] = None,
+    ) -> Optional[OrderStatusReport]:
+        PyCondition.false(
+            client_order_id is None and venue_order_id is None,
+            "both `client_order_id` and `venue_order_id` were `None`",
+        )
+
+        self._log.info(
+            f"Generating OrderStatusReport for "
+            f"{repr(client_order_id) if client_order_id else ''} "
+            f"{repr(venue_order_id) if venue_order_id else ''}...",
+        )
+
+        try:
+            if venue_order_id:
+                binance_order = await self._http_account.query_order(
+                    symbol=instrument_id.symbol.value,
+                    order_id=venue_order_id.value,
+                )
+            else:
+                binance_order = await self._http_account.query_order(
+                    symbol=instrument_id.symbol.value,
+                    orig_client_order_id=client_order_id.value
+                    if client_order_id is not None
+                    else None,
+                )
+        except BinanceError as e:
+            self._log.error(
+                f"Cannot generate order status report for {repr(client_order_id)}: {e.message}",
+            )
+            return None
+        if not binance_order:
+            return None
+
+        report: OrderStatusReport = binance_order.parse_to_order_status_report(
+            account_id=self.account_id,
+            instrument_id=self._get_cached_instrument_id(binance_order.symbol),
+            report_id=UUID4(),
+            enum_parser=self._enum_parser,
+            ts_init=self._clock.timestamp_ns(),
+        )
+
+        self._log.debug(f"Received {report}.")
+        return report
+
+    def _get_cache_active_symbols(self) -> list[str]:
+        # Check cache for all active symbols
+        open_orders: list[Order] = self._cache.orders_open(venue=self.venue)
+        open_positions: list[Position] = self._cache.positions_open(venue=self.venue)
+        active_symbols: list[str] = []
+        for o in open_orders:
+            active_symbols.append(o.instrument_id.symbol.value)
+        for p in open_positions:
+            active_symbols.append(p.instrument_id.symbol.value)
+        return active_symbols
+
+    async def _get_binance_position_status_reports(
+        self,
+        symbol: Optional[str] = None,
+    ) -> list[PositionStatusReport]:
+        # Implement in child class
+        raise NotImplementedError
+
+    async def _get_binance_active_position_symbols(
+        self,
+        symbol: Optional[str] = None,
+    ) -> list[str]:
+        # Implement in child class
+        raise NotImplementedError
+
+    async def generate_order_status_reports(
+        self,
+        instrument_id: Optional[InstrumentId] = None,
+        start: Optional[pd.Timestamp] = None,
+        end: Optional[pd.Timestamp] = None,
+        open_only: bool = False,
+    ) -> list[OrderStatusReport]:
+        self._log.info(f"Generating OrderStatusReports for {self.id}...")
+
+        try:
+            # Check Binance for all symbols with active orders
+            symbol = instrument_id.symbol.value if instrument_id is not None else None
+            active_symbols = self._get_cache_active_symbols()
+            active_symbols.extend(await self._get_binance_active_position_symbols(symbol))
+            binance_open_orders = await self._http_account.query_open_orders(symbol)
+            for order in binance_open_orders:
+                active_symbols.append(order.symbol)
+            # Get all orders for those active symbols
+            binance_orders: list[BinanceOrder] = []
+            for symbol in active_symbols:
+                response = await self._http_account.query_all_orders(
+                    symbol=symbol,
+                    start_time=secs_to_millis(start.timestamp()) if start is not None else None,
+                    end_time=secs_to_millis(end.timestamp()) if end is not None else None,
+                )
+                binance_orders.extend(response)
+        except BinanceError as e:
+            self._log.exception(f"Cannot generate OrderStatusReport: {e.message}", e)
+            return []
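The start/end filters above are passed to Binance as millisecond timestamps; a small sketch of the conversion, using the `secs_to_millis` helper imported in this module (timestamp value hypothetical):

import pandas as pd

from nautilus_trader.core.datetime import secs_to_millis

# pd.Timestamp.timestamp() returns seconds since the UNIX epoch
start = pd.Timestamp("2023-02-01", tz="UTC")
assert secs_to_millis(start.timestamp()) == 1675209600000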
("NEW", "PARTIALLY_FILLED", "PENDING_CANCEL"): + # if start is not None and timestamp < start: + # continue + # if end is not None and timestamp > end: + # continue + report = order.parse_to_order_status_report( + account_id=self.account_id, + instrument_id=self._get_cached_instrument_id(order.symbol), + report_id=UUID4(), + enum_parser=self._enum_parser, + ts_init=self._clock.timestamp_ns(), + ) + self._log.debug(f"Received {reports}.") + reports.append(report) + + len_reports = len(reports) + plural = "" if len_reports == 1 else "s" + self._log.info(f"Generated {len(reports)} OrderStatusReport{plural}.") + + return reports + + async def generate_trade_reports( + self, + instrument_id: InstrumentId = None, + venue_order_id: VenueOrderId = None, + start: Optional[pd.Timestamp] = None, + end: Optional[pd.Timestamp] = None, + ) -> list[TradeReport]: + self._log.info(f"Generating TradeReports for {self.id}...") + + try: + # Check Binance for all trades on active symbols + symbol = instrument_id.symbol.value if instrument_id is not None else None + active_symbols = self._get_cache_active_symbols() + active_symbols.extend(await self._get_binance_active_position_symbols(symbol)) + binance_trades: list[BinanceUserTrade] = [] + for symbol in active_symbols: + response = await self._http_account.query_user_trades( + symbol=symbol, + start_time=secs_to_millis(start.timestamp()) if start is not None else None, + end_time=secs_to_millis(end.timestamp()) if end is not None else None, + ) + binance_trades.extend(response) + except BinanceError as e: + self._log.exception(f"Cannot generate TradeReport: {e.message}", e) + return [] + + # Parse all Binance trades + reports: list[TradeReport] = [] + for trade in binance_trades: + # Apply filter + # TODO(cs): Time filter is WIP + # timestamp = pd.to_datetime(data["time"], utc=True) + # if start is not None and timestamp < start: + # continue + # if end is not None and timestamp > end: + # continue + if trade.symbol is None: + self.log.warning(f"No symbol for trade {trade}.") + continue + report = trade.parse_to_trade_report( + account_id=self.account_id, + instrument_id=self._get_cached_instrument_id(trade.symbol), + report_id=UUID4(), + ts_init=self._clock.timestamp_ns(), + ) + self._log.debug(f"Received {report}.") + reports.append(report) + + # Confirm sorting in ascending order + reports = sorted(reports, key=lambda x: x.trade_id) + + len_reports = len(reports) + plural = "" if len_reports == 1 else "s" + self._log.info(f"Generated {len(reports)} TradeReport{plural}.") + + return reports + + async def generate_position_status_reports( + self, + instrument_id: InstrumentId = None, + start: Optional[pd.Timestamp] = None, + end: Optional[pd.Timestamp] = None, + ) -> list[PositionStatusReport]: + self._log.info(f"Generating PositionStatusReports for {self.id}...") + + try: + symbol = instrument_id.symbol.value if instrument_id is not None else None + reports = await self._get_binance_position_status_reports(symbol) + except BinanceError as e: + self._log.exception(f"Cannot generate PositionStatusReport: {e.message}", e) + return [] + + len_reports = len(reports) + plural = "" if len_reports == 1 else "s" + self._log.info(f"Generated {len(reports)} PositionStatusReport{plural}.") + + return reports + + # -- COMMAND HANDLERS ------------------------------------------------------------------------- + + async def _submit_order(self, command: SubmitOrder) -> None: + order: Order = command.order + + # Check validity + self._check_order_validity(order) + 
self._log.debug(f"Submitting {order}.") + + # Generate event here to ensure correct ordering of events + self.generate_order_submitted( + strategy_id=order.strategy_id, + instrument_id=order.instrument_id, + client_order_id=order.client_order_id, + ts_event=self._clock.timestamp_ns(), + ) + try: + await self._submit_order_method[order.order_type](order) + except BinanceError as e: + self.generate_order_rejected( + strategy_id=order.strategy_id, + instrument_id=order.instrument_id, + client_order_id=order.client_order_id, + reason=e.message, + ts_event=self._clock.timestamp_ns(), + ) + except KeyError: + raise RuntimeError(f"unsupported order type, was {order.order_type}") + + def _check_order_validity(self, order: Order): + # Implement in child class + raise NotImplementedError + + async def _submit_market_order(self, order: MarketOrder) -> None: + await self._http_account.new_order( + symbol=order.instrument_id.symbol.value, + side=self._enum_parser.parse_internal_order_side(order.side), + order_type=self._enum_parser.parse_internal_order_type(order), + quantity=str(order.quantity), + new_client_order_id=order.client_order_id.value, + recv_window=str(5000), + ) + + async def _submit_limit_order(self, order: LimitOrder) -> None: + time_in_force = self._enum_parser.parse_internal_time_in_force(order.time_in_force) + if order.time_in_force == TimeInForce.GTD and time_in_force == BinanceTimeInForce.GTC: + if self._warn_gtd_to_gtc: + self._log.warning("Converted GTD `time_in_force` to GTC.") + if order.is_post_only and self._binance_account_type.is_spot_or_margin: + time_in_force = None + elif order.is_post_only and self._binance_account_type.is_futures: + time_in_force = BinanceTimeInForce.GTX + + await self._http_account.new_order( + symbol=order.instrument_id.symbol.value, + side=self._enum_parser.parse_internal_order_side(order.side), + order_type=self._enum_parser.parse_internal_order_type(order), + time_in_force=time_in_force, + quantity=str(order.quantity), + price=str(order.price), + iceberg_qty=str(order.display_qty) if order.display_qty is not None else None, + reduce_only=str(order.is_reduce_only) if order.is_reduce_only is True else None, + new_client_order_id=order.client_order_id.value, + recv_window=str(5000), + ) + + async def _submit_stop_limit_order(self, order: StopLimitOrder) -> None: + time_in_force = self._enum_parser.parse_internal_time_in_force(order.time_in_force) + + if self._binance_account_type.is_spot_or_margin: + working_type = None + elif order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): + working_type = "CONTRACT_PRICE" + elif order.trigger_type == TriggerType.MARK_PRICE: + working_type = "MARK_PRICE" + else: + self._log.error( + f"Cannot submit order: invalid `order.trigger_type`, was " + f"{trigger_type_to_str(order.trigger_price)}. 
{order}", + ) + return + + await self._http_account.new_order( + symbol=order.instrument_id.symbol.value, + side=self._enum_parser.parse_internal_order_side(order.side), + order_type=self._enum_parser.parse_internal_order_type(order), + time_in_force=time_in_force, + quantity=str(order.quantity), + price=str(order.price), + stop_price=str(order.trigger_price), + working_type=working_type, + iceberg_qty=str(order.display_qty) if order.display_qty is not None else None, + reduce_only=str(order.is_reduce_only) if order.is_reduce_only is True else None, + new_client_order_id=order.client_order_id.value, + recv_window=str(5000), + ) + + async def _submit_order_list(self, command: SubmitOrderList) -> None: + for order in command.order_list: + self.generate_order_submitted( + strategy_id=order.strategy_id, + instrument_id=order.instrument_id, + client_order_id=order.client_order_id, + ts_event=self._clock.timestamp_ns(), + ) + + for order in command.order_list: + if order.linked_order_ids: # TODO(cs): Implement + self._log.warning(f"Cannot yet handle OCO conditional orders, {order}.") + await self._submit_order(order) + + async def _submit_stop_market_order(self, order: StopMarketOrder) -> None: + time_in_force = self._enum_parser.parse_internal_time_in_force(order.time_in_force) + + if self._binance_account_type.is_spot_or_margin: + working_type = None + elif order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): + working_type = "CONTRACT_PRICE" + elif order.trigger_type == TriggerType.MARK_PRICE: + working_type = "MARK_PRICE" + else: + self._log.error( + f"Cannot submit order: invalid `order.trigger_type`, was " + f"{trigger_type_to_str(order.trigger_price)}. {order}", + ) + return + + await self._http_account.new_order( + symbol=order.instrument_id.symbol.value, + side=self._enum_parser.parse_internal_order_side(order.side), + order_type=self._enum_parser.parse_internal_order_type(order), + time_in_force=time_in_force, + quantity=str(order.quantity), + stop_price=str(order.trigger_price), + working_type=working_type, + reduce_only=str(order.is_reduce_only) if order.is_reduce_only is True else None, + new_client_order_id=order.client_order_id.value, + recv_window=str(5000), + ) + + async def _submit_trailing_stop_market_order(self, order: TrailingStopMarketOrder) -> None: + time_in_force = self._enum_parser.parse_internal_time_in_force(order.time_in_force) + + if order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): + working_type = "CONTRACT_PRICE" + elif order.trigger_type == TriggerType.MARK_PRICE: + working_type = "MARK_PRICE" + else: + self._log.error( + f"Cannot submit order: invalid `order.trigger_type`, was " + f"{trigger_type_to_str(order.trigger_price)}. {order}", + ) + return + + if order.trailing_offset_type != TrailingOffsetType.BASIS_POINTS: + self._log.error( + f"Cannot submit order: invalid `order.trailing_offset_type`, was " + f"{trailing_offset_type_to_str(order.trailing_offset_type)} (use `BASIS_POINTS`). 
" + f"{order}", + ) + return + + # Ensure activation price + activation_price: Optional[Price] = order.trigger_price + if not activation_price: + quote = self._cache.quote_tick(order.instrument_id) + trade = self._cache.trade_tick(order.instrument_id) + if quote: + if order.side == OrderSide.BUY: + activation_price = quote.ask + elif order.side == OrderSide.SELL: + activation_price = quote.bid + elif trade: + activation_price = trade.price + else: + self._log.error( + "Cannot submit order: no trigger price specified for Binance activation price " + f"and could not find quotes or trades for {order.instrument_id}", + ) + + await self._http_account.new_order( + symbol=order.instrument_id.symbol.value, + side=self._enum_parser.parse_internal_order_side(order.side), + order_type=self._enum_parser.parse_internal_order_type(order), + time_in_force=time_in_force, + quantity=str(order.quantity), + activation_price=str(activation_price), + callback_rate=str(order.trailing_offset / 100), + working_type=working_type, + reduce_only=str(order.is_reduce_only) if order.is_reduce_only is True else None, + new_client_order_id=order.client_order_id.value, + recv_window=str(5000), + ) + + def _get_cached_instrument_id(self, symbol: str) -> InstrumentId: + # Parse instrument ID + nautilus_symbol: str = BinanceSymbol(symbol).parse_binance_to_internal( + self._binance_account_type, + ) + instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol) + if not instrument_id: + instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE) + self._instrument_ids[nautilus_symbol] = instrument_id + return instrument_id + + async def _modify_order(self, command: ModifyOrder) -> None: + self._log.error( # pragma: no cover + "Cannot modify order: Not supported by the exchange.", # pragma: no cover + ) + + async def _cancel_order(self, command: CancelOrder) -> None: + self.generate_order_pending_cancel( + strategy_id=command.strategy_id, + instrument_id=command.instrument_id, + client_order_id=command.client_order_id, + venue_order_id=command.venue_order_id, + ts_event=self._clock.timestamp_ns(), + ) + + await self._cancel_order_single( + instrument_id=command.instrument_id, + client_order_id=command.client_order_id, + venue_order_id=command.venue_order_id, + ) + + async def _cancel_all_orders(self, command: CancelAllOrders) -> None: + open_orders_strategy = self._cache.orders_open( + instrument_id=command.instrument_id, + strategy_id=command.strategy_id, + ) + for order in open_orders_strategy: + if order.is_pending_cancel: + continue # Already pending cancel + self.generate_order_pending_cancel( + strategy_id=order.strategy_id, + instrument_id=order.instrument_id, + client_order_id=order.client_order_id, + venue_order_id=order.venue_order_id, + ts_event=self._clock.timestamp_ns(), + ) + + # Check total orders for instrument + open_orders_total_count = self._cache.orders_open_count( + instrument_id=command.instrument_id, + ) + + try: + if open_orders_total_count == len(open_orders_strategy): + await self._http_account.cancel_all_open_orders( + symbol=command.instrument_id.symbol.value, + ) + else: + for order in open_orders_strategy: + await self._cancel_order_single( + instrument_id=order.instrument_id, + client_order_id=order.client_order_id, + venue_order_id=order.venue_order_id, + ) + except BinanceError as e: + self._log.exception(f"Cannot cancel open orders: {e.message}", e) + + async def _cancel_order_single( + self, + instrument_id: InstrumentId, + client_order_id: ClientOrderId, + 
venue_order_id: Optional[VenueOrderId], + ) -> None: + try: + if venue_order_id is not None: + await self._http_account.cancel_order( + symbol=instrument_id.symbol.value, + order_id=venue_order_id.value, + ) + else: + await self._http_account.cancel_order( + symbol=instrument_id.symbol.value, + orig_client_order_id=client_order_id.value, + ) + except BinanceError as e: + self._log.exception( + f"Cannot cancel order " + f"{repr(client_order_id)}, " + f"{repr(venue_order_id)}: " + f"{e.message}", + e, + ) + + # -- WEBSOCKET EVENT HANDLERS -------------------------------------------------------------------- + + def _handle_user_ws_message(self, raw: bytes) -> None: + # Implement in child class + raise NotImplementedError diff --git a/nautilus_trader/adapters/binance/common/functions.py b/nautilus_trader/adapters/binance/common/functions.py deleted file mode 100644 index 9806490d0d5f..000000000000 --- a/nautilus_trader/adapters/binance/common/functions.py +++ /dev/null @@ -1,44 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -import json - -from nautilus_trader.adapters.binance.common.enums import BinanceAccountType - - -def parse_symbol(symbol: str, account_type: BinanceAccountType): - symbol = symbol.upper() - if account_type.is_spot or account_type.is_margin: - return symbol - - # Parse Futures symbol - if symbol[-1].isdigit(): - return symbol # Deliverable - if symbol.endswith("_PERP"): - symbol = symbol.replace("_", "-") - return symbol - else: - return symbol + "-PERP" - - -def format_symbol(symbol: str): - return symbol.upper().replace(" ", "").replace("/", "").replace("-PERP", "") - - -def convert_symbols_list_to_json_array(symbols: list[str]): - if symbols is None: - return symbols - formatted_symbols: list[str] = [format_symbol(s) for s in symbols] - return json.dumps(formatted_symbols).replace(" ", "").replace("/", "") diff --git a/nautilus_trader/adapters/binance/common/parsing/data.py b/nautilus_trader/adapters/binance/common/parsing/data.py deleted file mode 100644 index 25ea28775a7a..000000000000 --- a/nautilus_trader/adapters/binance/common/parsing/data.py +++ /dev/null @@ -1,229 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal - -from nautilus_trader.adapters.binance.common.schemas import BinanceCandlestick -from nautilus_trader.adapters.binance.common.schemas import BinanceOrderBookData -from nautilus_trader.adapters.binance.common.schemas import BinanceQuoteData -from nautilus_trader.adapters.binance.common.schemas import BinanceTickerData -from nautilus_trader.adapters.binance.common.schemas import BinanceTrade -from nautilus_trader.adapters.binance.common.types import BinanceBar -from nautilus_trader.adapters.binance.common.types import BinanceTicker -from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.model.data.bar import BarSpecification -from nautilus_trader.model.data.bar import BarType -from nautilus_trader.model.data.tick import QuoteTick -from nautilus_trader.model.data.tick import TradeTick -from nautilus_trader.model.enums import AggregationSource -from nautilus_trader.model.enums import AggressorSide -from nautilus_trader.model.enums import BarAggregation -from nautilus_trader.model.enums import BookAction -from nautilus_trader.model.enums import BookType -from nautilus_trader.model.enums import OrderSide -from nautilus_trader.model.enums import PriceType -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity -from nautilus_trader.model.orderbook.data import BookOrder -from nautilus_trader.model.orderbook.data import OrderBookDelta -from nautilus_trader.model.orderbook.data import OrderBookDeltas - - -def parse_trade_tick_http( - instrument_id: InstrumentId, - trade: BinanceTrade, - ts_init: int, -) -> TradeTick: - return TradeTick( - instrument_id=instrument_id, - price=Price.from_str(trade.price), - size=Quantity.from_str(trade.qty), - aggressor_side=AggressorSide.SELLER if trade.isBuyerMaker else AggressorSide.BUYER, - trade_id=TradeId(str(trade.id)), - ts_event=millis_to_nanos(trade.time), - ts_init=ts_init, - ) - - -def parse_bar_http(bar_type: BarType, values: list, ts_init: int) -> BinanceBar: - return BinanceBar( - bar_type=bar_type, - open=Price.from_str(values[1]), - high=Price.from_str(values[2]), - low=Price.from_str(values[3]), - close=Price.from_str(values[4]), - volume=Quantity.from_str(values[5]), - quote_volume=Decimal(values[7]), - count=values[8], - taker_buy_base_volume=Decimal(values[9]), - taker_buy_quote_volume=Decimal(values[10]), - ts_event=millis_to_nanos(values[0]), - ts_init=ts_init, - ) - - -def parse_diff_depth_stream_ws( - instrument_id: InstrumentId, - data: BinanceOrderBookData, - ts_init: int, -) -> OrderBookDeltas: - ts_event: int = millis_to_nanos(data.T) if data.T is not None else millis_to_nanos(data.E) - - bid_deltas: list[OrderBookDelta] = [ - parse_book_delta_ws(instrument_id, OrderSide.BUY, d, ts_event, ts_init, data.u) - for d in data.b - ] - ask_deltas: list[OrderBookDelta] = [ - parse_book_delta_ws(instrument_id, OrderSide.SELL, d, 
ts_event, ts_init, data.u) - for d in data.a - ] - - return OrderBookDeltas( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - deltas=bid_deltas + ask_deltas, - ts_event=ts_event, - ts_init=ts_init, - sequence=data.u, - ) - - -def parse_book_delta_ws( - instrument_id: InstrumentId, - side: OrderSide, - delta: tuple[str, str], - ts_event: int, - ts_init: int, - update_id: int, -) -> OrderBookDelta: - price = float(delta[0]) - size = float(delta[1]) - - order = BookOrder( - price=price, - size=size, - side=side, - ) - - return OrderBookDelta( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - action=BookAction.UPDATE if size > 0.0 else BookAction.DELETE, - order=order, - ts_event=ts_event, - ts_init=ts_init, - sequence=update_id, - ) - - -def parse_quote_tick_ws( - instrument_id: InstrumentId, - data: BinanceQuoteData, - ts_init: int, -) -> QuoteTick: - return QuoteTick( - instrument_id=instrument_id, - bid=Price.from_str(data.b), - ask=Price.from_str(data.a), - bid_size=Quantity.from_str(data.B), - ask_size=Quantity.from_str(data.A), - ts_event=ts_init, - ts_init=ts_init, - ) - - -def parse_ticker_24hr_ws( - instrument_id: InstrumentId, - data: BinanceTickerData, - ts_init: int, -) -> BinanceTicker: - return BinanceTicker( - instrument_id=instrument_id, - price_change=Decimal(data.p), - price_change_percent=Decimal(data.P), - weighted_avg_price=Decimal(data.w), - prev_close_price=Decimal(data.x) if data.x is not None else None, - last_price=Decimal(data.c), - last_qty=Decimal(data.Q), - bid_price=Decimal(data.b) if data.b is not None else None, - bid_qty=Decimal(data.B) if data.B is not None else None, - ask_price=Decimal(data.a) if data.a is not None else None, - ask_qty=Decimal(data.A) if data.A is not None else None, - open_price=Decimal(data.o), - high_price=Decimal(data.h), - low_price=Decimal(data.l), - volume=Decimal(data.v), - quote_volume=Decimal(data.q), - open_time_ms=data.O, - close_time_ms=data.C, - first_id=data.F, - last_id=data.L, - count=data.n, - ts_event=millis_to_nanos(data.E), - ts_init=ts_init, - ) - - -def parse_bar_ws( - instrument_id: InstrumentId, - data: BinanceCandlestick, - ts_init: int, -) -> BinanceBar: - resolution = data.i[-1] - if resolution == "s": - aggregation = BarAggregation.SECOND - elif resolution == "m": - aggregation = BarAggregation.MINUTE - elif resolution == "h": - aggregation = BarAggregation.HOUR - elif resolution == "d": - aggregation = BarAggregation.DAY - elif resolution == "w": - aggregation = BarAggregation.WEEK - elif resolution == "M": - aggregation = BarAggregation.MONTH - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"unsupported time aggregation resolution, was {resolution}", # pragma: no cover (design-time error) # noqa - ) - - bar_spec = BarSpecification( - step=int(data.i[:-1]), - aggregation=aggregation, - price_type=PriceType.LAST, - ) - - bar_type = BarType( - instrument_id=instrument_id, - bar_spec=bar_spec, - aggregation_source=AggregationSource.EXTERNAL, - ) - - return BinanceBar( - bar_type=bar_type, - open=Price.from_str(data.o), - high=Price.from_str(data.h), - low=Price.from_str(data.l), - close=Price.from_str(data.c), - volume=Quantity.from_str(data.v), - quote_volume=Decimal(data.q), - count=data.n, - taker_buy_base_volume=Decimal(data.V), - taker_buy_quote_volume=Decimal(data.Q), - ts_event=millis_to_nanos(data.T), - ts_init=ts_init, - ) diff --git a/nautilus_trader/adapters/binance/common/schemas.py b/nautilus_trader/adapters/binance/common/schemas.py deleted file mode 
100644 index 9dbd8aa62ef7..000000000000 --- a/nautilus_trader/adapters/binance/common/schemas.py +++ /dev/null @@ -1,275 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from typing import Optional - -import msgspec - - -################################################################################ -# HTTP responses -################################################################################ - - -class BinanceListenKey(msgspec.Struct): - """HTTP response from creating a new `Binance` user listen key.""" - - listenKey: str - - -class BinanceQuote(msgspec.Struct): - """HTTP response from `Binance` GET /fapi/v1/ticker/bookTicker.""" - - symbol: str - bidPrice: str - bidQty: str - askPrice: str - askQty: str - time: int # Transaction time - - -class BinanceTrade(msgspec.Struct): - """HTTP response from `Binance` GET /fapi/v1/trades.""" - - id: int - price: str - qty: str - quoteQty: str - time: int - isBuyerMaker: bool - isBestMatch: Optional[bool] = True - - -class BinanceTicker(msgspec.Struct, kw_only=True): - """HTTP response from `Binance` GET /fapi/v1/ticker/24hr .""" - - symbol: str - priceChange: str - priceChangePercent: str - weightedAvgPrice: str - prevClosePrice: Optional[str] = None - lastPrice: str - lastQty: str - bidPrice: str - bidQty: str - askPrice: str - askQty: str - openPrice: str - highPrice: str - lowPrice: str - volume: str - quoteVolume: str - openTime: int - closeTime: int - firstId: int - lastId: int - count: int - - -################################################################################ -# WebSocket messages -################################################################################ - - -class BinanceDataMsgWrapper(msgspec.Struct): - """ - Provides a wrapper for data WebSocket messages from `Binance`. - """ - - stream: str - - -class BinanceOrderBookData(msgspec.Struct, kw_only=True): - """WebSocket message 'inner struct' for `Binance` Diff. Book Depth Streams.""" - - e: str # Event type - E: int # Event time - T: Optional[int] = None # Transaction time (Binance Futures only) - s: str # Symbol - U: int # First update ID in event - u: int # Final update ID in event - pu: Optional[int] = None # ?? (Binance Futures only) - b: list[tuple[str, str]] # Bids to be updated - a: list[tuple[str, str]] # Asks to be updated - - -class BinanceOrderBookMsg(msgspec.Struct): - """WebSocket message from `Binance` Diff. 
Book Depth Streams.""" - - stream: str - data: BinanceOrderBookData - - -class BinanceQuoteData(msgspec.Struct): - """WebSocket message from `Binance` Individual Symbol Book Ticker Streams.""" - - s: str # symbol - u: int # order book updateId - b: str # best bid price - B: str # best bid qty - a: str # best ask price - A: str # best ask qty - - -class BinanceQuoteMsg(msgspec.Struct): - """WebSocket message from `Binance` Individual Symbol Book Ticker Streams.""" - - stream: str - data: BinanceQuoteData - - -class BinanceAggregatedTradeData(msgspec.Struct): - """WebSocket message from `Binance` Aggregate Trade Streams.""" - - e: str # Event type - E: int # Event time - s: str # Symbol - a: int # Aggregate trade ID - p: str # Price - q: str # Quantity - f: int # First trade ID - l: int # Last trade ID - T: int # Trade time - m: bool # Is the buyer the market maker? - - -class BinanceAggregatedTradeMsg(msgspec.Struct): - """WebSocket message.""" - - stream: str - data: BinanceAggregatedTradeData - - -class BinanceTickerData(msgspec.Struct, kw_only=True): - """ - WebSocker message from `Binance` 24hr Ticker - - Fields - ------ - - e: Event type - - E: Event time - - s: Symbol - - p: Price change - - P: Price change percent - - w: Weighted average price - - x: Previous close price - - c: Last price - - Q: Last quantity - - b: Best bid price - - B: Best bid quantity - - a: Best ask price - - A: Best ask quantity - - o: Open price - - h: High price - - l: Low price - - v: Total traded base asset volume - - q: Total traded quote asset volume - - O: Statistics open time - - C: Statistics close time - - F: First trade ID - - L: Last trade ID - - n: Total number of trades - """ - - e: str # Event type - E: int # Event time - s: str # Symbol - p: str # Price change - P: str # Price change percent - w: str # Weighted average price - x: Optional[str] = None # First trade(F)-1 price (first trade before the 24hr rolling window) - c: str # Last price - Q: str # Last quantity - b: Optional[str] = None # Best bid price - B: Optional[str] = None # Best bid quantity - a: Optional[str] = None # Best ask price - A: Optional[str] = None # Best ask quantity - o: str # Open price - h: str # High price - l: str # Low price - v: str # Total traded base asset volume - q: str # Total traded quote asset volume - O: int # Statistics open time - C: int # Statistics close time - F: int # First trade ID - L: int # Last trade ID - n: int # Total number of trades - - -class BinanceTickerMsg(msgspec.Struct): - """WebSocket message.""" - - stream: str - data: BinanceTickerData - - -class BinanceCandlestick(msgspec.Struct): - """ - WebSocket message 'inner struct' for `Binance` Kline/Candlestick Streams. - - Fields - ------ - - t: Kline start time - - T: Kline close time - - s: Symbol - - i: Interval - - f: First trade ID - - L: Last trade ID - - o: Open price - - c: Close price - - h: High price - - l: Low price - - v: Base asset volume - - n: Number of trades - - x: Is this kline closed? - - q: Quote asset volume - - V: Taker buy base asset volume - - Q: Taker buy quote asset volume - - B: Ignore - """ - - t: int # Kline start time - T: int # Kline close time - s: str # Symbol - i: str # Interval - f: int # First trade ID - L: int # Last trade ID - o: str # Open price - c: str # Close price - h: str # High price - l: str # Low price - v: str # Base asset volume - n: int # Number of trades - x: bool # Is this kline closed? 
- q: str # Quote asset volume - V: str # Taker buy base asset volume - Q: str # Taker buy quote asset volume - B: str # Ignore - - -class BinanceCandlestickData(msgspec.Struct): - """WebSocket message 'inner struct'.""" - - e: str - E: int - s: str - k: BinanceCandlestick - - -class BinanceCandlestickMsg(msgspec.Struct): - """WebSocket message for `Binance` Kline/Candlestick Streams.""" - - stream: str - data: BinanceCandlestickData diff --git a/nautilus_trader/adapters/binance/common/parsing/__init__.py b/nautilus_trader/adapters/binance/common/schemas/__init__.py similarity index 100% rename from nautilus_trader/adapters/binance/common/parsing/__init__.py rename to nautilus_trader/adapters/binance/common/schemas/__init__.py diff --git a/nautilus_trader/adapters/binance/common/schemas/account.py b/nautilus_trader/adapters/binance/common/schemas/account.py new file mode 100644 index 000000000000..61bf6583775e --- /dev/null +++ b/nautilus_trader/adapters/binance/common/schemas/account.py @@ -0,0 +1,247 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +from decimal import Decimal +from typing import Optional + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide +from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.execution.reports import OrderStatusReport +from nautilus_trader.execution.reports import TradeReport +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.enums import ContingencyType +from nautilus_trader.model.enums import LiquiditySide +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.enums import TrailingOffsetType +from nautilus_trader.model.enums import TriggerType +from nautilus_trader.model.identifiers import AccountId +from nautilus_trader.model.identifiers import ClientOrderId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import OrderListId +from nautilus_trader.model.identifiers import PositionId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.identifiers import VenueOrderId +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity + + +################################################################################ +# HTTP responses +################################################################################ + + +class BinanceUserTrade(msgspec.Struct, frozen=True): + """ + HTTP response from `Binance Spot/Margin` + `GET /api/v3/myTrades` + HTTP response from `Binance USD-M Futures` + `GET /fapi/v1/userTrades` + HTTP response from `Binance COIN-M Futures` + `GET /dapi/v1/userTrades` + """ + + commission: str + commissionAsset: str + price: str + qty: str + + # Parameters not present in 'fills' list (see FULL response of BinanceOrder) + symbol: Optional[str] = None + id: Optional[int] = None + orderId: Optional[int] = None + time: Optional[int] = None + quoteQty: Optional[str] = None # SPOT/MARGIN & USD-M FUTURES only + + # Parameters in SPOT/MARGIN only: + orderListId: Optional[int] = None # unless OCO, the value will always be -1 + isBuyer: Optional[bool] = None + isMaker: Optional[bool] = None + isBestMatch: Optional[bool] = None + tradeId: Optional[int] = None # only in BinanceOrder FULL response + + # Parameters in FUTURES only: + buyer: Optional[bool] = None + maker: Optional[bool] = None + realizedPnl: Optional[str] = None + side: Optional[BinanceOrderSide] = None + positionSide: Optional[str] = None + baseQty: Optional[str] = None # COIN-M FUTURES only + pair: Optional[str] = None # COIN-M FUTURES only + + def parse_to_trade_report( + self, + account_id: AccountId, + instrument_id: InstrumentId, + report_id: UUID4, + ts_init: int, + ) -> TradeReport: + venue_position_id = None + if self.positionSide is not None: + venue_position_id = PositionId(f"{instrument_id}-{self.positionSide}") + + order_side = OrderSide.BUY if self.isBuyer or self.buyer else OrderSide.SELL + liquidity_side = LiquiditySide.MAKER if self.isMaker or self.maker else LiquiditySide.TAKER 
+ + return TradeReport( + account_id=account_id, + instrument_id=instrument_id, + venue_order_id=VenueOrderId(str(self.orderId)), + venue_position_id=venue_position_id, + trade_id=TradeId(str(self.id)), + order_side=order_side, + last_qty=Quantity.from_str(self.qty), + last_px=Price.from_str(self.price), + liquidity_side=liquidity_side, + ts_event=millis_to_nanos(self.time), + commission=Money(self.commission, Currency.from_str(self.commissionAsset)), + report_id=report_id, + ts_init=ts_init, + ) + + +class BinanceOrder(msgspec.Struct, frozen=True): + """ + HTTP response from `Binance Spot/Margin` + `GET /api/v3/order` + HTTP response from `Binance USD-M Futures` + `GET /fapi/v1/order` + HTTP response from `Binance COIN-M Futures` + `GET /dapi/v1/order` + """ + + symbol: str + orderId: int + clientOrderId: str + + # Parameters not in ACK response: + price: Optional[str] = None + origQty: Optional[str] = None + executedQty: Optional[str] = None + status: Optional[BinanceOrderStatus] = None + timeInForce: Optional[BinanceTimeInForce] = None + type: Optional[BinanceOrderType] = None + side: Optional[BinanceOrderSide] = None + stopPrice: Optional[str] = None # please ignore when order type is TRAILING_STOP_MARKET + time: Optional[int] = None + updateTime: Optional[int] = None + + # Parameters in SPOT/MARGIN only: + orderListId: Optional[int] = None # Unless OCO, the value will always be -1 + cumulativeQuoteQty: Optional[str] = None # cumulative quote qty + icebergQty: Optional[str] = None + isWorking: Optional[bool] = None + workingTime: Optional[int] = None + origQuoteOrderQty: Optional[str] = None + selfTradePreventionMode: Optional[str] = None + transactTime: Optional[int] = None # POST & DELETE methods only + fills: Optional[list[BinanceUserTrade]] = None # FULL response only + + # Parameters in FUTURES only: + avgPrice: Optional[str] = None + origType: Optional[BinanceOrderType] = None + reduceOnly: Optional[bool] = None + positionSide: Optional[str] = None + closePosition: Optional[bool] = None + activatePrice: Optional[str] = None # activation price, only for TRAILING_STOP_MARKET order + priceRate: Optional[str] = None # callback rate, only for TRAILING_STOP_MARKET order + workingType: Optional[str] = None + priceProtect: Optional[bool] = None # if conditional order trigger is protected + cumQuote: Optional[str] = None # USD-M FUTURES only + cumBase: Optional[str] = None # COIN-M FUTURES only + pair: Optional[str] = None # COIN-M FUTURES only + + def parse_to_order_status_report( + self, + account_id: AccountId, + instrument_id: InstrumentId, + report_id: UUID4, + enum_parser: BinanceEnumParser, + ts_init: int, + ) -> OrderStatusReport: + if self.price is None: + raise RuntimeError( + "Cannot generate order status report from Binance ACK response.", + ) + + client_order_id = ClientOrderId(self.clientOrderId) if self.clientOrderId != "" else None + order_list_id = OrderListId(str(self.orderListId)) if self.orderListId is not None else None + contingency_type = ( + ContingencyType.OCO + if self.orderListId is not None and self.orderListId != -1 + else ContingencyType.NO_CONTINGENCY + ) + + trigger_price = Decimal(self.stopPrice) + trigger_type = None + if self.workingType is not None: + trigger_type = enum_parser.parse_binance_trigger_type(self.workingType) + elif trigger_price > 0: + trigger_type = TriggerType.LAST_TRADE + + trailing_offset = None + trailing_offset_type = TrailingOffsetType.NO_TRAILING_OFFSET + if self.priceRate is not None: +
trailing_offset = Decimal(self.priceRate) + trailing_offset_type = TrailingOffsetType.BASIS_POINTS + + avg_px = Decimal(self.avgPrice) if self.avgPrice is not None else None + post_only = ( + self.type == BinanceOrderType.LIMIT_MAKER or self.timeInForce == BinanceTimeInForce.GTX + ) + reduce_only = self.reduceOnly if self.reduceOnly is not None else False + + return OrderStatusReport( + account_id=account_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + order_list_id=order_list_id, + venue_order_id=VenueOrderId(str(self.orderId)), + order_side=enum_parser.parse_binance_order_side(self.side), + order_type=enum_parser.parse_binance_order_type(self.type), + contingency_type=contingency_type, + time_in_force=enum_parser.parse_binance_time_in_force(self.timeInForce), + order_status=enum_parser.parse_binance_order_status(self.status), + price=Price.from_str(str(Decimal(self.price))), + trigger_price=Price.from_str(str(trigger_price)), + trigger_type=trigger_type, + trailing_offset=trailing_offset, + trailing_offset_type=trailing_offset_type, + quantity=Quantity.from_str(self.origQty), + filled_qty=Quantity.from_str(self.executedQty), + avg_px=avg_px, + post_only=post_only, + reduce_only=reduce_only, + ts_accepted=millis_to_nanos(self.time), + ts_last=millis_to_nanos(self.updateTime), + report_id=report_id, + ts_init=ts_init, + ) + + +class BinanceStatusCode(msgspec.Struct, frozen=True): + """ + HTTP response status code + """ + + code: int + msg: str diff --git a/nautilus_trader/adapters/binance/common/schemas/market.py b/nautilus_trader/adapters/binance/common/schemas/market.py new file mode 100644 index 000000000000..2173f5bf0731 --- /dev/null +++ b/nautilus_trader/adapters/binance/common/schemas/market.py @@ -0,0 +1,667 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
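For context, these account schemas are plain msgspec structs, so a raw HTTP payload decodes directly into a typed object before any report parsing happens. A minimal sketch, assuming the module path introduced in this diff (the ACK-style payload below is illustrative, not captured from Binance):

import msgspec

from nautilus_trader.adapters.binance.common.schemas.account import BinanceOrder

# Hypothetical ACK response: only the three required fields are present.
raw = b'{"symbol": "ETHUSDT", "orderId": 123456, "clientOrderId": "O-001"}'
order = msgspec.json.decode(raw, type=BinanceOrder)
assert order.price is None
# parse_to_order_status_report() would raise RuntimeError here by design,
# since ACK responses carry no price or fill information.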
+# ------------------------------------------------------------------------------------------------- + +from decimal import Decimal +from typing import Optional + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from nautilus_trader.adapters.binance.common.enums import BinanceExchangeFilterType +from nautilus_trader.adapters.binance.common.enums import BinanceKlineInterval +from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitInterval +from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitType +from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType +from nautilus_trader.adapters.binance.common.types import BinanceBar +from nautilus_trader.adapters.binance.common.types import BinanceTicker +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.model.data.bar import BarType +from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.enums import AggregationSource +from nautilus_trader.model.enums import AggressorSide +from nautilus_trader.model.enums import BookAction +from nautilus_trader.model.enums import BookType +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity +from nautilus_trader.model.orderbook.data import BookOrder +from nautilus_trader.model.orderbook.data import OrderBookDelta +from nautilus_trader.model.orderbook.data import OrderBookDeltas +from nautilus_trader.model.orderbook.data import OrderBookSnapshot + + +################################################################################ +# HTTP responses +################################################################################ + + +class BinanceTime(msgspec.Struct, frozen=True): + """ + Schema of the current server time. + GET response of `time`. + """ + + serverTime: int + + +class BinanceExchangeFilter(msgspec.Struct): + """ + Schema of an exchange filter, within response of GET `exchangeInfo`. + """ + + filterType: BinanceExchangeFilterType + maxNumOrders: Optional[int] = None + maxNumAlgoOrders: Optional[int] = None + + +class BinanceRateLimit(msgspec.Struct): + """ + Schema of rate limit info, within response of GET `exchangeInfo`. + """ + + rateLimitType: BinanceRateLimitType + interval: BinanceRateLimitInterval + intervalNum: int + limit: int + count: Optional[int] = None # SPOT/MARGIN rateLimit/order response only + + +class BinanceSymbolFilter(msgspec.Struct): + """ + Schema of a symbol filter, within response of GET `exchangeInfo`. + """ + + filterType: BinanceSymbolFilterType + minPrice: Optional[str] = None + maxPrice: Optional[str] = None + tickSize: Optional[str] = None + multiplierUp: Optional[str] = None + multiplierDown: Optional[str] = None + multiplierDecimal: Optional[str] = None + avgPriceMins: Optional[int] = None + minQty: Optional[str] = None + maxQty: Optional[str] = None + stepSize: Optional[str] = None + limit: Optional[int] = None + maxNumOrders: Optional[int] = None + + notional: Optional[str] = None # SPOT/MARGIN & USD-M FUTURES only + minNotional: Optional[str] = None # SPOT/MARGIN & USD-M FUTURES only + maxNumAlgoOrders: Optional[int] = None # SPOT/MARGIN & USD-M FUTURES only + + bidMultiplierUp: Optional[str] = None # SPOT/MARGIN only + bidMultiplierDown:
Optional[str] = None # SPOT/MARGIN only + askMultiplierUp: Optional[str] = None # SPOT/MARGIN only + askMultiplierDown: Optional[str] = None # SPOT/MARGIN only + applyMinToMarket: Optional[bool] = None # SPOT/MARGIN only + maxNotional: Optional[str] = None # SPOT/MARGIN only + applyMaxToMarket: Optional[bool] = None # SPOT/MARGIN only + maxNumIcebergOrders: Optional[int] = None # SPOT/MARGIN only + maxPosition: Optional[str] = None # SPOT/MARGIN only + minTrailingAboveDelta: Optional[int] = None # SPOT/MARGIN only + maxTrailingAboveDelta: Optional[int] = None # SPOT/MARGIN only + minTrailingBelowDelta: Optional[int] = None # SPOT/MARGIN only + maxTrailingBelowDelta: Optional[int] = None # SPOT/MARGIN only + + +class BinanceDepth(msgspec.Struct, frozen=True): + """ + Schema of a binance orderbook depth. + + GET response of `depth`. + """ + + lastUpdateId: int + bids: list[tuple[str, str]] + asks: list[tuple[str, str]] + + symbol: Optional[str] = None # COIN-M FUTURES only + pair: Optional[str] = None # COIN-M FUTURES only + + E: Optional[int] = None # FUTURES only, Message output time + T: Optional[int] = None # FUTURES only, Transaction time + + def parse_to_order_book_snapshot( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> OrderBookSnapshot: + return OrderBookSnapshot( + instrument_id=instrument_id, + book_type=BookType.L2_MBP, + bids=[[float(o[0]), float(o[1])] for o in self.bids or []], + asks=[[float(o[0]), float(o[1])] for o in self.asks or []], + ts_event=ts_init, + ts_init=ts_init, + sequence=self.lastUpdateId or 0, + ) + + +class BinanceTrade(msgspec.Struct, frozen=True): + """Schema of a single trade.""" + + id: int + price: str + qty: str + quoteQty: str + time: int + isBuyerMaker: bool + isBestMatch: Optional[bool] = None # SPOT/MARGIN only + + def parse_to_trade_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> TradeTick: + """Parse Binance trade to internal TradeTick.""" + return TradeTick( + instrument_id=instrument_id, + price=Price.from_str(self.price), + size=Quantity.from_str(self.qty), + aggressor_side=AggressorSide.SELLER if self.isBuyerMaker else AggressorSide.BUYER, + trade_id=TradeId(str(self.id)), + ts_event=millis_to_nanos(self.time), + ts_init=ts_init, + ) + + +class BinanceAggTrade(msgspec.Struct, frozen=True): + """Schema of a single compressed aggregate trade.""" + + a: int # Aggregate tradeId + p: str # Price + q: str # Quantity + f: int # First tradeId + l: int # Last tradeId + T: int # Timestamp + m: bool # Was the buyer the maker? + M: Optional[bool] = None # SPOT/MARGIN only, was the trade the best price match? 
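The same decode-then-parse pattern applies to the REST depth response above. A brief sketch with an illustrative payload (all values are made up):

import msgspec

from nautilus_trader.adapters.binance.common.schemas.market import BinanceDepth

raw = b'{"lastUpdateId": 1027024, "bids": [["4.00000000", "431.0"]], "asks": [["4.00000200", "12.0"]]}'
depth = msgspec.json.decode(raw, type=BinanceDepth)
assert depth.lastUpdateId == 1027024
# depth.parse_to_order_book_snapshot(instrument_id, ts_init) then yields an
# L2_MBP OrderBookSnapshot sequenced by lastUpdateId.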
+ + def parse_to_trade_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> TradeTick: + """Parse Binance trade to internal TradeTick""" + return TradeTick( + instrument_id=instrument_id, + price=Price.from_str(self.p), + size=Quantity.from_str(self.q), + aggressor_side=AggressorSide.SELLER if self.m else AggressorSide.BUYER, + trade_id=TradeId(str(self.a)), + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + ) + + +class BinanceKline(msgspec.Struct, array_like=True): + """Array-like schema of single Binance kline.""" + + open_time: int + open: str + high: str + low: str + close: str + volume: str + close_time: int + asset_volume: str + trades_count: int + taker_base_volume: str + taker_quote_volume: str + ignore: str + + def parse_to_binance_bar( + self, + bar_type: BarType, + ts_init: int, + ) -> BinanceBar: + """Parse kline to BinanceBar.""" + return BinanceBar( + bar_type=bar_type, + open=Price.from_str(self.open), + high=Price.from_str(self.high), + low=Price.from_str(self.low), + close=Price.from_str(self.close), + volume=Quantity.from_str(self.volume), + quote_volume=Decimal(self.asset_volume), + count=self.trades_count, + taker_buy_base_volume=Decimal(self.taker_base_volume), + taker_buy_quote_volume=Decimal(self.taker_quote_volume), + ts_event=millis_to_nanos(self.open_time), + ts_init=ts_init, + ) + + +class BinanceTicker24hr(msgspec.Struct, frozen=True): + """Schema of single Binance 24hr ticker (FULL/MINI).""" + + symbol: Optional[str] + lastPrice: Optional[str] + openPrice: Optional[str] + highPrice: Optional[str] + lowPrice: Optional[str] + volume: Optional[str] + openTime: Optional[int] + closeTime: Optional[int] + firstId: Optional[int] + lastId: Optional[int] + count: Optional[int] + + priceChange: Optional[str] = None # FULL response only (SPOT/MARGIN) + priceChangePercent: Optional[str] = None # FULL response only (SPOT/MARGIN) + weightedAvgPrice: Optional[str] = None # FULL response only (SPOT/MARGIN) + lastQty: Optional[str] = None # FULL response only (SPOT/MARGIN) + + prevClosePrice: Optional[str] = None # SPOT/MARGIN only + bidPrice: Optional[str] = None # SPOT/MARGIN only + bidQty: Optional[str] = None # SPOT/MARGIN only + askPrice: Optional[str] = None # SPOT/MARGIN only + askQty: Optional[str] = None # SPOT/MARGIN only + + pair: Optional[str] = None # COIN-M FUTURES only + baseVolume: Optional[str] = None # COIN-M FUTURES only + + quoteVolume: Optional[str] = None # SPOT/MARGIN & USD-M FUTURES only + + +class BinanceTickerPrice(msgspec.Struct, frozen=True): + """Schema of single Binance Price Ticker.""" + + symbol: Optional[str] + price: Optional[str] + time: Optional[int] = None # FUTURES only + ps: Optional[str] = None # COIN-M FUTURES only, pair + + +class BinanceTickerBook(msgspec.Struct, frozen=True): + """Schema of a single Binance Order Book Ticker.""" + + symbol: Optional[str] + bidPrice: Optional[str] + bidQty: Optional[str] + askPrice: Optional[str] + askQty: Optional[str] + pair: Optional[str] = None # USD-M FUTURES only + time: Optional[int] = None # FUTURES only, transaction time + + +################################################################################ +# WebSocket messages +################################################################################ + + +class BinanceDataMsgWrapper(msgspec.Struct): + """ + Provides a wrapper for data WebSocket messages from `Binance`. 
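Because Binance serializes klines as JSON arrays rather than objects, BinanceKline above is declared with array_like=True, and msgspec maps array positions onto the struct fields in order. A short sketch with an illustrative row:

import msgspec

from nautilus_trader.adapters.binance.common.schemas.market import BinanceKline

raw = b'[1672515780000, "16569.8", "16570.0", "16569.8", "16570.0", "3.1", 1672515839999, "51366.0", 25, "1.5", "24857.2", "0"]'
kline = msgspec.json.decode(raw, type=BinanceKline)
assert kline.open == "16569.8" and kline.trades_count == 25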
+ """ + + stream: str + + +class BinanceOrderBookDelta(msgspec.Struct, array_like=True): + """Schema of single ask/bid delta.""" + + price: str + size: str + + def parse_to_order_book_delta( + self, + instrument_id: InstrumentId, + side: OrderSide, + ts_event: int, + ts_init: int, + update_id: int, + ) -> OrderBookDelta: + price = float(self.price) + size = float(self.size) + + order = BookOrder( + price=price, + size=size, + side=side, + ) + + return OrderBookDelta( + instrument_id=instrument_id, + book_type=BookType.L2_MBP, + action=BookAction.UPDATE if size > 0.0 else BookAction.DELETE, + order=order, + ts_event=ts_event, + ts_init=ts_init, + sequence=update_id, + ) + + +class BinanceOrderBookData(msgspec.Struct, frozen=True): + """WebSocket message 'inner struct' for `Binance` Partial & Diff. Book Depth Streams.""" + + e: str # Event type + E: int # Event time + s: str # Symbol + U: int # First update ID in event + u: int # Final update ID in event + b: list[BinanceOrderBookDelta] # Bids to be updated + a: list[BinanceOrderBookDelta] # Asks to be updated + + T: Optional[int] = None # FUTURES only, transaction time + pu: Optional[int] = None # FUTURES only, previous final update ID + + ps: Optional[str] = None # COIN-M FUTURES only, pair + + def parse_to_order_book_deltas( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> OrderBookDeltas: + ts_event: int = millis_to_nanos(self.T) if self.T is not None else millis_to_nanos(self.E) + + bid_deltas: list[OrderBookDelta] = [ + delta.parse_to_order_book_delta(instrument_id, OrderSide.BUY, ts_event, ts_init, self.u) + for delta in self.b + ] + ask_deltas: list[OrderBookDelta] = [ + delta.parse_to_order_book_delta( + instrument_id, + OrderSide.SELL, + ts_event, + ts_init, + self.u, + ) + for delta in self.a + ] + + return OrderBookDeltas( + instrument_id=instrument_id, + book_type=BookType.L2_MBP, + deltas=bid_deltas + ask_deltas, + ts_event=ts_event, + ts_init=ts_init, + sequence=self.u, + ) + + def parse_to_order_book_snapshot( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> OrderBookSnapshot: + return OrderBookSnapshot( + instrument_id=instrument_id, + book_type=BookType.L2_MBP, + bids=[[float(o.price), float(o.size)] for o in self.b], + asks=[[float(o.price), float(o.size)] for o in self.a], + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + sequence=self.u, + ) + + +class BinanceOrderBookMsg(msgspec.Struct, frozen=True): + """WebSocket message from `Binance` Partial & Diff. 
Book Depth Streams.""" + + stream: str + data: BinanceOrderBookData + + +class BinanceQuoteData(msgspec.Struct, frozen=True): + """WebSocket message from `Binance` Individual Symbol Book Ticker Streams.""" + + s: str # symbol + u: int # order book updateId + b: str # best bid price + B: str # best bid qty + a: str # best ask price + A: str # best ask qty + + def parse_to_quote_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> QuoteTick: + return QuoteTick( + instrument_id=instrument_id, + bid=Price.from_str(self.b), + ask=Price.from_str(self.a), + bid_size=Quantity.from_str(self.B), + ask_size=Quantity.from_str(self.A), + ts_event=ts_init, + ts_init=ts_init, + ) + + +class BinanceQuoteMsg(msgspec.Struct, frozen=True): + """WebSocket message from `Binance` Individual Symbol Book Ticker Streams.""" + + stream: str + data: BinanceQuoteData + + +class BinanceAggregatedTradeData(msgspec.Struct, frozen=True): + """WebSocket message from `Binance` Aggregate Trade Streams.""" + + e: str # Event type + E: int # Event time + s: str # Symbol + a: int # Aggregate trade ID + p: str # Price + q: str # Quantity + f: int # First trade ID + l: int # Last trade ID + T: int # Trade time + m: bool # Is the buyer the market maker? + + def parse_to_trade_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> TradeTick: + return TradeTick( + instrument_id=instrument_id, + price=Price.from_str(self.p), + size=Quantity.from_str(self.q), + aggressor_side=AggressorSide.SELLER if self.m else AggressorSide.BUYER, + trade_id=TradeId(str(self.a)), + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + ) + + +class BinanceAggregatedTradeMsg(msgspec.Struct, frozen=True): + """WebSocket message.""" + + stream: str + data: BinanceAggregatedTradeData + + +class BinanceTickerData(msgspec.Struct, kw_only=True, frozen=True): + """ + WebSocket message from `Binance` 24hr Ticker + + Fields + ------ + - e: Event type + - E: Event time + - s: Symbol + - p: Price change + - P: Price change percent + - w: Weighted average price + - x: Previous close price + - c: Last price + - Q: Last quantity + - b: Best bid price + - B: Best bid quantity + - a: Best ask price + - A: Best ask quantity + - o: Open price + - h: High price + - l: Low price + - v: Total traded base asset volume + - q: Total traded quote asset volume + - O: Statistics open time + - C: Statistics close time + - F: First trade ID + - L: Last trade ID + - n: Total number of trades + """ + + e: str # Event type + E: int # Event time + s: str # Symbol + p: str # Price change + P: str # Price change percent + w: str # Weighted average price + x: Optional[str] = None # First trade(F)-1 price (first trade before the 24hr rolling window) + c: str # Last price + Q: str # Last quantity + b: Optional[str] = None # Best bid price + B: Optional[str] = None # Best bid quantity + a: Optional[str] = None # Best ask price + A: Optional[str] = None # Best ask quantity + o: str # Open price + h: str # High price + l: str # Low price + v: str # Total traded base asset volume + q: str # Total traded quote asset volume + O: int # Statistics open time + C: int # Statistics close time + F: int # First trade ID + L: int # Last trade ID + n: int # Total number of trades + + def parse_to_binance_ticker( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> BinanceTicker: + return BinanceTicker( + instrument_id=instrument_id, + price_change=Decimal(self.p), + price_change_percent=Decimal(self.P), + weighted_avg_price=Decimal(self.w), + 
prev_close_price=Decimal(self.x) if self.x is not None else None, + last_price=Decimal(self.c), + last_qty=Decimal(self.Q), + bid_price=Decimal(self.b) if self.b is not None else None, + bid_qty=Decimal(self.B) if self.B is not None else None, + ask_price=Decimal(self.a) if self.a is not None else None, + ask_qty=Decimal(self.A) if self.A is not None else None, + open_price=Decimal(self.o), + high_price=Decimal(self.h), + low_price=Decimal(self.l), + volume=Decimal(self.v), + quote_volume=Decimal(self.q), + open_time_ms=self.O, + close_time_ms=self.C, + first_id=self.F, + last_id=self.L, + count=self.n, + ts_event=millis_to_nanos(self.E), + ts_init=ts_init, + ) + + +class BinanceTickerMsg(msgspec.Struct, frozen=True): + """WebSocket message.""" + + stream: str + data: BinanceTickerData + + +class BinanceCandlestick(msgspec.Struct, frozen=True): + """ + WebSocket message 'inner struct' for `Binance` Kline/Candlestick Streams. + + Fields + ------ + - t: Kline start time + - T: Kline close time + - s: Symbol + - i: Interval + - f: First trade ID + - L: Last trade ID + - o: Open price + - c: Close price + - h: High price + - l: Low price + - v: Base asset volume + - n: Number of trades + - x: Is this kline closed? + - q: Quote asset volume + - V: Taker buy base asset volume + - Q: Taker buy quote asset volume + - B: Ignore + """ + + t: int # Kline start time + T: int # Kline close time + s: str # Symbol + i: BinanceKlineInterval # Interval + f: int # First trade ID + L: int # Last trade ID + o: str # Open price + c: str # Close price + h: str # High price + l: str # Low price + v: str # Base asset volume + n: int # Number of trades + x: bool # Is this kline closed? + q: str # Quote asset volume + V: str # Taker buy base asset volume + Q: str # Taker buy quote asset volume + B: str # Ignore + + def parse_to_binance_bar( + self, + instrument_id: InstrumentId, + enum_parser: BinanceEnumParser, + ts_init: int, + ) -> BinanceBar: + bar_type = BarType( + instrument_id=instrument_id, + bar_spec=enum_parser.parse_binance_kline_interval_to_bar_spec(self.i), + aggregation_source=AggregationSource.EXTERNAL, + ) + return BinanceBar( + bar_type=bar_type, + open=Price.from_str(self.o), + high=Price.from_str(self.h), + low=Price.from_str(self.l), + close=Price.from_str(self.c), + volume=Quantity.from_str(self.v), + quote_volume=Decimal(self.q), + count=self.n, + taker_buy_base_volume=Decimal(self.V), + taker_buy_quote_volume=Decimal(self.Q), + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + ) + + +class BinanceCandlestickData(msgspec.Struct, frozen=True): + """WebSocket message 'inner struct'.""" + + e: str + E: int + s: str + k: BinanceCandlestick + + +class BinanceCandlestickMsg(msgspec.Struct, frozen=True): + """WebSocket message for `Binance` Kline/Candlestick Streams.""" + + stream: str + data: BinanceCandlestickData diff --git a/nautilus_trader/adapters/binance/common/schemas/symbol.py b/nautilus_trader/adapters/binance/common/schemas/symbol.py new file mode 100644 index 000000000000..8b6dceabc95f --- /dev/null +++ b/nautilus_trader/adapters/binance/common/schemas/symbol.py @@ -0,0 +1,60 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +import json + +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType + + +################################################################################ +# HTTP responses +################################################################################ + + +class BinanceSymbol(str): + """Binance compatible symbol.""" + + def __new__(cls, symbol: str): + if symbol is not None: + # Format the string on construction to be Binance compatible + return super().__new__( + cls, + symbol.upper().replace(" ", "").replace("/", "").replace("-PERP", ""), + ) + + def parse_binance_to_internal(self, account_type: BinanceAccountType) -> str: + if account_type.is_spot_or_margin: + return str(self) + + # Parse Futures symbol + if self[-1].isdigit(): + return str(self) # Deliverable + if self.endswith("_PERP"): + return str(self).replace("_", "-") + else: + return str(self) + "-PERP" + + +class BinanceSymbols(str): + """Binance compatible list of symbols.""" + + def __new__(cls, symbols: list[str]): + if symbols is not None: + binance_symbols: list[BinanceSymbol] = [BinanceSymbol(symbol) for symbol in symbols] + return super().__new__(cls, json.dumps(binance_symbols).replace(" ", "")) + + def parse_str_to_list(self) -> list[BinanceSymbol]: + binance_symbols: list[BinanceSymbol] = json.loads(self) + return binance_symbols diff --git a/nautilus_trader/adapters/binance/http/enums.py b/nautilus_trader/adapters/binance/common/schemas/user.py similarity index 72% rename from nautilus_trader/adapters/binance/http/enums.py rename to nautilus_trader/adapters/binance/common/schemas/user.py index d17da2174597..d3887333b69d 100644 --- a/nautilus_trader/adapters/binance/http/enums.py +++ b/nautilus_trader/adapters/binance/common/schemas/user.py @@ -13,14 +13,15 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from enum import Enum +import msgspec -class NewOrderRespType(Enum): - """ - Represents a `Binance` newOrderRespType. 
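To make the formatting rules in BinanceSymbol concrete, here is a small round-trip sketch (the "ETHUSDT-PERP" instrument is illustrative):

from nautilus_trader.adapters.binance.common.enums import BinanceAccountType
from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol

symbol = BinanceSymbol("ETHUSDT-PERP")  # "-PERP" is stripped for the Binance wire format
assert str(symbol) == "ETHUSDT"
# ...and mapped back to the internal convention for a USD-M futures account:
assert symbol.parse_binance_to_internal(BinanceAccountType.FUTURES_USDT) == "ETHUSDT-PERP"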
- """ +################################################################################ +# HTTP responses +################################################################################ - ACK = "ACK" - RESULT = "RESULT" - FULL = "FULL" + +class BinanceListenKey(msgspec.Struct): + """HTTP response from creating a new `Binance` user listen key.""" + + listenKey: str diff --git a/nautilus_trader/adapters/binance/common/types.py b/nautilus_trader/adapters/binance/common/types.py index 0ab8d81eaed8..09cde0bcb6d0 100644 --- a/nautilus_trader/adapters/binance/common/types.py +++ b/nautilus_trader/adapters/binance/common/types.py @@ -108,7 +108,6 @@ def __getstate__(self): ) def __setstate__(self, state): - super().__setstate__(state[:14]) self.quote_volume = Decimal(state[14]) self.count = state[15] diff --git a/nautilus_trader/adapters/binance/config.py b/nautilus_trader/adapters/binance/config.py index 21ce25803673..4bfe1da4832c 100644 --- a/nautilus_trader/adapters/binance/config.py +++ b/nautilus_trader/adapters/binance/config.py @@ -44,6 +44,9 @@ class BinanceDataClientConfig(LiveDataClientConfig): If client is connecting to Binance US. testnet : bool, default False If the client is connecting to a Binance testnet. + use_agg_trade_ticks : bool, default False + Whether to use aggregated trade tick endpoints instead of raw trade ticks. + TradeId of ticks will be the Aggregate tradeId returned by Binance. """ api_key: Optional[str] = None @@ -53,6 +56,7 @@ class BinanceDataClientConfig(LiveDataClientConfig): base_url_ws: Optional[str] = None us: bool = False testnet: bool = False + use_agg_trade_ticks: bool = False class BinanceExecClientConfig(LiveExecClientConfig): @@ -79,7 +83,7 @@ class BinanceExecClientConfig(LiveExecClientConfig): If client is connecting to Binance US. testnet : bool, default False If the client is connecting to a Binance testnet. - clock_sync_interval_secs : int, default 900 (15 mins) + clock_sync_interval_secs : int, default 0 The interval (seconds) between syncing the Nautilus clock with the Binance server(s) clock. If zero, then will *not* perform syncing. 
warn_gtd_to_gtc : bool, default True @@ -93,5 +97,5 @@ class BinanceExecClientConfig(LiveExecClientConfig): base_url_ws: Optional[str] = None us: bool = False testnet: bool = False - clock_sync_interval_secs: int = 900 + clock_sync_interval_secs: int = 0 warn_gtd_to_gtc: bool = True diff --git a/nautilus_trader/adapters/binance/factories.py b/nautilus_trader/adapters/binance/factories.py index f738837e749d..9a81e5c5dd68 100644 --- a/nautilus_trader/adapters/binance/factories.py +++ b/nautilus_trader/adapters/binance/factories.py @@ -30,7 +30,6 @@ from nautilus_trader.adapters.binance.spot.providers import BinanceSpotInstrumentProvider from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger from nautilus_trader.common.logging import Logger from nautilus_trader.config import InstrumentProviderConfig from nautilus_trader.live.factories import LiveDataClientFactory @@ -38,7 +37,7 @@ from nautilus_trader.msgbus.bus import MessageBus -HTTP_CLIENTS: dict[str, BinanceHttpClient] = {} +BINANCE_HTTP_CLIENTS: dict[str, BinanceHttpClient] = {} def get_cached_binance_http_client( @@ -84,14 +83,14 @@ def get_cached_binance_http_client( BinanceHttpClient """ - global HTTP_CLIENTS + global BINANCE_HTTP_CLIENTS key = key or _get_api_key(account_type, is_testnet) secret = secret or _get_api_secret(account_type, is_testnet) default_http_base_url = _get_http_base_url(account_type, is_testnet, is_us) client_key: str = "|".join((key, secret)) - if client_key not in HTTP_CLIENTS: + if client_key not in BINANCE_HTTP_CLIENTS: client = BinanceHttpClient( loop=loop, clock=clock, @@ -100,14 +99,15 @@ def get_cached_binance_http_client( secret=secret, base_url=base_url or default_http_base_url, ) - HTTP_CLIENTS[client_key] = client - return HTTP_CLIENTS[client_key] + BINANCE_HTTP_CLIENTS[client_key] = client + return BINANCE_HTTP_CLIENTS[client_key] @lru_cache(1) def get_cached_binance_spot_instrument_provider( client: BinanceHttpClient, logger: Logger, + clock: LiveClock, account_type: BinanceAccountType, config: InstrumentProviderConfig, ) -> BinanceSpotInstrumentProvider: @@ -122,6 +122,8 @@ def get_cached_binance_spot_instrument_provider( The client for the instrument provider. logger : Logger The logger for the instrument provider. + clock : LiveClock + The clock for the instrument provider. account_type : BinanceAccountType The Binance account type for the instrument provider. config : InstrumentProviderConfig @@ -135,6 +137,7 @@ def get_cached_binance_spot_instrument_provider( return BinanceSpotInstrumentProvider( client=client, logger=logger, + clock=clock, account_type=account_type, config=config, ) @@ -144,6 +147,7 @@ def get_cached_binance_spot_instrument_provider( def get_cached_binance_futures_instrument_provider( client: BinanceHttpClient, logger: Logger, + clock: LiveClock, account_type: BinanceAccountType, config: InstrumentProviderConfig, ) -> BinanceFuturesInstrumentProvider: @@ -158,6 +162,8 @@ def get_cached_binance_futures_instrument_provider( The client for the instrument provider. logger : Logger The logger for the instrument provider. + clock : LiveClock + The clock for the instrument provider. account_type : BinanceAccountType The Binance account type for the instrument provider. 
config : InstrumentProviderConfig @@ -171,6 +177,7 @@ def get_cached_binance_futures_instrument_provider( return BinanceFuturesInstrumentProvider( client=client, logger=logger, + clock=clock, account_type=account_type, config=config, ) @@ -189,7 +196,7 @@ def create( # type: ignore msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ) -> Union[BinanceSpotDataClient, BinanceFuturesDataClient]: """ Create a new Binance data client. @@ -208,7 +215,7 @@ def create( # type: ignore The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. Returns @@ -241,11 +248,12 @@ def create( # type: ignore ) provider: Union[BinanceSpotInstrumentProvider, BinanceFuturesInstrumentProvider] - if config.account_type.is_spot or config.account_type.is_margin: + if config.account_type.is_spot_or_margin: # Get instrument provider singleton provider = get_cached_binance_spot_instrument_provider( client=client, logger=logger, + clock=clock, account_type=config.account_type, config=config.instrument_provider, ) @@ -261,12 +269,14 @@ def create( # type: ignore instrument_provider=provider, account_type=config.account_type, base_url_ws=config.base_url_ws or default_base_url_ws, + use_agg_trade_ticks=config.use_agg_trade_ticks, ) else: # Get instrument provider singleton provider = get_cached_binance_futures_instrument_provider( client=client, logger=logger, + clock=clock, account_type=config.account_type, config=config.instrument_provider, ) @@ -282,6 +292,7 @@ def create( # type: ignore instrument_provider=provider, account_type=config.account_type, base_url_ws=config.base_url_ws or default_base_url_ws, + use_agg_trade_ticks=config.use_agg_trade_ticks, ) @@ -298,7 +309,7 @@ def create( # type: ignore msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ) -> Union[BinanceSpotExecutionClient, BinanceFuturesExecutionClient]: """ Create a new Binance execution client. @@ -317,7 +328,7 @@ def create( # type: ignore The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. 
Returns @@ -355,6 +366,7 @@ def create( # type: ignore provider = get_cached_binance_spot_instrument_provider( client=client, logger=logger, + clock=clock, account_type=config.account_type, config=config.instrument_provider, ) @@ -378,6 +390,7 @@ def create( # type: ignore provider = get_cached_binance_futures_instrument_provider( client=client, logger=logger, + clock=clock, account_type=config.account_type, config=config.instrument_provider, ) @@ -400,12 +413,12 @@ def create( # type: ignore def _get_api_key(account_type: BinanceAccountType, is_testnet: bool) -> str: if is_testnet: - if account_type.is_spot or account_type.is_margin: + if account_type.is_spot_or_margin: return os.environ["BINANCE_TESTNET_API_KEY"] else: return os.environ["BINANCE_FUTURES_TESTNET_API_KEY"] - if account_type.is_spot or account_type.is_margin: + if account_type.is_spot_or_margin: return os.environ["BINANCE_API_KEY"] else: return os.environ["BINANCE_FUTURES_API_KEY"] @@ -413,12 +426,12 @@ def _get_api_key(account_type: BinanceAccountType, is_testnet: bool) -> str: def _get_api_secret(account_type: BinanceAccountType, is_testnet: bool) -> str: if is_testnet: - if account_type.is_spot or account_type.is_margin: + if account_type.is_spot_or_margin: return os.environ["BINANCE_TESTNET_API_SECRET"] else: return os.environ["BINANCE_FUTURES_TESTNET_API_SECRET"] - if account_type.is_spot or account_type.is_margin: + if account_type.is_spot_or_margin: return os.environ["BINANCE_API_SECRET"] else: return os.environ["BINANCE_FUTURES_API_SECRET"] @@ -427,7 +440,7 @@ def _get_api_secret(account_type: BinanceAccountType, is_testnet: bool) -> str: def _get_http_base_url(account_type: BinanceAccountType, is_testnet: bool, is_us: bool) -> str: # Testnet base URLs if is_testnet: - if account_type in (BinanceAccountType.SPOT, BinanceAccountType.MARGIN): + if account_type.is_spot_or_margin: return "https://testnet.binance.vision" elif account_type == BinanceAccountType.FUTURES_USDT: return "https://testnet.binancefuture.com" @@ -440,9 +453,9 @@ def _get_http_base_url(account_type: BinanceAccountType, is_testnet: bool, is_us # Live base URLs top_level_domain: str = "us" if is_us else "com" - if account_type == BinanceAccountType.SPOT: + if account_type.is_spot: return f"https://api.binance.{top_level_domain}" - elif account_type == BinanceAccountType.MARGIN: + elif account_type.is_margin: return f"https://sapi.binance.{top_level_domain}" elif account_type == BinanceAccountType.FUTURES_USDT: return f"https://fapi.binance.{top_level_domain}" @@ -457,7 +470,7 @@ def _get_http_base_url(account_type: BinanceAccountType, is_testnet: bool, is_us def _get_ws_base_url(account_type: BinanceAccountType, is_testnet: bool, is_us: bool) -> str: # Testnet base URLs if is_testnet: - if account_type in (BinanceAccountType.SPOT, BinanceAccountType.MARGIN): + if account_type.is_spot_or_margin: return "wss://testnet.binance.vision" elif account_type == BinanceAccountType.FUTURES_USDT: return "wss://stream.binancefuture.com" @@ -470,7 +483,7 @@ def _get_ws_base_url(account_type: BinanceAccountType, is_testnet: bool, is_us: # Live base URLs top_level_domain: str = "us" if is_us else "com" - if account_type in (BinanceAccountType.SPOT, BinanceAccountType.MARGIN): + if account_type.is_spot_or_margin: return f"wss://stream.binance.{top_level_domain}:9443" elif account_type == BinanceAccountType.FUTURES_USDT: return f"wss://fstream.binance.{top_level_domain}" diff --git a/nautilus_trader/adapters/binance/futures/__init__.py 
b/nautilus_trader/adapters/binance/futures/__init__.py index e69de29bb2d1..ca16b56e4794 100644 --- a/nautilus_trader/adapters/binance/futures/__init__.py +++ b/nautilus_trader/adapters/binance/futures/__init__.py @@ -0,0 +1,14 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- diff --git a/nautilus_trader/adapters/binance/futures/data.py b/nautilus_trader/adapters/binance/futures/data.py index 29584e2c4022..47410005c0a6 100644 --- a/nautilus_trader/adapters/binance/futures/data.py +++ b/nautilus_trader/adapters/binance/futures/data.py @@ -14,67 +14,33 @@ # ------------------------------------------------------------------------------------------------- import asyncio -from typing import Any, Optional +from typing import Optional import msgspec -import pandas as pd -from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE +from nautilus_trader.adapters.binance.common.data import BinanceCommonDataClient from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import parse_symbol -from nautilus_trader.adapters.binance.common.parsing.data import parse_bar_http -from nautilus_trader.adapters.binance.common.parsing.data import parse_bar_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_diff_depth_stream_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_quote_tick_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_ticker_24hr_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_trade_tick_http -from nautilus_trader.adapters.binance.common.schemas import BinanceCandlestickMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceDataMsgWrapper -from nautilus_trader.adapters.binance.common.schemas import BinanceOrderBookMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceQuoteMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceTickerMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceTrade -from nautilus_trader.adapters.binance.common.types import BinanceBar -from nautilus_trader.adapters.binance.common.types import BinanceTicker +from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesEnumParser from nautilus_trader.adapters.binance.futures.http.market import BinanceFuturesMarketHttpAPI -from nautilus_trader.adapters.binance.futures.http.user import BinanceFuturesUserDataHttpAPI -from nautilus_trader.adapters.binance.futures.parsing.data import parse_futures_book_snapshot -from nautilus_trader.adapters.binance.futures.parsing.data import parse_futures_mark_price_ws -from 
nautilus_trader.adapters.binance.futures.parsing.data import parse_futures_trade_tick_ws from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesMarkPriceMsg from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesTradeMsg from nautilus_trader.adapters.binance.futures.types import BinanceFuturesMarkPriceUpdate from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.enums import LogColor from nautilus_trader.common.logging import Logger from nautilus_trader.common.providers import InstrumentProvider -from nautilus_trader.core.asynchronous import sleep0 -from nautilus_trader.core.datetime import secs_to_millis -from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.live.data_client import LiveMarketDataClient -from nautilus_trader.model.data.bar import BarType +from nautilus_trader.core.correctness import PyCondition from nautilus_trader.model.data.base import DataType from nautilus_trader.model.data.base import GenericData -from nautilus_trader.model.data.tick import QuoteTick from nautilus_trader.model.data.tick import TradeTick -from nautilus_trader.model.enums import BarAggregation -from nautilus_trader.model.enums import BookType -from nautilus_trader.model.enums import PriceType -from nautilus_trader.model.enums import bar_aggregation_to_str -from nautilus_trader.model.identifiers import ClientId from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.instruments.base import Instrument from nautilus_trader.model.orderbook.data import OrderBookData -from nautilus_trader.model.orderbook.data import OrderBookDeltas from nautilus_trader.model.orderbook.data import OrderBookSnapshot from nautilus_trader.msgbus.bus import MessageBus -class BinanceFuturesDataClient(LiveMarketDataClient): +class BinanceFuturesDataClient(BinanceCommonDataClient): """ Provides a data client for the `Binance Futures` exchange. @@ -98,6 +64,9 @@ class BinanceFuturesDataClient(LiveMarketDataClient): The account type for the client. base_url_ws : str, optional The base URL for the WebSocket client. + use_agg_trade_ticks : bool, default False + Whether to use aggregated trade tick endpoints instead of raw trade ticks. + TradeId of ticks will be the Aggregate tradeId returned by Binance. 
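Wiring the new use_agg_trade_ticks option through the data client config looks roughly like this (a sketch, assuming the account_type field that the factories above read from the config; API keys fall back to environment variables when omitted):

from nautilus_trader.adapters.binance.common.enums import BinanceAccountType
from nautilus_trader.adapters.binance.config import BinanceDataClientConfig

config = BinanceDataClientConfig(
    account_type=BinanceAccountType.FUTURES_USDT,
    testnet=True,
    use_agg_trade_ticks=True,  # trade ticks sourced from aggTrade streams/endpoints
)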
""" def __init__( @@ -111,102 +80,41 @@ def __init__( instrument_provider: InstrumentProvider, account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, base_url_ws: Optional[str] = None, + use_agg_trade_ticks: bool = False, ): - super().__init__( - loop=loop, - client_id=ClientId(BINANCE_VENUE.value), - venue=BINANCE_VENUE, - instrument_provider=instrument_provider, - msgbus=msgbus, - cache=cache, - clock=clock, - logger=logger, + PyCondition.true( + account_type.is_futures, + "account_type was not FUTURES_USDT or FUTURES_COIN", ) - assert account_type.is_futures, "account type is not for futures" - self._binance_account_type = account_type - self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE) - - self._update_instruments_interval: int = 60 * 60 # Once per hour (hardcode) - self._update_instruments_task: Optional[asyncio.Task] = None + # Futures HTTP API + self._futures_http_market = BinanceFuturesMarketHttpAPI(client, account_type) - # HTTP API - self._http_client = client - self._http_market = BinanceFuturesMarketHttpAPI(client=client, account_type=account_type) - self._http_user = BinanceFuturesUserDataHttpAPI(client=client, account_type=account_type) + # Futures enum parser + self._futures_enum_parser = BinanceFuturesEnumParser() - # Listen keys - self._ping_listen_keys_interval: int = 60 * 5 # Once every 5 mins (hardcode) - self._ping_listen_keys_task: Optional[asyncio.Task] = None - self._listen_key: Optional[str] = None - - # WebSocket API - self._ws_client = BinanceWebSocketClient( + # Instantiate common base class + super().__init__( loop=loop, + client=client, + market=self._futures_http_market, + enum_parser=self._futures_enum_parser, + msgbus=msgbus, + cache=cache, clock=clock, logger=logger, - handler=self._handle_ws_message, - base_url=base_url_ws, + instrument_provider=instrument_provider, + account_type=account_type, + base_url_ws=base_url_ws, + use_agg_trade_ticks=use_agg_trade_ticks, ) - # Hot caches - self._instrument_ids: dict[str, InstrumentId] = {} - self._book_buffer: dict[InstrumentId, list[OrderBookData]] = {} - - self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) - self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) - - async def _connect(self) -> None: - # Connect HTTP client - if not self._http_client.connected: - await self._http_client.connect() + # Register additional futures websocket handlers + self._ws_handlers["@markPrice"] = self._handle_mark_price - await self._instrument_provider.initialize() - - self._send_all_instruments_to_data_engine() - self._update_instruments_task = self.create_task(self._update_instruments()) - - # Connect WebSocket clients - self.create_task(self._connect_websockets()) - - async def _connect_websockets(self) -> None: - self._log.info("Awaiting subscriptions...") - await asyncio.sleep(4) - if self._ws_client.has_subscriptions: - await self._ws_client.connect() - - async def _update_instruments(self) -> None: - try: - while True: - self._log.debug( - f"Scheduled `update_instruments` to run in " - f"{self._update_instruments_interval}s.", - ) - await asyncio.sleep(self._update_instruments_interval) - await self._instrument_provider.load_all_async() - self._send_all_instruments_to_data_engine() - except asyncio.CancelledError: - self._log.debug("`update_instruments` task was canceled.") - - async def _disconnect(self) -> None: - # Cancel tasks - if self._update_instruments_task: - self._log.debug("Canceling `update_instruments` task...") - 
self._update_instruments_task.cancel() - self._update_instruments_task.done() - - if self._ping_listen_keys_task: - self._log.debug("Canceling `ping_listen_keys` task...") - self._ping_listen_keys_task.cancel() - self._ping_listen_keys_task.done() - - # Disconnect WebSocket client - if self._ws_client.is_connected: - await self._ws_client.disconnect() - - # Disconnect HTTP client - if self._http_client.connected: - await self._http_client.disconnect() + # Websocket msgspec decoders + self._decoder_futures_trade_msg = msgspec.json.Decoder(BinanceFuturesTradeMsg) + self._decoder_futures_mark_price_msg = msgspec.json.Decoder(BinanceFuturesMarkPriceMsg) # -- SUBSCRIPTIONS ---------------------------------------------------------------------------- @@ -231,148 +139,6 @@ async def _subscribe(self, data_type: DataType) -> None: f"Cannot subscribe to {data_type.type} (not implemented).", ) - async def _subscribe_instruments(self) -> None: - pass # Do nothing further - - async def _subscribe_instrument(self, instrument_id: InstrumentId) -> None: - pass # Do nothing further - - async def _subscribe_order_book_deltas( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - kwargs: Optional[dict] = None, - ) -> None: - await self._subscribe_order_book( - instrument_id=instrument_id, - book_type=book_type, - depth=depth, - ) - - async def _subscribe_order_book_snapshots( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - kwargs: Optional[dict] = None, - ) -> None: - await self._subscribe_order_book( - instrument_id=instrument_id, - book_type=book_type, - depth=depth, - ) - - async def _subscribe_order_book( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - ) -> None: - if book_type == BookType.L3_MBO: - self._log.error( - "Cannot subscribe to order book deltas: " - "L3_MBO data is not published by Binance. " - "Valid book types are L1_TBBO, L2_MBP.", - ) - return - - if depth is None or depth == 0: - depth = 20 - - # Add delta stream buffer - self._book_buffer[instrument_id] = [] - - if 0 < depth <= 20: - if depth not in (5, 10, 20): - self._log.error( - "Cannot subscribe to order book snapshots: " - f"invalid `depth`, was {depth}. 
" - "Valid depths are 5, 10 or 20.", - ) - return - self._ws_client.subscribe_partial_book_depth( - symbol=instrument_id.symbol.value, - depth=depth, - speed=0, - ) - else: - self._ws_client.subscribe_diff_book_depth( - symbol=instrument_id.symbol.value, - speed=0, - ) - - while not self._ws_client.is_connected: - await sleep0() - - data: dict[str, Any] = await self._http_market.depth( - symbol=instrument_id.symbol.value, - limit=depth, - ) - - ts_event: int = self._clock.timestamp_ns() - last_update_id: int = data.get("lastUpdateId", 0) - - snapshot = OrderBookSnapshot( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - bids=[[float(o[0]), float(o[1])] for o in data["bids"]], - asks=[[float(o[0]), float(o[1])] for o in data["asks"]], - ts_event=ts_event, - ts_init=ts_event, - sequence=last_update_id, - ) - - self._handle_data(snapshot) - - book_buffer = self._book_buffer.pop(instrument_id, []) - for deltas in book_buffer: - if deltas.sequence <= last_update_id: - continue - self._handle_data(deltas) - - async def _subscribe_ticker(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_ticker(instrument_id.symbol.value) - - async def _subscribe_quote_ticks(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_book_ticker(instrument_id.symbol.value) - - async def _subscribe_trade_ticks(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_trades(instrument_id.symbol.value) - - async def _subscribe_bars(self, bar_type: BarType) -> None: - if not bar_type.spec.is_time_aggregated(): - self._log.error( - f"Cannot subscribe to {bar_type}: only time bars are aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation in (BarAggregation.MILLISECOND, BarAggregation.SECOND): - self._log.error( - f"Cannot subscribe to {bar_type}: " - f"{bar_aggregation_to_str(bar_type.spec.aggregation)} " - f"bars are not aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation == BarAggregation.MINUTE: - resolution = "m" - elif bar_type.spec.aggregation == BarAggregation.HOUR: - resolution = "h" - elif bar_type.spec.aggregation == BarAggregation.DAY: - resolution = "d" - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BarAggregation`, " # pragma: no cover - f"was {bar_aggregation_to_str(bar_type.spec.aggregation)}", # pragma: no cover - ) - - self._ws_client.subscribe_bars( - symbol=bar_type.instrument_id.symbol.value, - interval=f"{bar_type.spec.step}{resolution}", - ) - self._add_subscription_bars(bar_type) - async def _unsubscribe(self, data_type: DataType) -> None: if data_type.type == BinanceFuturesMarkPriceUpdate: if not self._binance_account_type.is_futures: @@ -392,240 +158,15 @@ async def _unsubscribe(self, data_type: DataType) -> None: f"Cannot unsubscribe from {data_type.type} (not implemented).", ) - async def _unsubscribe_instruments(self) -> None: - pass # Do nothing further - - async def _unsubscribe_instrument(self, instrument_id: InstrumentId) -> None: - pass # Do nothing further - - async def _unsubscribe_order_book_deltas(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_order_book_snapshots(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_ticker(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_quote_ticks(self, 
instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_trade_ticks(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_bars(self, bar_type: BarType) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - # -- REQUESTS --------------------------------------------------------------------------------- - - async def _request_instrument(self, instrument_id: InstrumentId, correlation_id: UUID4) -> None: - instrument: Optional[Instrument] = self._instrument_provider.find(instrument_id) - if instrument is None: - self._log.error(f"Cannot find instrument for {instrument_id}.") - return - - data_type = DataType( - type=Instrument, - metadata={"instrument_id": instrument_id}, - ) - - self._handle_data_response( - data_type=data_type, - data=[instrument], # Data engine handles lists of instruments - correlation_id=correlation_id, - ) - - async def _request_quote_ticks( - self, - instrument_id: InstrumentId, # noqa - limit: int, # noqa - correlation_id: UUID4, # noqa - from_datetime: Optional[pd.Timestamp] = None, # noqa - to_datetime: Optional[pd.Timestamp] = None, # noqa - ) -> None: - self._log.error( - "Cannot request historical quote ticks: not published by Binance.", - ) - - async def _request_trade_ticks( - self, - instrument_id: InstrumentId, - limit: int, - correlation_id: UUID4, - from_datetime: Optional[pd.Timestamp] = None, - to_datetime: Optional[pd.Timestamp] = None, - ) -> None: - if limit == 0 or limit > 1000: - limit = 1000 - - if from_datetime is not None or to_datetime is not None: - self._log.warning( - "Trade ticks have been requested with a from/to time range, " - f"however the request will be for the most recent {limit}.", - ) - - response: list[BinanceTrade] = await self._http_market.trades( - instrument_id.symbol.value, - limit, - ) - - ticks: list[TradeTick] = [ - parse_trade_tick_http( - trade=trade, - instrument_id=instrument_id, - ts_init=self._clock.timestamp_ns(), - ) - for trade in response - ] - - self._handle_trade_ticks(instrument_id, ticks, correlation_id) - - async def _request_bars( # noqa (too complex) - self, - bar_type: BarType, - limit: int, - correlation_id: UUID4, - from_datetime: Optional[pd.Timestamp] = None, - to_datetime: Optional[pd.Timestamp] = None, - ) -> None: - if bar_type.is_internally_aggregated(): - self._log.error( - f"Cannot request {bar_type}: " - f"only historical bars with EXTERNAL aggregation available from Binance.", - ) - return - - if not bar_type.spec.is_time_aggregated(): - self._log.error( - f"Cannot request {bar_type}: only time bars are aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation in (BarAggregation.MILLISECOND, BarAggregation.SECOND): - self._log.error( - f"Cannot request {bar_type}: " - f"{bar_aggregation_to_str(bar_type.spec.aggregation)} " - f"bars are not aggregated by Binance.", - ) - return - - if bar_type.spec.price_type != PriceType.LAST: - self._log.error( - f"Cannot request {bar_type}: " - f"only historical bars for LAST price type available from Binance.", - ) - return - - if limit == 0 or limit > 1000: - limit = 1000 - - if bar_type.spec.aggregation == BarAggregation.MINUTE: - resolution = "m" - elif bar_type.spec.aggregation == BarAggregation.HOUR: - resolution = "h" - elif bar_type.spec.aggregation == BarAggregation.DAY: - resolution = "d" - else: - raise RuntimeError( # pragma: no cover (design-time error) - 
f"invalid `BarAggregation`, " # pragma: no cover - f"was {bar_aggregation_to_str(bar_type.spec.aggregation)}", # pragma: no cover - ) - - start_time_ms = None - if from_datetime is not None: - start_time_ms = secs_to_millis(from_datetime.timestamp()) - - end_time_ms = None - if to_datetime is not None: - end_time_ms = secs_to_millis(to_datetime.timestamp()) + # -- WEBSOCKET HANDLERS --------------------------------------------------------------------------------- - data: list[list[Any]] = await self._http_market.klines( - symbol=bar_type.instrument_id.symbol.value, - interval=f"{bar_type.spec.step}{resolution}", - start_time_ms=start_time_ms, - end_time_ms=end_time_ms, - limit=limit, - ) - - bars: list[BinanceBar] = [ - parse_bar_http( - bar_type, - values=b, - ts_init=self._clock.timestamp_ns(), - ) - for b in data - ] - partial: BinanceBar = bars.pop() - - self._handle_bars(bar_type, bars, partial, correlation_id) - - def _send_all_instruments_to_data_engine(self) -> None: - for instrument in self._instrument_provider.get_all().values(): - self._handle_data(instrument) - - for currency in self._instrument_provider.currencies().values(): - self._cache.add_currency(currency) - - def _get_cached_instrument_id(self, symbol: str) -> InstrumentId: - # Parse instrument ID - nautilus_symbol: str = parse_symbol(symbol, account_type=self._binance_account_type) - instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol) - if not instrument_id: - instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE) - self._instrument_ids[nautilus_symbol] = instrument_id - return instrument_id - - def _handle_ws_message(self, raw: bytes) -> None: - # TODO(cs): Uncomment for development - # self._log.info(str(raw), LogColor.CYAN) - - wrapper = msgspec.json.decode(raw, type=BinanceDataMsgWrapper) - - try: - if "@depth@" in wrapper.stream: - self._handle_book_diff_update(raw) - elif "@depth" in wrapper.stream: - self._handle_book_update(raw) - elif "@bookTicker" in wrapper.stream: - self._handle_book_ticker(raw) - elif "@trade" in wrapper.stream: - self._handle_trade(raw) - elif "@ticker" in wrapper.stream: - self._handle_ticker(raw) - elif "@kline" in wrapper.stream: - self._handle_kline(raw) - elif "@markPrice" in wrapper.stream: - self._handle_mark_price(raw) - else: - self._log.error( - f"Unrecognized websocket message type " f"{msgspec.json.decode(raw)['stream']}", - ) - except (TypeError, ValueError) as e: - self._log.error(f"Error handling websocket message, {e}") - - def _handle_book_diff_update(self, raw: bytes) -> None: - msg: BinanceOrderBookMsg = msgspec.json.decode(raw, type=BinanceOrderBookMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - book_deltas: OrderBookDeltas = parse_diff_depth_stream_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - book_buffer: Optional[list[OrderBookData]] = self._book_buffer.get(instrument_id) - if book_buffer is not None: - book_buffer.append(book_deltas) - else: - self._handle_data(book_deltas) - - def _handle_book_update(self, raw: bytes) -> None: - msg: BinanceOrderBookMsg = msgspec.json.decode(raw, type=BinanceOrderBookMsg) + def _handle_book_partial_update(self, raw: bytes) -> None: + msg = self._decoder_order_book_msg.decode(raw) instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - book_snapshot: OrderBookSnapshot = parse_futures_book_snapshot( + book_snapshot: OrderBookSnapshot = msg.data.parse_to_order_book_snapshot( 
instrument_id=instrument_id, - data=msg.data, ts_init=self._clock.timestamp_ns(), ) - # Check if book buffer active book_buffer: Optional[list[OrderBookData]] = self._book_buffer.get(instrument_id) if book_buffer is not None: @@ -633,55 +174,21 @@ def _handle_book_update(self, raw: bytes) -> None: else: self._handle_data(book_snapshot) - def _handle_book_ticker(self, raw: bytes) -> None: - msg: BinanceQuoteMsg = msgspec.json.decode(raw, type=BinanceQuoteMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - quote_tick: QuoteTick = parse_quote_tick_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(quote_tick) - def _handle_trade(self, raw: bytes) -> None: - msg: BinanceFuturesTradeMsg = msgspec.json.decode(raw, type=BinanceFuturesTradeMsg) + # NOTE @trade is an undocumented endpoint for Futures exchanges + msg = self._decoder_futures_trade_msg.decode(raw) instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - trade_tick: TradeTick = parse_futures_trade_tick_ws( + trade_tick: TradeTick = msg.data.parse_to_trade_tick( instrument_id=instrument_id, - data=msg.data, ts_init=self._clock.timestamp_ns(), ) self._handle_data(trade_tick) - def _handle_ticker(self, raw: bytes) -> None: - msg: BinanceTickerMsg = msgspec.json.decode(raw, type=BinanceTickerMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - ticker: BinanceTicker = parse_ticker_24hr_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(ticker) - - def _handle_kline(self, raw: bytes) -> None: - msg: BinanceCandlestickMsg = msgspec.json.decode(raw, type=BinanceCandlestickMsg) - if not msg.data.k.x: - return # Not closed yet - - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - bar: BinanceBar = parse_bar_ws( - instrument_id=instrument_id, - data=msg.data.k, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(bar) - def _handle_mark_price(self, raw: bytes) -> None: - msg: BinanceFuturesMarkPriceMsg = msgspec.json.decode(raw, type=BinanceFuturesMarkPriceMsg) + msg = self._decoder_futures_mark_price_msg.decode(raw) instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - data: BinanceFuturesMarkPriceUpdate = parse_futures_mark_price_ws( + data = msg.data.parse_to_binance_futures_mark_price_update( instrument_id=instrument_id, - data=msg.data, ts_init=self._clock.timestamp_ns(), ) data_type = DataType( diff --git a/nautilus_trader/adapters/binance/futures/enums.py b/nautilus_trader/adapters/binance/futures/enums.py index 795957b02f81..efa15766a600 100644 --- a/nautilus_trader/adapters/binance/futures/enums.py +++ b/nautilus_trader/adapters/binance/futures/enums.py @@ -16,6 +16,14 @@ from enum import Enum from enum import unique +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.model.enums import OrderType +from nautilus_trader.model.enums import PositionSide +from nautilus_trader.model.enums import TimeInForce +from nautilus_trader.model.enums import TriggerType +from nautilus_trader.model.orders.base import Order + """ Defines `Binance` Futures specific enums. 
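# The websocket handlers earlier in this diff reuse preallocated
# `msgspec.json.Decoder` instances instead of calling
# `msgspec.json.decode(raw, type=...)` per message, which avoids rebuilding
# type information on every decode. A self-contained sketch of the pattern,
# with an illustrative `MarkPriceMsg` schema rather than the adapter's real one:
import msgspec


class MarkPriceMsg(msgspec.Struct):
    s: str  # symbol
    p: str  # mark price


# Construct the decoder once (e.g. in __init__), then reuse it per message
decoder = msgspec.json.Decoder(MarkPriceMsg)

msg = decoder.decode(b'{"s": "BTCUSDT", "p": "23456.78"}')
assert msg.s == "BTCUSDT"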
@@ -51,19 +59,6 @@ class BinanceFuturesContractStatus(Enum):
     CLOSE = "CLOSE"


-@unique
-class BinanceFuturesOrderType(Enum):
-    """Represents a `Binance Futures` price type."""
-
-    LIMIT = "LIMIT"
-    MARKET = "MARKET"
-    STOP = "STOP"
-    STOP_MARKET = "STOP_MARKET"
-    TAKE_PROFIT = "TAKE_PROFIT"
-    TAKE_PROFIT_MARKET = "TAKE_PROFIT_MARKET"
-    TRAILING_STOP_MARKET = "TRAILING_STOP_MARKET"
-
-
 @unique
 class BinanceFuturesPositionSide(Enum):
     """Represents a `Binance Futures` position side."""
@@ -73,16 +68,6 @@ class BinanceFuturesPositionSide(Enum):
     SHORT = "SHORT"


-@unique
-class BinanceFuturesTimeInForce(Enum):
-    """Represents a `Binance Futures` order time in force."""
-
-    GTC = "GTC"
-    IOC = "IOC"
-    FOK = "FOK"
-    GTX = "GTX"  # Good Till Crossing (Post Only)
-
-
 @unique
 class BinanceFuturesWorkingType(Enum):
     """Represents a `Binance Futures` working type."""
@@ -129,3 +114,83 @@ class BinanceFuturesEventType(Enum):
     ACCOUNT_UPDATE = "ACCOUNT_UPDATE"
     ORDER_TRADE_UPDATE = "ORDER_TRADE_UPDATE"
     ACCOUNT_CONFIG_UPDATE = "ACCOUNT_CONFIG_UPDATE"
+
+
+class BinanceFuturesEnumParser(BinanceEnumParser):
+    """
+    Provides parsing methods for enums used by the `Binance Futures` exchange.
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+
+        self.futures_ext_to_int_order_type = {
+            BinanceOrderType.LIMIT: OrderType.LIMIT,
+            BinanceOrderType.MARKET: OrderType.MARKET,
+            BinanceOrderType.STOP: OrderType.STOP_LIMIT,
+            BinanceOrderType.STOP_MARKET: OrderType.STOP_MARKET,
+            BinanceOrderType.TAKE_PROFIT: OrderType.LIMIT_IF_TOUCHED,
+            BinanceOrderType.TAKE_PROFIT_MARKET: OrderType.MARKET_IF_TOUCHED,
+            BinanceOrderType.TRAILING_STOP_MARKET: OrderType.TRAILING_STOP_MARKET,
+        }
+        self.futures_int_to_ext_order_type = {
+            b: a for a, b in self.futures_ext_to_int_order_type.items()
+        }
+
+        self.futures_ext_to_int_position_side = {
+            BinanceFuturesPositionSide.BOTH: PositionSide.FLAT,
+            BinanceFuturesPositionSide.LONG: PositionSide.LONG,
+            BinanceFuturesPositionSide.SHORT: PositionSide.SHORT,
+        }
+
+        self.futures_valid_time_in_force = {
+            TimeInForce.GTC,
+            TimeInForce.GTD,  # Will be transformed to GTC with warning
+            TimeInForce.FOK,
+            TimeInForce.IOC,
+        }
+
+        self.futures_valid_order_types = {
+            OrderType.MARKET,
+            OrderType.LIMIT,
+            OrderType.STOP_MARKET,
+            OrderType.STOP_LIMIT,
+            OrderType.MARKET_IF_TOUCHED,
+            OrderType.LIMIT_IF_TOUCHED,
+            OrderType.TRAILING_STOP_MARKET,
+        }
+
+    def parse_binance_order_type(self, order_type: BinanceOrderType) -> OrderType:
+        try:
+            return self.futures_ext_to_int_order_type[order_type]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Binance Futures order type, was {order_type}",  # pragma: no cover
+            )
+
+    def parse_internal_order_type(self, order: Order) -> BinanceOrderType:
+        try:
+            return self.futures_int_to_ext_order_type[order.order_type]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized or unsupported internal order type, was {order.order_type}",  # pragma: no cover
+            )
+
+    def parse_binance_trigger_type(self, trigger_type: str) -> TriggerType:
+        if trigger_type == BinanceFuturesWorkingType.CONTRACT_PRICE.value:
+            return TriggerType.LAST_TRADE
+        elif trigger_type == BinanceFuturesWorkingType.MARK_PRICE.value:
+            return TriggerType.MARK_PRICE
+        else:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Binance Futures working type, was {trigger_type}",  # pragma: no cover
+            )
+
+    def parse_futures_position_side(
+        self,
+        position_side: BinanceFuturesPositionSide,
+    ) -> PositionSide:
+        try:
+            return self.futures_ext_to_int_position_side[position_side]
+        except KeyError:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"unrecognized Binance Futures position side, was {position_side}",  # pragma: no cover
+            )
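# The parser above builds one forward mapping and derives the reverse by
# inverting the dict, which stays correct as long as the mapping is
# one-to-one. A runnable sketch with stand-in enums (not the real
# `BinanceOrderType`/`OrderType`):
from enum import Enum


class ExtOrderType(Enum):  # venue-side representation
    STOP = "STOP"
    TAKE_PROFIT = "TAKE_PROFIT"


class IntOrderType(Enum):  # internal representation
    STOP_LIMIT = 1
    LIMIT_IF_TOUCHED = 2


ext_to_int = {
    ExtOrderType.STOP: IntOrderType.STOP_LIMIT,
    ExtOrderType.TAKE_PROFIT: IntOrderType.LIMIT_IF_TOUCHED,
}
int_to_ext = {v: k for k, v in ext_to_int.items()}  # free reverse lookup

assert int_to_ext[ext_to_int[ExtOrderType.STOP]] is ExtOrderType.STOP
# If two external values ever mapped to the same internal value, the
# inversion would silently drop an entry, so one-to-one is a precondition.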
diff --git a/nautilus_trader/adapters/binance/futures/execution.py b/nautilus_trader/adapters/binance/futures/execution.py
index 166a279c5516..d14223d480f1 100644
--- a/nautilus_trader/adapters/binance/futures/execution.py
+++ b/nautilus_trader/adapters/binance/futures/execution.py
@@ -18,101 +18,39 @@
 from typing import Optional

 import msgspec
-import pandas as pd

 from nautilus_trader.accounting.accounts.margin import MarginAccount
-from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE
 from nautilus_trader.adapters.binance.common.enums import BinanceAccountType
-from nautilus_trader.adapters.binance.common.enums import BinanceExecutionType
-from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide
-from nautilus_trader.adapters.binance.common.functions import format_symbol
-from nautilus_trader.adapters.binance.common.functions import parse_symbol
-from nautilus_trader.adapters.binance.common.schemas import BinanceListenKey
+from nautilus_trader.adapters.binance.common.execution import BinanceCommonExecutionClient
+from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesEnumParser
 from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesEventType
-from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesTimeInForce
 from nautilus_trader.adapters.binance.futures.http.account import BinanceFuturesAccountHttpAPI
 from nautilus_trader.adapters.binance.futures.http.market import BinanceFuturesMarketHttpAPI
 from nautilus_trader.adapters.binance.futures.http.user import BinanceFuturesUserDataHttpAPI
-from nautilus_trader.adapters.binance.futures.parsing.account import parse_account_balances_http
-from nautilus_trader.adapters.binance.futures.parsing.account import parse_account_balances_ws
-from nautilus_trader.adapters.binance.futures.parsing.account import parse_account_margins_http
-from nautilus_trader.adapters.binance.futures.parsing.execution import binance_order_type
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_order_report_http
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_order_type
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_position_report_http
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_time_in_force
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_trade_report_http
-from nautilus_trader.adapters.binance.futures.parsing.execution import parse_trigger_type
 from nautilus_trader.adapters.binance.futures.providers import BinanceFuturesInstrumentProvider
-from nautilus_trader.adapters.binance.futures.rules import BINANCE_FUTURES_VALID_ORDER_TYPES
-from nautilus_trader.adapters.binance.futures.rules import BINANCE_FUTURES_VALID_TIF
 from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAccountInfo
-from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAccountTrade
-from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesOrder
 from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesPositionRisk
-from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesAccountUpdateMsg
 from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesAccountUpdateWrapper
-from
nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesOrderData -from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesOrderUpdateMsg from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesOrderUpdateWrapper from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesUserMsgWrapper from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.http.error import BinanceError -from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogColor from nautilus_trader.common.logging import Logger from nautilus_trader.core.correctness import PyCondition from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.core.datetime import secs_to_millis from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.execution.messages import CancelAllOrders -from nautilus_trader.execution.messages import CancelOrder -from nautilus_trader.execution.messages import ModifyOrder -from nautilus_trader.execution.messages import SubmitOrder -from nautilus_trader.execution.messages import SubmitOrderList -from nautilus_trader.execution.reports import OrderStatusReport from nautilus_trader.execution.reports import PositionStatusReport -from nautilus_trader.execution.reports import TradeReport -from nautilus_trader.live.execution_client import LiveExecutionClient -from nautilus_trader.model.enums import AccountType -from nautilus_trader.model.enums import LiquiditySide -from nautilus_trader.model.enums import OmsType -from nautilus_trader.model.enums import OrderSide -from nautilus_trader.model.enums import OrderStatus from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import TimeInForce -from nautilus_trader.model.enums import TrailingOffsetType -from nautilus_trader.model.enums import TriggerType -from nautilus_trader.model.enums import order_side_to_str from nautilus_trader.model.enums import order_type_to_str from nautilus_trader.model.enums import time_in_force_to_str -from nautilus_trader.model.enums import trailing_offset_type_to_str -from nautilus_trader.model.enums import trigger_type_to_str -from nautilus_trader.model.identifiers import AccountId -from nautilus_trader.model.identifiers import ClientId -from nautilus_trader.model.identifiers import ClientOrderId from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import PositionId -from nautilus_trader.model.identifiers import StrategyId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.identifiers import VenueOrderId -from nautilus_trader.model.instruments.base import Instrument -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity from nautilus_trader.model.orders.base import Order -from nautilus_trader.model.orders.limit import LimitOrder -from nautilus_trader.model.orders.market import MarketOrder -from nautilus_trader.model.orders.stop_market import StopMarketOrder -from nautilus_trader.model.orders.trailing_stop_market import TrailingStopMarketOrder -from nautilus_trader.model.position import Position from nautilus_trader.msgbus.bus import MessageBus -class 
BinanceFuturesExecutionClient(LiveExecutionClient): +class BinanceFuturesExecutionClient(BinanceCommonExecutionClient): """ Provides an execution client for the `Binance Futures` exchange. @@ -136,7 +74,7 @@ class BinanceFuturesExecutionClient(LiveExecutionClient): The account type for the client. base_url_ws : str, optional The base URL for the WebSocket client. - clock_sync_interval_secs : int, default 900 + clock_sync_interval_secs : int, default 0 The interval (seconds) between syncing the Nautilus clock with the Binance server(s) clock. If zero, then will *not* perform syncing. warn_gtd_to_gtc : bool, default True @@ -154,111 +92,71 @@ def __init__( instrument_provider: BinanceFuturesInstrumentProvider, account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, base_url_ws: Optional[str] = None, - clock_sync_interval_secs: int = 900, + clock_sync_interval_secs: int = 0, warn_gtd_to_gtc: bool = True, ): - super().__init__( - loop=loop, - client_id=ClientId(BINANCE_VENUE.value), - venue=BINANCE_VENUE, - oms_type=OmsType.HEDGING, - instrument_provider=instrument_provider, - account_type=AccountType.MARGIN, - base_currency=None, - msgbus=msgbus, - cache=cache, - clock=clock, - logger=logger, + PyCondition.true( + account_type.is_futures, + "account_type was not FUTURES_USDT or FUTURES_COIN", ) - self._binance_account_type = account_type - self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE) - - self._set_account_id(AccountId(f"{BINANCE_VENUE.value}-futures-master")) - - # Settings - self._warn_gtd_to_gtc = warn_gtd_to_gtc - - # Clock sync - self._clock_sync_interval_secs = clock_sync_interval_secs - - # Tasks - self._task_clock_sync: Optional[asyncio.Task] = None - - # HTTP API - self._http_client = client - self._http_account = BinanceFuturesAccountHttpAPI(client=client, account_type=account_type) - self._http_market = BinanceFuturesMarketHttpAPI(client=client, account_type=account_type) - self._http_user = BinanceFuturesUserDataHttpAPI(client=client, account_type=account_type) + # Futures HTTP API + self._futures_http_account = BinanceFuturesAccountHttpAPI(client, clock, account_type) + self._futures_http_market = BinanceFuturesMarketHttpAPI(client, account_type) + self._futures_http_user = BinanceFuturesUserDataHttpAPI(client, account_type) - # Listen keys - self._ping_listen_keys_interval: int = 60 * 5 # Once every 5 mins (hardcode) - self._ping_listen_keys_task: Optional[asyncio.Task] = None - self._listen_key: Optional[str] = None + # Futures enum parser + self._futures_enum_parser = BinanceFuturesEnumParser() - # WebSocket API - self._ws_client = BinanceWebSocketClient( + # Instantiate common base class + super().__init__( loop=loop, + client=client, + account=self._futures_http_account, + market=self._futures_http_market, + user=self._futures_http_user, + enum_parser=self._futures_enum_parser, + msgbus=msgbus, + cache=cache, clock=clock, logger=logger, - handler=self._handle_user_ws_message, - base_url=base_url_ws, + instrument_provider=instrument_provider, + account_type=account_type, + base_url_ws=base_url_ws, + clock_sync_interval_secs=clock_sync_interval_secs, + warn_gtd_to_gtc=warn_gtd_to_gtc, ) - # Hot caches - self._instrument_ids: dict[str, InstrumentId] = {} - - self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) - self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) - - async def _connect(self) -> None: - # Connect HTTP client - if not self._http_client.connected: - await 
self._http_client.connect() - - await self._instrument_provider.initialize() - - # Authenticate API key and update account(s) - account_info: BinanceFuturesAccountInfo = await self._http_account.account(recv_window=5000) - self._authenticate_api_key(account_info=account_info) - - binance_positions: list[BinanceFuturesPositionRisk] - binance_positions = await self._http_account.get_position_risk() - await self._update_account_state( - account_info=account_info, - position_risks=binance_positions, + # Register additional futures websocket user data event handlers + self._futures_user_ws_handlers = { + BinanceFuturesEventType.ACCOUNT_UPDATE: self._handle_account_update, + BinanceFuturesEventType.ORDER_TRADE_UPDATE: self._handle_order_trade_update, + BinanceFuturesEventType.MARGIN_CALL: self._handle_margin_call, + BinanceFuturesEventType.ACCOUNT_CONFIG_UPDATE: self._handle_account_config_update, + BinanceFuturesEventType.LISTEN_KEY_EXPIRED: self._handle_listen_key_expired, + } + + # Websocket futures schema decoders + self._decoder_futures_user_msg_wrapper = msgspec.json.Decoder(BinanceFuturesUserMsgWrapper) + self._decoder_futures_order_update_wrapper = msgspec.json.Decoder( + BinanceFuturesOrderUpdateWrapper, + ) + self._decoder_futures_account_update_wrapper = msgspec.json.Decoder( + BinanceFuturesAccountUpdateWrapper, ) - # Get listen keys - msg: BinanceListenKey = await self._http_user.create_listen_key() - - self._listen_key = msg.listenKey - self._log.info(f"Listen key {self._listen_key}") - self._ping_listen_keys_task = self.create_task(self._ping_listen_keys()) - - # Setup clock sync - if self._clock_sync_interval_secs > 0: - self._task_clock_sync = self.create_task(self._sync_clock_with_binance_server()) - - # Connect WebSocket client - self._ws_client.subscribe(key=self._listen_key) - await self._ws_client.connect() - - def _authenticate_api_key(self, account_info: BinanceFuturesAccountInfo) -> None: + async def _update_account_state(self) -> None: + account_info: BinanceFuturesAccountInfo = ( + await self._futures_http_account.query_futures_account_info(recv_window=str(5000)) + ) if account_info.canTrade: self._log.info("Binance API key authenticated.", LogColor.GREEN) self._log.info(f"API key {self._http_client.api_key} has trading permissions.") else: self._log.error("Binance API key does not have trading permissions.") - - async def _update_account_state( - self, - account_info: BinanceFuturesAccountInfo, - position_risks: list[BinanceFuturesPositionRisk], - ) -> None: self.generate_account_state( - balances=parse_account_balances_http(assets=account_info.assets), - margins=parse_account_margins_http(assets=account_info.assets), + balances=account_info.parse_to_account_balances(), + margins=account_info.parse_to_margin_balances(), reported=True, ts_event=millis_to_nanos(account_info.updateTime), ) @@ -266,323 +164,72 @@ async def _update_account_state( await asyncio.sleep(0.1) account: MarginAccount = self.get_account() - + position_risks = await self._futures_http_account.query_futures_position_risk() for position in position_risks: instrument_id: InstrumentId = self._get_cached_instrument_id(position.symbol) leverage = Decimal(position.leverage) account.set_leverage(instrument_id, leverage) self._log.debug(f"Set leverage {position.symbol} {leverage}X") - async def _ping_listen_keys(self) -> None: - try: - while True: - self._log.debug( - f"Scheduled `ping_listen_keys` to run in " - f"{self._ping_listen_keys_interval}s.", - ) - await 
asyncio.sleep(self._ping_listen_keys_interval) - if self._listen_key: - self._log.debug(f"Pinging WebSocket listen key {self._listen_key}...") - await self._http_user.ping_listen_key(self._listen_key) - except asyncio.CancelledError: - self._log.debug("`ping_listen_keys` task was canceled.") - - async def _sync_clock_with_binance_server(self) -> None: - try: - while True: - # self._log.debug( - # f"Syncing Nautilus clock with Binance server...", - # ) - response: dict[str, int] = await self._http_market.time() - server_time: int = response["serverTime"] - self._log.info(f"Binance server time {server_time} UNIX (ms).") - - nautilus_time = self._clock.timestamp_ms() - self._log.info(f"Nautilus clock time {nautilus_time} UNIX (ms).") - - # offset_ns = millis_to_nanos(nautilus_time - server_time) - # self._log.info(f"Setting Nautilus clock offset {offset_ns} (ns).") - # self._clock.set_offset(offset_ns) - - await asyncio.sleep(self._clock_sync_interval_secs) - except asyncio.CancelledError: - self._log.debug("`sync_clock_with_binance_server` task was canceled.") - - async def _disconnect(self) -> None: - # Cancel tasks - if self._ping_listen_keys_task: - self._log.debug("Canceling `ping_listen_keys` task...") - self._ping_listen_keys_task.cancel() - self._ping_listen_keys_task.done() - - if self._task_clock_sync: - self._log.debug("Canceling `task_clock_sync` task...") - self._task_clock_sync.cancel() - self._task_clock_sync.done() - - # Disconnect WebSocket clients - if self._ws_client.is_connected: - await self._ws_client.disconnect() - - # Disconnect HTTP client - if self._http_client.connected: - await self._http_client.disconnect() - # -- EXECUTION REPORTS ------------------------------------------------------------------------ - async def generate_order_status_report( - self, - instrument_id: InstrumentId, - client_order_id: Optional[ClientOrderId] = None, - venue_order_id: Optional[VenueOrderId] = None, - ) -> Optional[OrderStatusReport]: - PyCondition.false( - client_order_id is None and venue_order_id is None, - "both `client_order_id` and `venue_order_id` were `None`", - ) - - self._log.info( - f"Generating OrderStatusReport for " - f"{repr(client_order_id) if client_order_id else ''} " - f"{repr(venue_order_id) if venue_order_id else ''}...", - ) - - try: - binance_order: Optional[BinanceFuturesOrder] - if venue_order_id: - binance_order = await self._http_account.get_order( - symbol=instrument_id.symbol.value, - order_id=venue_order_id.value, - ) - else: - binance_order = await self._http_account.get_order( - symbol=instrument_id.symbol.value, - orig_client_order_id=client_order_id.value - if client_order_id is not None - else None, - ) - except BinanceError as e: - self._log.error( - f"Cannot generate order status report for {repr(client_order_id)}: {e.message}", - ) - return None - - if not binance_order: - return None - - report: OrderStatusReport = parse_order_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(binance_order.symbol), - data=binance_order, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - return report - - async def generate_order_status_reports( # noqa (C901 too complex) + async def _get_binance_position_status_reports( self, - instrument_id: InstrumentId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, - open_only: bool = False, - ) -> list[OrderStatusReport]: - self._log.info(f"Generating OrderStatusReports for {self.id}...") - 
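# The removed `_ping_listen_keys` loop above shows the keepalive pattern the
# common client now owns: sleep for the interval, ping, and exit quietly on
# cancellation. A generic, runnable sketch (the `ping` coroutine is a
# stand-in, not the Binance client API):
import asyncio

PING_INTERVAL_SECS = 60 * 5  # once every 5 mins


async def ping() -> None:
    print("keepalive ping")


async def ping_listen_keys(interval_secs: float) -> None:
    try:
        while True:
            await asyncio.sleep(interval_secs)
            await ping()
    except asyncio.CancelledError:
        pass  # task canceled on disconnect


async def main() -> None:
    task = asyncio.create_task(ping_listen_keys(0.01))  # shortened for demo
    await asyncio.sleep(0.05)
    task.cancel()


asyncio.run(main())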
- # Check cache for all active symbols - open_orders: list[Order] = self._cache.orders_open(venue=self.venue) - open_positions: list[Position] = self._cache.positions_open(venue=self.venue) - - active_symbols: set[str] = set() - for o in open_orders: - active_symbols.add(format_symbol(o.instrument_id.symbol.value)) - for p in open_positions: - active_symbols.add(format_symbol(p.instrument_id.symbol.value)) - - binance_orders: list[BinanceFuturesOrder] = [] - reports: dict[VenueOrderId, OrderStatusReport] = {} - - try: - # Check Binance for all active positions - binance_positions: list[BinanceFuturesPositionRisk] - binance_positions = await self._http_account.get_position_risk() - for position in binance_positions: - if Decimal(position.positionAmt) == 0: - continue # Flat position - # Add active symbol - active_symbols.add(position.symbol) - - # Check Binance for all open orders - binance_open_orders: list[BinanceFuturesOrder] - binance_open_orders = await self._http_account.get_open_orders( - symbol=instrument_id.symbol.value if instrument_id is not None else None, - ) - binance_orders.extend(binance_open_orders) - # Add active symbol - for order in binance_orders: - active_symbols.add(order.symbol) - - # Check Binance for all orders for active symbols - for symbol in active_symbols: - response = await self._http_account.get_orders( - symbol=symbol, - start_time=secs_to_millis(start.timestamp()) if start is not None else None, - end_time=secs_to_millis(end.timestamp()) if end is not None else None, - ) - binance_orders.extend(response) - except BinanceError as e: - self._log.exception(f"Cannot generate order status report: {e.message}", e) - return [] - - # Parse all Binance orders - for data in binance_orders: - report = parse_order_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(data.symbol), - data=data, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - reports[report.venue_order_id] = report # One report per order - - len_reports = len(reports) - plural = "" if len_reports == 1 else "s" - self._log.info(f"Generated {len(reports)} OrderStatusReport{plural}.") - - return list(reports.values()) - - async def generate_trade_reports( # noqa (C901 too complex) - self, - instrument_id: InstrumentId = None, - venue_order_id: VenueOrderId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, - ) -> list[TradeReport]: - self._log.info(f"Generating TradeReports for {self.id}...") - - # Check cache for all active symbols - open_orders: list[Order] = self._cache.orders_open(venue=self.venue) - open_positions: list[Position] = self._cache.positions_open(venue=self.venue) - - active_symbols: set[str] = set() - for o in open_orders: - active_symbols.add(format_symbol(o.instrument_id.symbol.value)) - for p in open_positions: - active_symbols.add(format_symbol(p.instrument_id.symbol.value)) - - binance_trades: list[BinanceFuturesAccountTrade] = [] - reports: list[TradeReport] = [] - - try: - # Check Binance for all active positions - binance_positions: list[BinanceFuturesPositionRisk] - binance_positions = await self._http_account.get_position_risk() - for data in binance_positions: - if Decimal(data.positionAmt) == 0: - continue # Flat position - # Add active symbol - active_symbols.add(data.symbol) - - # Check Binance for trades on all active symbols - for symbol in active_symbols: - symbol_trades = await self._http_account.get_account_trades( - symbol=symbol, - 
start_time=secs_to_millis(start.timestamp()) if start is not None else None, - end_time=secs_to_millis(end.timestamp()) if end is not None else None, - ) - binance_trades.extend(symbol_trades) - except BinanceError as e: - self._log.exception(f"Cannot generate trade report: {e.message}", e) - return [] - - # Parse all Binance trades - for trade in binance_trades: - report = parse_trade_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(trade.symbol), - data=trade, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - reports.append(report) - - # Confirm sorting in ascending order - reports = sorted(reports, key=lambda x: x.trade_id) - - len_reports = len(reports) - plural = "" if len_reports == 1 else "s" - self._log.info(f"Generated {len(reports)} TradeReport{plural}.") - - return reports - - async def generate_position_status_reports( - self, - instrument_id: InstrumentId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, + symbol: Optional[str] = None, ) -> list[PositionStatusReport]: - self._log.info(f"Generating PositionStatusReports for {self.id}...") - reports: list[PositionStatusReport] = [] - - try: - # Check Binance for all active positions - binance_positions: list[BinanceFuturesPositionRisk] - binance_positions = await self._http_account.get_position_risk() - except BinanceError as e: - self._log.exception(f"Cannot generate position status report: {e.message}", e) - return [] - - # Parse all Binance positions - for data in binance_positions: - if Decimal(data.positionAmt) == 0: + # Check Binance for all active positions + binance_positions: list[BinanceFuturesPositionRisk] + binance_positions = await self._futures_http_account.query_futures_position_risk(symbol) + for position in binance_positions: + if Decimal(position.positionAmt) == 0: continue # Flat position - - report: PositionStatusReport = parse_position_report_http( + report = position.parse_to_position_status_report( account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(data.symbol), - data=data, + instrument_id=self._get_cached_instrument_id(position.symbol), report_id=UUID4(), + enum_parser=self._futures_enum_parser, ts_init=self._clock.timestamp_ns(), ) - self._log.debug(f"Received {report}.") reports.append(report) - - len_reports = len(reports) - plural = "" if len_reports == 1 else "s" - self._log.info(f"Generated {len(reports)} PositionStatusReport{plural}.") - return reports - # -- COMMAND HANDLERS ------------------------------------------------------------------------- + async def _get_binance_active_position_symbols( + self, + symbol: Optional[str] = None, + ) -> list[str]: + # Check Binance for all active positions + active_symbols: list[str] = [] + binance_positions: list[BinanceFuturesPositionRisk] + binance_positions = await self._futures_http_account.query_futures_position_risk(symbol) + for position in binance_positions: + if Decimal(position.positionAmt) == 0: + continue # Flat position + # Add active symbol + active_symbols.append(position.symbol) + return active_symbols - async def _submit_order(self, command: SubmitOrder) -> None: # noqa (too complex) - order: Order = command.order if isinstance(command, SubmitOrder) else command + # -- COMMAND HANDLERS ------------------------------------------------------------------------- + def _check_order_validity(self, order: Order): # Check order type valid - if order.order_type not in 
BINANCE_FUTURES_VALID_ORDER_TYPES: + if order.order_type not in self._futures_enum_parser.futures_valid_order_types: self._log.error( f"Cannot submit order: {order_type_to_str(order.order_type)} " f"orders not supported by the Binance exchange for FUTURES accounts. " - f"Use any of {[order_type_to_str(t) for t in BINANCE_FUTURES_VALID_ORDER_TYPES]}", + f"Use any of {[order_type_to_str(t) for t in self._futures_enum_parser.futures_valid_order_types]}", ) return - # Check time in force valid - if order.time_in_force not in BINANCE_FUTURES_VALID_TIF: + if order.time_in_force not in self._futures_enum_parser.futures_valid_time_in_force: self._log.error( f"Cannot submit order: " f"{time_in_force_to_str(order.time_in_force)} " - f"not supported by the exchange. Use any of {BINANCE_FUTURES_VALID_TIF}.", + f"not supported by the exchange. " + f"Use any of {[time_in_force_to_str(t) for t in self._futures_enum_parser.futures_valid_time_in_force]}.", ) return - # Check post-only if order.is_post_only and order.order_type != OrderType.LIMIT: self._log.error( @@ -591,411 +238,30 @@ async def _submit_order(self, command: SubmitOrder) -> None: # noqa (too comple ) return - # Generate event here to ensure correct ordering of events - self.generate_order_submitted( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - try: - if order.order_type == OrderType.MARKET: - await self._submit_market_order(order) - elif order.order_type == OrderType.LIMIT: - await self._submit_limit_order(order) - elif order.order_type in (OrderType.STOP_MARKET, OrderType.MARKET_IF_TOUCHED): - await self._submit_stop_market_order(order) - elif order.order_type in (OrderType.STOP_LIMIT, OrderType.LIMIT_IF_TOUCHED): - await self._submit_stop_limit_order(order) - elif order.order_type == OrderType.TRAILING_STOP_MARKET: - await self._submit_trailing_stop_market_order(order) - except BinanceError as e: - self.generate_order_rejected( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - reason=e.message, - ts_event=self._clock.timestamp_ns(), - ) - - async def _submit_market_order(self, order: MarketOrder) -> None: - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type="MARKET", - quantity=str(order.quantity), - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_limit_order(self, order: LimitOrder) -> None: - time_in_force_str: str = self._convert_time_in_force_to_str(order.time_in_force) - if order.is_post_only: - time_in_force_str = "GTX" - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - price=str(order.price), - reduce_only=order.is_reduce_only, # Cannot be sent with Hedge-Mode or closePosition - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_stop_market_order(self, order: StopMarketOrder) -> None: - time_in_force_str: str = self._convert_time_in_force_to_str(order.time_in_force) - - if order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): - working_type = "CONTRACT_PRICE" - elif order.trigger_type == TriggerType.MARK_PRICE: - working_type = "MARK_PRICE" - else: - self._log.error( - f"Cannot 
submit order: invalid `order.trigger_type`, was " - f"{trigger_type_to_str(order.trigger_price)}. {order}", - ) - return - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - stop_price=str(order.trigger_price), - working_type=working_type, - reduce_only=order.is_reduce_only, # Cannot be sent with Hedge-Mode or closePosition - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_stop_limit_order(self, order: StopMarketOrder) -> None: - time_in_force_str: str = self._convert_time_in_force_to_str(order.time_in_force) - - if order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): - working_type = "CONTRACT_PRICE" - elif order.trigger_type == TriggerType.MARK_PRICE: - working_type = "MARK_PRICE" - else: - self._log.error( - f"Cannot submit order: invalid `order.trigger_type`, was " - f"{trigger_type_to_str(order.trigger_price)}. {order}", - ) - return - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - price=str(order.price), - stop_price=str(order.trigger_price), - working_type=working_type, - reduce_only=order.is_reduce_only, # Cannot be sent with Hedge-Mode or closePosition - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_trailing_stop_market_order(self, order: TrailingStopMarketOrder) -> None: - time_in_force_str: str = self._convert_time_in_force_to_str(order.time_in_force) - - if order.trigger_type in (TriggerType.DEFAULT, TriggerType.LAST_TRADE): - working_type = "CONTRACT_PRICE" - elif order.trigger_type == TriggerType.MARK_PRICE: - working_type = "MARK_PRICE" - else: - self._log.error( - f"Cannot submit order: invalid `order.trigger_type`, was " - f"{trigger_type_to_str(order.trigger_price)}. {order}", - ) - return - - if order.trailing_offset_type != TrailingOffsetType.BASIS_POINTS: - self._log.error( - f"Cannot submit order: invalid `order.trailing_offset_type`, was " - f"{trailing_offset_type_to_str(order.trailing_offset_type)} (use `BASIS_POINTS`). 
" - f"{order}", - ) - return - - # Ensure activation price - activation_price: Optional[Price] = order.trigger_price - if not activation_price: - quote = self._cache.quote_tick(order.instrument_id) - trade = self._cache.trade_tick(order.instrument_id) - if quote: - if order.side == OrderSide.BUY: - activation_price = quote.ask - elif order.side == OrderSide.SELL: - activation_price = quote.bid - elif trade: - activation_price = trade.price - else: - self._log.error( - "Cannot submit order: no trigger price specified for Binance activation price " - f"and could not find quotes or trades for {order.instrument_id}", - ) - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - activation_price=str(activation_price), - callback_rate=str(order.trailing_offset / 100), - working_type=working_type, - reduce_only=order.is_reduce_only, # Cannot be sent with Hedge-Mode or closePosition - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_order_list(self, command: SubmitOrderList) -> None: - for order in command.order_list.orders: - if order.linked_order_ids: # TODO(cs): Implement - self._log.warning(f"Cannot yet handle OCO conditional orders, {order}.") - await self._submit_order(order) - - async def _modify_order(self, command: ModifyOrder) -> None: - self._log.error( # pragma: no cover - "Cannot modify order: Not supported by the exchange.", # pragma: no cover - ) - - async def _cancel_order(self, command: CancelOrder) -> None: - self.generate_order_pending_cancel( - strategy_id=command.strategy_id, - instrument_id=command.instrument_id, - client_order_id=command.client_order_id, - venue_order_id=command.venue_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - await self._cancel_order_single( - instrument_id=command.instrument_id, - client_order_id=command.client_order_id, - venue_order_id=command.venue_order_id, - ) - - async def _cancel_all_orders(self, command: CancelAllOrders) -> None: - open_orders_strategy = self._cache.orders_open( - instrument_id=command.instrument_id, - strategy_id=command.strategy_id, - ) - for order in open_orders_strategy: - if order.is_pending_cancel: - continue # Already pending cancel - self.generate_order_pending_cancel( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - venue_order_id=order.venue_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - # Check total orders for instrument - open_orders_total_count = self._cache.orders_open_count( - instrument_id=command.instrument_id, - ) - - try: - if open_orders_total_count == len(open_orders_strategy): - await self._http_account.cancel_open_orders( - symbol=format_symbol(command.instrument_id.symbol.value), - ) - else: - for order in open_orders_strategy: - await self._cancel_order_single( - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - venue_order_id=order.venue_order_id, - ) - except BinanceError as e: - self._log.exception(f"Cannot cancel open orders: {e.message}", e) - - async def _cancel_order_single( - self, - instrument_id: InstrumentId, - client_order_id: ClientOrderId, - venue_order_id: Optional[VenueOrderId], - ) -> None: - try: - if venue_order_id is not None: - await self._http_account.cancel_order( - symbol=format_symbol(instrument_id.symbol.value), - 
order_id=venue_order_id.value, - ) - else: - await self._http_account.cancel_order( - symbol=format_symbol(instrument_id.symbol.value), - orig_client_order_id=client_order_id.value, - ) - except BinanceError as e: - self._log.exception( - f"Cannot cancel order " - f"{repr(client_order_id)}, " - f"{repr(venue_order_id)}: " - f"{e.message}", - e, - ) - - def _convert_time_in_force_to_str(self, time_in_force: TimeInForce): - time_in_force_str: str = time_in_force_to_str(time_in_force) - if time_in_force_str == TimeInForce.GTD.name: - if self._warn_gtd_to_gtc: - self._log.warning("Converting GTD `time_in_force` to GTC.") - time_in_force_str = TimeInForce.GTC.name - return time_in_force_str - - def _get_cached_instrument_id(self, symbol: str) -> InstrumentId: - # Parse instrument ID - nautilus_symbol: str = parse_symbol(symbol, account_type=self._binance_account_type) - instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol) - if not instrument_id: - instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE) - self._instrument_ids[nautilus_symbol] = instrument_id - return instrument_id + # -- WEBSOCKET EVENT HANDLERS -------------------------------------------------------------------- def _handle_user_ws_message(self, raw: bytes) -> None: # TODO(cs): Uncomment for development # self._log.info(str(json.dumps(msgspec.json.decode(raw), indent=4)), color=LogColor.MAGENTA) - - wrapper = msgspec.json.decode(raw, type=BinanceFuturesUserMsgWrapper) - + wrapper = self._decoder_futures_user_msg_wrapper.decode(raw) try: - if wrapper.data.e == BinanceFuturesEventType.ACCOUNT_UPDATE: - account_update = msgspec.json.decode(raw, type=BinanceFuturesAccountUpdateWrapper) - self._handle_account_update(account_update.data) - elif wrapper.data.e == BinanceFuturesEventType.ORDER_TRADE_UPDATE: - order_update = msgspec.json.decode(raw, type=BinanceFuturesOrderUpdateWrapper) - self._handle_order_trade_update(order_update.data) - elif wrapper.data.e == BinanceFuturesEventType.MARGIN_CALL: - self._log.warning("MARGIN CALL received.") # Implement - elif wrapper.data.e == BinanceFuturesEventType.ACCOUNT_CONFIG_UPDATE: - self._log.info("Account config updated.", LogColor.BLUE) # Implement - elif wrapper.data.e == BinanceFuturesEventType.LISTEN_KEY_EXPIRED: - self._log.warning("Listen key expired.") # Implement + self._futures_user_ws_handlers[wrapper.data.e](raw) except Exception as e: self._log.exception(f"Error on handling {repr(raw)}", e) - def _handle_account_update(self, msg: BinanceFuturesAccountUpdateMsg) -> None: - self.generate_account_state( - balances=parse_account_balances_ws(raw_balances=msg.a.B), - margins=[], - reported=True, - ts_event=millis_to_nanos(msg.T), - ) + def _handle_account_update(self, raw: bytes) -> None: + account_update = self._decoder_futures_account_update_wrapper.decode(raw) + account_update.data.handle_account_update(self) - def _handle_order_trade_update(self, msg: BinanceFuturesOrderUpdateMsg) -> None: - data: BinanceFuturesOrderData = msg.o - instrument_id: InstrumentId = self._get_cached_instrument_id(data.s) - client_order_id = ClientOrderId(data.c) if data.c != "" else None - venue_order_id = VenueOrderId(str(data.i)) - ts_event = millis_to_nanos(msg.T) + def _handle_order_trade_update(self, raw: bytes) -> None: + order_update = self._decoder_futures_order_update_wrapper.decode(raw) + order_update.data.o.handle_order_trade_update(self) - # Fetch strategy ID - strategy_id: StrategyId = self._cache.strategy_id_for_order(client_order_id) - if 
strategy_id is None: - self._generate_external_order_report( - instrument_id, - client_order_id, - venue_order_id, - msg.o, - ts_event, - ) - return + def _handle_margin_call(self, raw: bytes) -> None: + self._log.warning("MARGIN CALL received.") # Implement - if data.x == BinanceExecutionType.NEW: - self.generate_order_accepted( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - ts_event=ts_event, - ) - elif data.x == BinanceExecutionType.TRADE: - instrument: Instrument = self._instrument_provider.find(instrument_id=instrument_id) - - # Determine commission - if data.N is not None: - commission = Money.from_str(f"{data.n} {data.N}") - else: - # Commission in margin collateral currency - commission = Money(0, instrument.quote_currency) - - self.generate_order_filled( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - venue_position_id=PositionId(f"{instrument_id}-{data.ps.value}"), - trade_id=TradeId(str(data.t)), - order_side=OrderSide.BUY if data.S == BinanceOrderSide.BUY else OrderSide.SELL, - order_type=parse_order_type(data.o), - last_qty=Quantity.from_str(data.l), - last_px=Price.from_str(data.L), - quote_currency=instrument.quote_currency, - commission=commission, - liquidity_side=LiquiditySide.MAKER if data.m else LiquiditySide.TAKER, - ts_event=ts_event, - ) - elif data.x == BinanceExecutionType.CANCELED: - self.generate_order_canceled( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - ts_event=ts_event, - ) - elif data.x == BinanceExecutionType.EXPIRED: - self.generate_order_expired( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - ts_event=ts_event, - ) - else: - self._log.error( - f"Cannot handle ORDER_TRADE_UPDATE: unrecognized type {data.x.value}", - ) - - def _generate_external_order_report( - self, - instrument_id: InstrumentId, - client_order_id: ClientOrderId, - venue_order_id: VenueOrderId, - data: BinanceFuturesOrderData, - ts_event: int, - ) -> None: - report = OrderStatusReport( - account_id=self.account_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - order_side=OrderSide.BUY if data.S == BinanceOrderSide.BUY else OrderSide.SELL, - order_type=parse_order_type(data.o), - time_in_force=parse_time_in_force(data.f), - order_status=OrderStatus.ACCEPTED, - price=Price.from_str(data.p) if data.p is not None else None, - trigger_price=Price.from_str(data.sp) if data.sp is not None else None, - trigger_type=parse_trigger_type(data.wt), - trailing_offset=Decimal(data.cr) * 100 if data.cr is not None else None, - trailing_offset_type=TrailingOffsetType.BASIS_POINTS, - quantity=Quantity.from_str(data.q), - filled_qty=Quantity.from_str(data.z), - avg_px=None, - post_only=data.f == BinanceFuturesTimeInForce.GTX, - reduce_only=data.R, - report_id=UUID4(), - ts_accepted=ts_event, - ts_last=ts_event, - ts_init=self._clock.timestamp_ns(), - ) + def _handle_account_config_update(self, raw: bytes) -> None: + self._log.info("Account config updated.", LogColor.BLUE) # Implement - self._send_order_status_report(report) + def _handle_listen_key_expired(self, raw: bytes) -> None: + self._log.warning("Listen key expired.") # Implement diff --git a/nautilus_trader/adapters/binance/futures/http/__init__.py 
b/nautilus_trader/adapters/binance/futures/http/__init__.py index e69de29bb2d1..ca16b56e4794 100644 --- a/nautilus_trader/adapters/binance/futures/http/__init__.py +++ b/nautilus_trader/adapters/binance/futures/http/__init__.py @@ -0,0 +1,14 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- diff --git a/nautilus_trader/adapters/binance/futures/http/account.py b/nautilus_trader/adapters/binance/futures/http/account.py index c05f3c7dbebd..47fd0f1cfa3d 100644 --- a/nautilus_trader/adapters/binance/futures/http/account.py +++ b/nautilus_trader/adapters/binance/futures/http/account.py @@ -13,652 +13,368 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from typing import Any, Optional +from typing import Optional import msgspec from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import format_symbol +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.account import BinanceStatusCode +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAccountInfo -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAccountTrade -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesOrder +from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesDualSidePosition from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesPositionRisk +from nautilus_trader.adapters.binance.http.account import BinanceAccountHttpAPI from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.http.enums import NewOrderRespType +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.common.clock import LiveClock -class BinanceFuturesAccountHttpAPI: +class BinanceFuturesPositionModeHttp(BinanceHttpEndpoint): """ - Provides access to the `Binance Futures` Account/Trade HTTP REST API. + Endpoint of user's position mode for every FUTURES symbol. - Parameters + `GET /fapi/v1/positionSide/dual` + `GET /dapi/v1/positionSide/dual` + + `POST /fapi/v1/positionSide/dual` + `POST /dapi/v1/positionSide/dual` + + References ---------- - client : BinanceHttpClient - The Binance REST API client. - account_type : BinanceAccountType - The Binance account type. 
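# A standalone sketch of the handler-table dispatch adopted by `_handle_user_ws_message`
# above (`self._futures_user_ws_handlers[wrapper.data.e](raw)`): each event type maps to
# a bound method, replacing the previous if/elif chain. All names below are illustrative,
# not the adapter's real types.
from enum import Enum, unique

@unique
class EventType(Enum):
    ACCOUNT_UPDATE = "ACCOUNT_UPDATE"
    ORDER_TRADE_UPDATE = "ORDER_TRADE_UPDATE"

class Dispatcher:
    def __init__(self) -> None:
        # One callable per event type; supporting a new event is one table entry
        self._handlers = {
            EventType.ACCOUNT_UPDATE: self._on_account_update,
            EventType.ORDER_TRADE_UPDATE: self._on_order_update,
        }

    def handle(self, event_type: EventType, raw: bytes) -> None:
        self._handlers[event_type](raw)  # unknown event types raise KeyError

    def _on_account_update(self, raw: bytes) -> None:
        print("account update:", raw)

    def _on_order_update(self, raw: bytes) -> None:
        print("order update:", raw)

Dispatcher().handle(EventType.ACCOUNT_UPDATE, b"{}")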
+ https://binance-docs.github.io/apidocs/futures/en/#change-position-mode-trade + https://binance-docs.github.io/apidocs/delivery/en/#change-position-mode-trade """ def __init__( self, client: BinanceHttpClient, - account_type: BinanceAccountType = BinanceAccountType.SPOT, + base_endpoint: str, ): - self.client = client - - if account_type == BinanceAccountType.FUTURES_USDT: - self.BASE_ENDPOINT = "/fapi/v1/" - elif account_type == BinanceAccountType.FUTURES_COIN: - self.BASE_ENDPOINT = "/dapi/v1/" - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover - ) - - # Decoders - self._decoder_account = msgspec.json.Decoder(BinanceFuturesAccountInfo) - self._decoder_order = msgspec.json.Decoder(list[BinanceFuturesOrder]) - self._decoder_trade = msgspec.json.Decoder(list[BinanceFuturesAccountTrade]) - self._decoder_position = msgspec.json.Decoder(list[BinanceFuturesPositionRisk]) + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + BinanceMethodType.POST: BinanceSecurityType.TRADE, + } + url_path = base_endpoint + "positionSide/dual" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceFuturesDualSidePosition) + self._post_resp_decoder = msgspec.json.Decoder(BinanceStatusCode) - async def change_position_mode( - self, - is_dual_side_position: bool, - recv_window: Optional[int] = None, - ): + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Change Position Mode (TRADE). - - `POST /fapi/v1/positionSide/dual (HMAC SHA256)`. + Parameters of positionSide/dual GET request. Parameters ---------- - is_dual_side_position : bool - If `Hedge Mode` will be set, otherwise `One-way` Mode. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#change-position-mode-trade - """ - payload: dict[str, str] = { - "dualSidePosition": str(is_dual_side_position).lower(), - } - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "positionSide/dual", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + recvWindow: Optional[str] = None - async def get_position_mode( - self, - recv_window: Optional[int] = None, - ): + class PostParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get Current Position Mode (USER_DATA). - - `GET /fapi/v1/positionSide/dual (HMAC SHA256)`. + Parameters of positionSide/dual POST request. Parameters ---------- - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + dualSidePosition : str ('true', 'false') + The dual side position mode to set... + `true`: Hedge Mode, `false`: One-way mode. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
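# Why the parameter structs above declare `omit_defaults=True`: any field still at its
# default (e.g. recvWindow=None) is dropped during encoding, so optional parameters never
# reach the signed query string. A runnable demonstration with an illustrative struct of
# the same shape (not the adapter's own class):
from typing import Optional

import msgspec

class ParamsDemo(msgspec.Struct, omit_defaults=True, frozen=True):
    timestamp: str
    recvWindow: Optional[str] = None

print(msgspec.json.encode(ParamsDemo(timestamp="1676700000000")))
# b'{"timestamp":"1676700000000"}'  <- recvWindow omitted entirely
print(msgspec.json.encode(ParamsDemo(timestamp="1676700000000", recvWindow="5000")))
# b'{"timestamp":"1676700000000","recvWindow":"5000"}'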
- - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#get-current-position-mode-user_data """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "positionSide/dual", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + dualSidePosition: str + recvWindow: Optional[str] = None - async def new_order( # noqa (too complex) - self, - symbol: str, - side: str, - type: str, - position_side: Optional[str] = None, - time_in_force: Optional[str] = None, - quantity: Optional[str] = None, - reduce_only: Optional[bool] = False, - price: Optional[str] = None, - new_client_order_id: Optional[str] = None, - stop_price: Optional[str] = None, - close_position: Optional[bool] = None, - activation_price: Optional[str] = None, - callback_rate: Optional[str] = None, - working_type: Optional[str] = None, - price_protect: Optional[bool] = None, - new_order_resp_type: Optional[NewOrderRespType] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Submit a new order. + async def _get(self, parameters: GetParameters) -> BinanceFuturesDualSidePosition: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) - Submit New Order (TRADE). - `POST /api/v3/order`. + async def _post(self, parameters: PostParameters) -> BinanceStatusCode: + method_type = BinanceMethodType.POST + raw = await self._method(method_type, parameters) + return self._post_resp_decoder.decode(raw) - Parameters - ---------- - symbol : str - The symbol for the request. - side : str - The order side for the request. - type : str - The order type for the request. - position_side : str, {'BOTH', 'LONG', 'SHORT'}, default BOTH - The position side for the order. - time_in_force : str, optional - The order time in force for the request. - quantity : str, optional - The order quantity in base asset units for the request. - reduce_only : bool, optional - If the order will only reduce a position. - price : str, optional - The order price for the request. - new_client_order_id : str, optional - The client order ID for the request. A unique ID among open orders. - Automatically generated if not provided. - stop_price : str, optional - The order stop price for the request. - Used with STOP_LOSS, STOP_LOSS_LIMIT, TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders. - close_position : bool, optional - If close all open positions for the given symbol. - activation_price : str, optional. - The price to activate a trailing stop. - Used with TRAILING_STOP_MARKET orders, default as the last price (supporting different `working_type`). - callback_rate : str, optional - The percentage to trail the stop. - Used with TRAILING_STOP_MARKET orders, min 0.1, max 5 where 1 for 1%. - working_type : str {'MARK_PRICE', 'CONTRACT_PRICE'}, optional - The trigger type for the order. API default "CONTRACT_PRICE". - price_protect : bool, optional - If price protection is active. - new_order_resp_type : NewOrderRespType, optional - The response type for the order request. - MARKET and LIMIT order types default to FULL, all other orders default to ACK. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - Returns - ------- - dict[str, Any] + +class BinanceFuturesAllOpenOrdersHttp(BinanceHttpEndpoint): + """ + Endpoint of all open FUTURES orders. 
- References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#new-order-trade + `DELETE /fapi/v1/allOpenOrders` + `DELETE /dapi/v1/allOpenOrders` - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "side": side, - "type": type, + References + ---------- + https://binance-docs.github.io/apidocs/futures/en/#cancel-all-open-orders-trade + https://binance-docs.github.io/apidocs/delivery/en/#cancel-all-open-orders-trade + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.DELETE: BinanceSecurityType.TRADE, } - if position_side is not None: - payload["positionSide"] = position_side - if time_in_force is not None: - payload["timeInForce"] = time_in_force - if quantity is not None: - payload["quantity"] = quantity - if reduce_only is not None: - payload["reduceOnly"] = str(reduce_only).lower() - if price is not None: - payload["price"] = price - if new_client_order_id is not None: - payload["newClientOrderId"] = new_client_order_id - if stop_price is not None: - payload["stopPrice"] = stop_price - if close_position is not None: - payload["closePosition"] = str(close_position).lower() - if activation_price is not None: - payload["activationPrice"] = activation_price - if callback_rate is not None: - payload["callbackRate"] = callback_rate - if working_type is not None: - payload["workingType"] = working_type - if price_protect is not None: - payload["priceProtect"] = str(price_protect).lower() - if new_order_resp_type is not None: - payload["newOrderRespType"] = new_order_resp_type.value - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, + url_path = base_endpoint + "allOpenOrders" + super().__init__( + client, + methods, + url_path, ) + self._delete_resp_decoder = msgspec.json.Decoder(BinanceStatusCode) - return msgspec.json.decode(raw) - - async def cancel_order( - self, - symbol: str, - order_id: Optional[str] = None, - orig_client_order_id: Optional[str] = None, - new_client_order_id: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: + class DeleteParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Cancel an open order. - - Cancel Order (TRADE). - `DELETE /api/v3/order`. + Parameters of allOpenOrders DELETE request. Parameters ---------- - symbol : str - The symbol for the request. - order_id : str, optional - The order ID to cancel. - orig_client_order_id : str, optional - The original client order ID to cancel. - new_client_order_id : str, optional - The new client order ID to uniquely identify this request. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + symbol : BinanceSymbol + The symbol of the request + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
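# The endpoint pattern above generalizes: subclass BinanceHttpEndpoint with a
# method-to-security map, a URL path, a frozen parameter struct, and a prebuilt
# response decoder. A hedged sketch of a hypothetical extra GET endpoint (the endpoint
# name and dict response type are invented; the constructor and `_method` calls follow
# the usage shown in this diff):
from typing import Optional

import msgspec

from nautilus_trader.adapters.binance.common.enums import BinanceMethodType
from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType
from nautilus_trader.adapters.binance.http.client import BinanceHttpClient
from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint

class BinanceFuturesExampleHttp(BinanceHttpEndpoint):  # hypothetical endpoint
    def __init__(self, client: BinanceHttpClient, base_endpoint: str):
        methods = {BinanceMethodType.GET: BinanceSecurityType.USER_DATA}
        super().__init__(client, methods, base_endpoint + "example")
        self._get_resp_decoder = msgspec.json.Decoder(dict)  # schema-less for the sketch

    class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True):
        timestamp: str
        recvWindow: Optional[str] = None

    async def _get(self, parameters: GetParameters) -> dict:
        raw = await self._method(BinanceMethodType.GET, parameters)
        return self._get_resp_decoder.decode(raw)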
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#cancel-order-trade - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = str(order_id) - if orig_client_order_id is not None: - payload["origClientOrderId"] = str(orig_client_order_id) - if new_client_order_id is not None: - payload["newClientOrderId"] = str(new_client_order_id) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + symbol: BinanceSymbol + recvWindow: Optional[str] = None - async def cancel_open_orders( - self, - symbol: str, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Cancel all open orders for a symbol. This includes OCO orders. + async def _delete(self, parameters: DeleteParameters) -> BinanceStatusCode: + method_type = BinanceMethodType.DELETE + raw = await self._method(method_type, parameters) + return self._delete_resp_decoder.decode(raw) - Cancel all Open Orders for a Symbol (TRADE). - `DELETE /fapi/v1/allOpenOrders (HMAC SHA256)`. - Parameters - ---------- - symbol : str - The symbol for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceFuturesAccountHttp(BinanceHttpEndpoint): + """ + Endpoint of current FUTURES account information. - Returns - ------- - dict[str, Any] + `GET /fapi/v2/account` + `GET /dapi/v1/account` - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#cancel-all-open-orders-on-a-symbol-trade + References + ---------- + https://binance-docs.github.io/apidocs/futures/en/#account-information-v2-user_data + https://binance-docs.github.io/apidocs/delivery/en/#account-information-user_data + """ - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "allOpenOrders", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + url_path = base_endpoint + "account" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(BinanceFuturesAccountInfo) - return msgspec.json.decode(raw) - - async def get_order( - self, - symbol: str, - order_id: Optional[str] = None, - orig_client_order_id: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> Optional[BinanceFuturesOrder]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Check an order's status. - - Query Order (USER_DATA). - `GET TBD`. + Parameters of account GET request. Parameters ---------- - symbol : str - The symbol for the request. - order_id : str, optional - The order ID for the request. - orig_client_order_id : str, optional - The original client order ID for the request. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
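# The `timestamp` field carried by every signed request is epoch milliseconds; the
# high-level APIs below stamp it from a LiveClock (see `_timestamp()` in the wallet API
# further down, which is simply `str(self._clock.timestamp_ms())`). Minimal sketch:
from nautilus_trader.common.clock import LiveClock

clock = LiveClock()
params = BinanceFuturesAccountHttp.GetParameters(
    timestamp=str(clock.timestamp_ms()),  # e.g. "1676700000000"
)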
- - Returns - ------- - BinanceFuturesOrderMsg or None - - References - ---------- - TBD - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = order_id - if orig_client_order_id is not None: - payload["origClientOrderId"] = orig_client_order_id - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, - ) - if raw is None: - return None - return msgspec.json.decode(raw, type=BinanceFuturesOrder) + timestamp: str + recvWindow: Optional[str] = None - async def get_open_orders( - self, - symbol: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> list[BinanceFuturesOrder]: - """ - Get all open orders for a symbol. + async def _get(self, parameters: GetParameters) -> BinanceFuturesAccountInfo: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Query Current Open Orders (USER_DATA). - Parameters - ---------- - symbol : str, optional - The symbol for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceFuturesPositionRiskHttp(BinanceHttpEndpoint): + """ + Endpoint of information of all FUTURES positions. - Returns - ------- - dict[str, Any] + `GET /fapi/v2/positionRisk` + `GET /dapi/v1/positionRisk` - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#current-open-orders-user_data + References + ---------- + https://binance-docs.github.io/apidocs/futures/en/#position-information-v2-user_data + https://binance-docs.github.io/apidocs/delivery/en/#position-information-user_data + """ - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "openOrders", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + url_path = base_endpoint + "positionRisk" + super().__init__( + client, + methods, + url_path, ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceFuturesPositionRisk]) - return self._decoder_order.decode(raw) - - async def get_orders( - self, - symbol: str, - order_id: Optional[str] = None, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = None, - recv_window: Optional[int] = None, - ) -> list[BinanceFuturesOrder]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get all account orders (open, or closed). - - All Orders (USER_DATA). + Parameters of positionRisk GET request. Parameters ---------- - symbol : str - The symbol for the request. - order_id : str, optional - The order ID for the request. - start_time : int, optional - The start time (UNIX milliseconds) filter for the request. - end_time : int, optional - The end time (UNIX milliseconds) filter for the request. - limit : int, optional - The limit for the response. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + symbol : BinanceSymbol, optional + The symbol of the request. 
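# Note that each endpoint builds its msgspec Decoder once in __init__ (e.g. the
# list[BinanceFuturesPositionRisk] decoder above) instead of calling
# msgspec.json.decode(raw, type=...) per response, so the type spec is compiled once
# rather than on every request. Self-contained illustration with a toy schema:
import msgspec

class PositionDemo(msgspec.Struct):  # illustrative stand-in for the real schema
    symbol: str
    positionAmt: str

decoder = msgspec.json.Decoder(list[PositionDemo])  # built once, reused per response
print(decoder.decode(b'[{"symbol":"BTCUSDT","positionAmt":"0.001"}]'))
# [PositionDemo(symbol='BTCUSDT', positionAmt='0.001')]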
+ recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - list[dict[str, Any]] - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#all-orders-user_data - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = order_id - if start_time is not None: - payload["startTime"] = str(start_time) - if end_time is not None: - payload["endTime"] = str(end_time) - if limit is not None: - payload["limit"] = str(limit) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "allOrders", - payload=payload, - ) - - return self._decoder_order.decode(raw) - async def account(self, recv_window: Optional[int] = None) -> BinanceFuturesAccountInfo: - """ - Get current account information. + timestamp: str + symbol: Optional[BinanceSymbol] = None + recvWindow: Optional[str] = None - Account Information (USER_DATA). - `GET /api/v3/account`. + async def _get(self, parameters: GetParameters) -> list[BinanceFuturesPositionRisk]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) - Parameters - ---------- - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - Returns - ------- - BinanceFuturesAccountInfo +class BinanceFuturesAccountHttpAPI(BinanceAccountHttpAPI): + """ + Provides access to the `Binance Futures` Account/Trade HTTP REST API. - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#account-information-user_data + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. + account_type : BinanceAccountType + The Binance account type. 
+ """ - """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "account", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + clock: LiveClock, + account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, + ): + super().__init__( + client=client, + clock=clock, + account_type=account_type, ) + if not account_type.is_futures: + raise RuntimeError( # pragma: no cover (design-time error) + f"`BinanceAccountType` not FUTURES_USDT or FUTURES_COIN, was {account_type}", # pragma: no cover + ) + v2_endpoint_base = self.base_endpoint + if account_type == BinanceAccountType.FUTURES_USDT: + v2_endpoint_base = "/fapi/v2/" - return self._decoder_account.decode(raw) + # Create endpoints + self._endpoint_futures_position_mode = BinanceFuturesPositionModeHttp( + client, + self.base_endpoint, + ) + self._endpoint_futures_all_open_orders = BinanceFuturesAllOpenOrdersHttp( + client, + self.base_endpoint, + ) + self._endpoint_futures_account = BinanceFuturesAccountHttp(client, v2_endpoint_base) + self._endpoint_futures_position_risk = BinanceFuturesPositionRiskHttp( + client, + v2_endpoint_base, + ) - async def get_account_trades( + async def query_futures_hedge_mode( self, - symbol: str, - from_id: Optional[str] = None, - order_id: Optional[str] = None, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = None, - recv_window: Optional[int] = None, - ) -> list[BinanceFuturesAccountTrade]: - """ - Get trades for a specific account and symbol. - - Account Trade List (USER_DATA) - - Parameters - ---------- - symbol : str - The symbol for the request. - from_id : str, optional - The trade match ID to query from. - order_id : str, optional - The order ID for the trades. This can only be used in combination with symbol. - start_time : int, optional - The start time (UNIX milliseconds) filter for the request. - end_time : int, optional - The end time (UNIX milliseconds) filter for the request. - limit : int, optional - The limit for the response. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). 
- - Returns - ------- - list[BinanceFuturesAccountTrade] + recv_window: Optional[str] = None, + ) -> BinanceFuturesDualSidePosition: + """Check Binance Futures hedge mode (dualSidePosition).""" + return await self._endpoint_futures_position_mode._get( + parameters=self._endpoint_futures_position_mode.GetParameters( + timestamp=self._timestamp(), + recvWindow=recv_window, + ), + ) - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#account-trade-list-user_data + async def set_futures_hedge_mode( + self, + dual_side_position: bool, + recv_window: Optional[str] = None, + ) -> BinanceStatusCode: + """Set Binance Futures hedge mode (dualSidePosition).""" + return await self._endpoint_futures_position_mode._post( + parameters=self._endpoint_futures_position_mode.PostParameters( + timestamp=self._timestamp(), + dualSidePosition=str(dual_side_position).lower(), + recvWindow=recv_window, + ), + ) - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if from_id is not None: - payload["fromId"] = from_id - if order_id is not None: - payload["orderId"] = order_id - if start_time is not None: - payload["startTime"] = str(start_time) - if end_time is not None: - payload["endTime"] = str(end_time) - if limit is not None: - payload["limit"] = str(limit) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "userTrades", - payload=payload, + async def cancel_all_open_orders( + self, + symbol: str, + recv_window: Optional[str] = None, + ) -> bool: + """Delete all Futures open orders. Returns whether successful.""" + response = await self._endpoint_futures_all_open_orders._delete( + parameters=self._endpoint_futures_all_open_orders.DeleteParameters( + timestamp=self._timestamp(), + symbol=BinanceSymbol(symbol), + recvWindow=recv_window, + ), ) + return response.code == 200 - return self._decoder_trade.decode(raw) + async def query_futures_account_info( + self, + recv_window: Optional[str] = None, + ) -> BinanceFuturesAccountInfo: + """Check Binance Futures account information.""" + return await self._endpoint_futures_account._get( + parameters=self._endpoint_futures_account.GetParameters( + timestamp=self._timestamp(), + recvWindow=recv_window, + ), + ) - async def get_position_risk( + async def query_futures_position_risk( self, symbol: Optional[str] = None, - recv_window: Optional[int] = None, + recv_window: Optional[str] = None, ) -> list[BinanceFuturesPositionRisk]: - """ - Get current position information. - - Position Information V2 (USER_DATA)** - - `GET /fapi/v2/positionRisk` - - Parameters - ---------- - symbol : str, optional - The trading pair. If None then queries for all symbols. - recv_window : int, optional - The acceptable receive window for the response. 
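# Usage sketch for the coroutines above, assuming `api` was built as in the previous
# sketch. The `dualSidePosition` field name follows the documented Binance response
# shape; the rest is taken directly from the signatures in this diff:
async def manage_account(api: "BinanceFuturesAccountHttpAPI") -> None:
    mode = await api.query_futures_hedge_mode()  # BinanceFuturesDualSidePosition
    if not mode.dualSidePosition:
        await api.set_futures_hedge_mode(dual_side_position=True)
    cancelled = await api.cancel_all_open_orders(symbol="BTCUSDT", recv_window="5000")
    info = await api.query_futures_account_info()
    positions = await api.query_futures_position_risk(symbol="BTCUSDT")
    print(cancelled, info, positions)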
- - Returns - ------- - list[BinanceFuturesPositionRisk] - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#position-information-v2-user_data - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - if recv_window is not None: - payload["recv_window"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "positionRisk", - payload=payload, + """Check all Futures position's info for a symbol.""" + return await self._endpoint_futures_position_risk._get( + parameters=self._endpoint_futures_position_risk.GetParameters( + timestamp=self._timestamp(), + symbol=BinanceSymbol(symbol), + recvWindow=recv_window, + ), ) - - return self._decoder_position.decode(raw) - - async def get_order_rate_limit(self, recv_window: Optional[int] = None) -> dict[str, Any]: - """ - Get the user's current order count usage for all intervals. - - Query Current Order Count Usage (TRADE). - `GET /api/v3/rateLimit/order`. - - Parameters - ---------- - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-current-order-count-usage-trade - - """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "rateLimit/order", - payload=payload, - ) - - return msgspec.json.decode(raw) diff --git a/nautilus_trader/adapters/binance/futures/http/market.py b/nautilus_trader/adapters/binance/futures/http/market.py index 9bf6356bdcba..40b508b3d7ff 100644 --- a/nautilus_trader/adapters/binance/futures/http/market.py +++ b/nautilus_trader/adapters/binance/futures/http/market.py @@ -13,468 +13,84 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from typing import Any, Optional - import msgspec from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import convert_symbols_list_to_json_array -from nautilus_trader.adapters.binance.common.functions import format_symbol -from nautilus_trader.adapters.binance.common.schemas import BinanceTrade +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesExchangeInfo from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.core.correctness import PyCondition +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.adapters.binance.http.market import BinanceMarketHttpAPI -class BinanceFuturesMarketHttpAPI: +class BinanceFuturesExchangeInfoHttp(BinanceHttpEndpoint): """ - Provides access to the `Binance Market` HTTP REST API. + Endpoint of FUTURES exchange trading rules and symbol information. - Parameters + `GET /fapi/v1/exchangeInfo` + `GET /dapi/v1/exchangeInfo` + + References ---------- - client : BinanceHttpClient - The Binance REST API client. 
+ https://binance-docs.github.io/apidocs/futures/en/#exchange-information + https://binance-docs.github.io/apidocs/delivery/en/#exchange-information """ def __init__( self, client: BinanceHttpClient, - account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, + base_endpoint: str, ): - PyCondition.not_none(client, "client") - - self.client = client - self.account_type = account_type - - if self.account_type == BinanceAccountType.FUTURES_USDT: - self.BASE_ENDPOINT = "/fapi/v1/" - elif self.account_type == BinanceAccountType.FUTURES_COIN: - self.BASE_ENDPOINT = "/dapi/v1/" - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover - ) - - self._decoder_exchange_info = msgspec.json.Decoder(BinanceFuturesExchangeInfo) - self._decoder_trades = msgspec.json.Decoder(list[BinanceTrade]) - - async def ping(self) -> dict[str, Any]: - """ - Test the connectivity to the REST API. - - `GET /api/v3/ping` - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#test-connectivity - - """ - raw: bytes = await self.client.query(url_path=self.BASE_ENDPOINT + "ping") - return msgspec.json.decode(raw) - - async def time(self) -> dict[str, Any]: - """ - Test connectivity to the Rest API and get the current server time. - - Check Server Time. - `GET /api/v3/time` - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#check-server-time - - """ - raw: bytes = await self.client.query(url_path=self.BASE_ENDPOINT + "time") - return msgspec.json.decode(raw) - - async def exchange_info( - self, - symbol: Optional[str] = None, - symbols: Optional[list[str]] = None, - ) -> BinanceFuturesExchangeInfo: - """ - Get current exchange trading rules and symbol information. - Only either `symbol` or `symbols` should be passed. - - Exchange Information. - `GET /api/v3/exchangeinfo` - - Parameters - ---------- - symbol : str, optional - The trading pair. - symbols : list[str], optional - The list of trading pairs. - - Returns - ------- - BinanceFuturesExchangeInfo - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#exchange-information - - """ - if symbol and symbols: - raise ValueError("`symbol` and `symbols` cannot be sent together") - - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - if symbols is not None: - payload["symbols"] = convert_symbols_list_to_json_array(symbols) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "exchangeInfo", - payload=payload, - ) - - return self._decoder_exchange_info.decode(raw) - - async def depth(self, symbol: str, limit: Optional[int] = None) -> dict[str, Any]: - """ - Get orderbook. - - `GET /api/v3/depth` - - Parameters - ---------- - symbol : str - The trading pair. - limit : int, optional, default 100 - The limit for the response. Default 100; max 5000. - Valid limits:[5, 10, 20, 50, 100, 500, 1000, 5000]. 
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#order-book - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "depth", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def trades(self, symbol: str, limit: Optional[int] = None) -> list[BinanceTrade]: - """ - Get recent market trades. - - Recent Trades List. - `GET /api/v3/trades` - - Parameters - ---------- - symbol : str - The trading pair. - limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - list[BinanceTrade] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#recent-trades-list - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "trades", - payload=payload, - ) - - return self._decoder_trades.decode(raw) - - async def historical_trades( - self, - symbol: str, - from_id: Optional[int] = None, - limit: Optional[int] = None, - ) -> dict[str, Any]: - """ - Get older market trades. - - Old Trade Lookup. - `GET /api/v3/historicalTrades` - - Parameters - ---------- - symbol : str - The trading pair. - from_id : int, optional - The trade ID to fetch from. Default gets most recent trades. - limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#old-trade-lookup - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - if from_id is not None: - payload["fromId"] = str(from_id) - - raw: bytes = await self.client.limit_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "historicalTrades", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def agg_trades( - self, - symbol: str, - from_id: Optional[int] = None, - start_time_ms: Optional[int] = None, - end_time_ms: Optional[int] = None, - limit: Optional[int] = None, - ) -> dict[str, Any]: - """ - Get recent aggregated market trades. - - Compressed/Aggregate Trades List. - `GET /api/v3/aggTrades` - - Parameters - ---------- - symbol : str - The trading pair. - from_id : int, optional - The trade ID to fetch from. Default gets most recent trades. - start_time_ms : int, optional - The UNIX timestamp (milliseconds) to get aggregate trades from INCLUSIVE. - end_time_ms: int, optional - The UNIX timestamp (milliseconds) to get aggregate trades until INCLUSIVE. - limit : int, optional - The limit for the response. Default 500; max 1000. 
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#compressed-aggregate-trades-list - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if from_id is not None: - payload["fromId"] = str(from_id) - if start_time_ms is not None: - payload["startTime"] = str(start_time_ms) - if end_time_ms is not None: - payload["endTime"] = str(end_time_ms) - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "aggTrades", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def klines( - self, - symbol: str, - interval: str, - start_time_ms: Optional[int] = None, - end_time_ms: Optional[int] = None, - limit: Optional[int] = None, - ) -> list[list[Any]]: - """ - Kline/Candlestick Data. - - `GET /api/v3/klines` - - Parameters - ---------- - symbol : str - The trading pair. - interval : str - The interval of kline, e.g 1m, 5m, 1h, 1d, etc. - start_time_ms : int, optional - The UNIX timestamp (milliseconds) to get aggregate trades from INCLUSIVE. - end_time_ms: int, optional - The UNIX timestamp (milliseconds) to get aggregate trades until INCLUSIVE. - limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - list[list[Any]] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "interval": interval, + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, } - if start_time_ms is not None: - payload["startTime"] = str(start_time_ms) - if end_time_ms is not None: - payload["endTime"] = str(end_time_ms) - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "klines", - payload=payload, + url_path = base_endpoint + "exchangeInfo" + super().__init__( + client, + methods, + url_path, ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceFuturesExchangeInfo) - return msgspec.json.decode(raw) - - async def avg_price(self, symbol: str) -> dict[str, Any]: - """ - Get the current average price for the given symbol. - - `GET /api/v3/avgPrice` - - Parameters - ---------- - symbol : str - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#current-average-price - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "avgPrice", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def ticker_24hr(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - 24hr Ticker Price Change Statistics. - - `GET /api/v3/ticker/24hr` + async def _get(self) -> BinanceFuturesExchangeInfo: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, None) + return self._get_resp_decoder.decode(raw) - Parameters - ---------- - symbol : str, optional - The trading pair. 
- Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#24hr-ticker-price-change-statistics - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/24hr", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def ticker_price(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - Symbol Price Ticker. - - `GET /api/v3/ticker/price` - - Parameters - ---------- - symbol : str, optional - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#symbol-price-ticker +class BinanceFuturesMarketHttpAPI(BinanceMarketHttpAPI): + """ + Provides access to the `Binance Futures` HTTP REST API. - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint. + """ - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/price", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, + ): + super().__init__( + client=client, + account_type=account_type, ) - return msgspec.json.decode(raw) - - async def book_ticker(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - Symbol Order Book Ticker. - - `GET /api/v3/ticker/bookTicker` - - Parameters - ---------- - symbol : str, optional - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol).upper() + if not account_type.is_futures: + raise RuntimeError( # pragma: no cover (design-time error) + f"`BinanceAccountType` not FUTURES_USDT or FUTURES_COIN, was {account_type}", # pragma: no cover + ) - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/bookTicker", - payload=payload, + self._endpoint_futures_exchange_info = BinanceFuturesExchangeInfoHttp( + client, + self.base_endpoint, ) - return msgspec.json.decode(raw) + async def query_futures_exchange_info(self) -> BinanceFuturesExchangeInfo: + """Retrieve Binance Futures exchange information.""" + return await self._endpoint_futures_exchange_info._get() diff --git a/nautilus_trader/adapters/binance/futures/http/user.py b/nautilus_trader/adapters/binance/futures/http/user.py index fb71ef9ba3c9..9b82301613b0 100644 --- a/nautilus_trader/adapters/binance/futures/http/user.py +++ b/nautilus_trader/adapters/binance/futures/http/user.py @@ -13,17 +13,13 @@ # limitations under the License. 
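# Usage sketch for the futures market API defined above: exchangeInfo is an unsigned
# request (BinanceSecurityType.NONE), so only a configured client and the account type
# are needed; client construction is assumed from elsewhere:
from nautilus_trader.adapters.binance.common.enums import BinanceAccountType
from nautilus_trader.adapters.binance.http.client import BinanceHttpClient

async def fetch_exchange_info(client: BinanceHttpClient) -> None:
    market = BinanceFuturesMarketHttpAPI(
        client=client,
        account_type=BinanceAccountType.FUTURES_USDT,
    )
    info = await market.query_futures_exchange_info()  # BinanceFuturesExchangeInfo
    print(info)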
# ------------------------------------------------------------------------------------------------- -from typing import Any - -import msgspec from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.schemas import BinanceListenKey from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.core.correctness import PyCondition +from nautilus_trader.adapters.binance.http.user import BinanceUserDataHttpAPI -class BinanceFuturesUserDataHttpAPI: +class BinanceFuturesUserDataHttpAPI(BinanceUserDataHttpAPI): """ Provides access to the `Binance Futures` User Data HTTP REST API. @@ -31,6 +27,8 @@ class BinanceFuturesUserDataHttpAPI: ---------- client : BinanceHttpClient The Binance REST API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint. """ def __init__( @@ -38,101 +36,12 @@ def __init__( client: BinanceHttpClient, account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, ): - PyCondition.not_none(client, "client") - - self.client = client - self.account_type = account_type + super().__init__( + client=client, + account_type=account_type, + ) - if account_type == BinanceAccountType.FUTURES_USDT: - self.BASE_ENDPOINT = "/fapi/v1/" - elif account_type == BinanceAccountType.FUTURES_COIN: - self.BASE_ENDPOINT = "/dapi/v1/" - else: + if not account_type.is_futures: raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover + f"`BinanceAccountType` not FUTURES_USDT or FUTURES_COIN, was {account_type}", # pragma: no cover (design-time error) # noqa ) - - async def create_listen_key(self) -> BinanceListenKey: - """ - Create a new listen key for the Binance FUTURES_USDT or FUTURES_COIN API. - - Start a new user data stream. The stream will close after 60 minutes - unless a keepalive is sent. If the account has an active listenKey, - that listenKey will be returned and its validity will be extended for 60 - minutes. - - Create a ListenKey (USER_STREAM). - - Returns - ------- - BinanceListenKey - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#start-user-data-stream-user_stream - - """ - raw: bytes = await self.client.send_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "listenKey", - ) - - return msgspec.json.decode(raw, type=BinanceListenKey) - - async def ping_listen_key(self, key: str) -> dict[str, Any]: - """ - Ping/Keep-alive a listen key for the Binance FUTURES_USDT or FUTURES_COIN API. - - Keep-alive a user data stream to prevent a time-out. User data streams - will close after 60 minutes. It's recommended to send a ping about every - 30 minutes. - - Ping/Keep-alive a ListenKey (USER_STREAM). - - Parameters - ---------- - key : str - The listen key for the request. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#keepalive-user-data-stream-user_stream - - """ - raw: bytes = await self.client.send_request( - http_method="PUT", - url_path=self.BASE_ENDPOINT + "listenKey", - payload={"listenKey": key}, - ) - - return msgspec.json.decode(raw) - - async def close_listen_key(self, key: str) -> dict[str, Any]: - """ - Close a user data stream for the Binance FUTURES_USDT or FUTURES_COIN API. - - Parameters - ---------- - key : str - The listen key for the request. 
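# The listen-key lifecycle now lives on the BinanceUserDataHttpAPI base class; the
# futures subclass above only validates the account type and selects the /fapi/ or
# /dapi/ path. A sketch assuming the base class keeps the method names shown in the
# removed code (create_listen_key, ping_listen_key, close_listen_key), the `listenKey`
# response field, and Binance's ~60-minute key expiry:
async def run_user_stream(user_api: "BinanceFuturesUserDataHttpAPI") -> None:
    key = (await user_api.create_listen_key()).listenKey  # assumed schema field
    try:
        await user_api.ping_listen_key(key)  # Binance recommends ~every 30 minutes
    finally:
        await user_api.close_listen_key(key)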
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/futures/en/#close-user-data-stream-user_stream - - """ - raw: bytes = await self.client.send_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "listenKey", - payload={"listenKey": key}, - ) - - return msgspec.json.decode(raw) diff --git a/nautilus_trader/adapters/binance/futures/http/wallet.py b/nautilus_trader/adapters/binance/futures/http/wallet.py index 9b94489f5b91..90b94a16490e 100644 --- a/nautilus_trader/adapters/binance/futures/http/wallet.py +++ b/nautilus_trader/adapters/binance/futures/http/wallet.py @@ -17,7 +17,66 @@ import msgspec +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.futures.schemas.wallet import BinanceFuturesCommissionRate from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.common.clock import LiveClock + + +class BinanceFuturesCommissionRateHttp(BinanceHttpEndpoint): + """ + Endpoint of maker/taker commission rate information. + + `GET /fapi/v1/commissionRate` + `GET /dapi/v1/commissionRate` + + References + ---------- + https://binance-docs.github.io/apidocs/futures/en/#user-commission-rate-user_data + https://binance-docs.github.io/apidocs/delivery/en/#user-commission-rate-user_data + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + super().__init__( + client, + methods, + base_endpoint + "commissionRate", + ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceFuturesCommissionRate) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for fetching commission rate. + + Parameters + ---------- + symbol : BinanceSymbol + Receive commission rate of the provided symbol. + timestamp : str + Millisecond timestamp of the request. + recvWindow : str, optional + The number of milliseconds after timestamp the request is valid. + """ + + timestamp: str + symbol: BinanceSymbol + recvWindow: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> BinanceFuturesCommissionRate: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) class BinanceFuturesWalletHttpAPI: @@ -30,45 +89,45 @@ class BinanceFuturesWalletHttpAPI: The Binance REST API client. """ - def __init__(self, client: BinanceHttpClient): - self.client = client - - async def commission_rate( + def __init__( self, - symbol: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> list[dict[str, str]]: - """ - Fetch trade fee. + client: BinanceHttpClient, + clock: LiveClock, + account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, + ): + self.client = client + self._clock = clock - `GET /sapi/v1/asset/tradeFee` + if account_type == BinanceAccountType.FUTURES_USDT: + self.base_endpoint = "/fapi/v1/" + elif account_type == BinanceAccountType.FUTURES_COIN: + self.base_endpoint = "/dapi/v1/" - Parameters - ---------- - symbol : str, optional - The trading pair. If None then queries for all symbols. 
- recv_window : int, optional - The acceptable receive window for the response. + if not account_type.is_futures: + raise RuntimeError( # pragma: no cover (design-time error) + f"`BinanceAccountType` not FUTURES_USDT or FUTURES_COIN, was {account_type}", # pragma: no cover + ) - Returns - ------- - list[dict[str, str]] + self._endpoint_futures_commission_rate = BinanceFuturesCommissionRateHttp( + client, + self.base_endpoint, + ) - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#trade-fee-user_data + def _timestamp(self) -> str: + """Create Binance timestamp from internal clock.""" + return str(self._clock.timestamp_ms()) - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = symbol - if recv_window is not None: - payload["recv_window"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path="/fapi/v1/commissionRate", - payload=payload, + async def query_futures_commission_rate( + self, + symbol: str, + recv_window: Optional[str] = None, + ) -> BinanceFuturesCommissionRate: + """Get Futures commission rates for a given symbol.""" + rate = await self._endpoint_futures_commission_rate._get( + parameters=self._endpoint_futures_commission_rate.GetParameters( + timestamp=self._timestamp(), + symbol=BinanceSymbol(symbol), + recvWindow=recv_window, + ), ) - - return msgspec.json.decode(raw) + return rate diff --git a/nautilus_trader/adapters/binance/futures/parsing/__init__.py b/nautilus_trader/adapters/binance/futures/parsing/__init__.py deleted file mode 100644 index ca16b56e4794..000000000000 --- a/nautilus_trader/adapters/binance/futures/parsing/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- diff --git a/nautilus_trader/adapters/binance/futures/parsing/account.py b/nautilus_trader/adapters/binance/futures/parsing/account.py deleted file mode 100644 index 6cede2ce4623..000000000000 --- a/nautilus_trader/adapters/binance/futures/parsing/account.py +++ /dev/null @@ -1,72 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
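# Usage sketch for the wallet API above: commissionRate is a signed USER_DATA request,
# and the API stamps the timestamp internally via its LiveClock-backed `_timestamp()`
# helper:
async def fetch_commission_rate(wallet: "BinanceFuturesWalletHttpAPI") -> None:
    rate = await wallet.query_futures_commission_rate(symbol="BTCUSDT")
    print(rate)  # BinanceFuturesCommissionRate (maker/taker rates)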
-# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal - -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAssetInfo -from nautilus_trader.adapters.binance.futures.schemas.user import BinanceFuturesBalance -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.objects import AccountBalance -from nautilus_trader.model.objects import MarginBalance -from nautilus_trader.model.objects import Money - - -def parse_account_balances_http(assets: list[BinanceFuturesAssetInfo]) -> list[AccountBalance]: - balances: list[AccountBalance] = [] - for a in assets: - currency = Currency.from_str(a.asset) - total = Decimal(a.walletBalance) - locked = Decimal(a.initialMargin) + Decimal(a.maintMargin) - free = total - locked - - balance = AccountBalance( - total=Money(total, currency), - locked=Money(locked, currency), - free=Money(free, currency), - ) - balances.append(balance) - - return balances - - -def parse_account_balances_ws(raw_balances: list[BinanceFuturesBalance]) -> list[AccountBalance]: - balances: list[AccountBalance] = [] - for b in raw_balances: - currency = Currency.from_str(b.a) - free = Decimal(b.wb) - locked = Decimal(0) # TODO(cs): Pending refactoring of accounting - total: Decimal = free + locked - - balance = AccountBalance( - total=Money(total, currency), - locked=Money(locked, currency), - free=Money(free, currency), - ) - balances.append(balance) - - return balances - - -def parse_account_margins_http(assets: list[BinanceFuturesAssetInfo]) -> list[MarginBalance]: - margins: list[MarginBalance] = [] - for a in assets: - currency: Currency = Currency.from_str(a.asset) - margin = MarginBalance( - initial=Money(Decimal(a.initialMargin), currency), - maintenance=Money(Decimal(a.maintMargin), currency), - ) - margins.append(margin) - - return margins diff --git a/nautilus_trader/adapters/binance/futures/parsing/data.py b/nautilus_trader/adapters/binance/futures/parsing/data.py deleted file mode 100644 index e623cd26b6cf..000000000000 --- a/nautilus_trader/adapters/binance/futures/parsing/data.py +++ /dev/null @@ -1,281 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ------------------------------------------------------------------------------------------------- - -from datetime import datetime as dt -from decimal import Decimal - -import msgspec - -from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE -from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType -from nautilus_trader.adapters.binance.common.functions import parse_symbol -from nautilus_trader.adapters.binance.common.schemas import BinanceOrderBookData -from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesMarkPriceData -from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesSymbolInfo -from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesTradeData -from nautilus_trader.adapters.binance.futures.schemas.market import BinanceSymbolFilter -from nautilus_trader.adapters.binance.futures.types import BinanceFuturesMarkPriceUpdate -from nautilus_trader.core.correctness import PyCondition -from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.data.tick import TradeTick -from nautilus_trader.model.enums import AggressorSide -from nautilus_trader.model.enums import BookType -from nautilus_trader.model.enums import CurrencyType -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.instruments.crypto_future import CryptoFuture -from nautilus_trader.model.instruments.crypto_perpetual import CryptoPerpetual -from nautilus_trader.model.objects import PRICE_MAX -from nautilus_trader.model.objects import PRICE_MIN -from nautilus_trader.model.objects import QUANTITY_MAX -from nautilus_trader.model.objects import QUANTITY_MIN -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity -from nautilus_trader.model.orderbook.data import OrderBookSnapshot - - -def parse_perpetual_instrument_http( - symbol_info: BinanceFuturesSymbolInfo, - ts_event: int, - ts_init: int, -) -> CryptoPerpetual: - # Create base asset - base_currency = Currency( - code=symbol_info.baseAsset, - precision=symbol_info.baseAssetPrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.baseAsset, - currency_type=CurrencyType.CRYPTO, - ) - - # Create quote asset - quote_currency = Currency( - code=symbol_info.quoteAsset, - precision=symbol_info.quotePrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.quoteAsset, - currency_type=CurrencyType.CRYPTO, - ) - - native_symbol = Symbol(symbol_info.symbol) - symbol = parse_symbol(symbol_info.symbol, BinanceAccountType.FUTURES_USDT) - instrument_id = InstrumentId(symbol=Symbol(symbol), venue=BINANCE_VENUE) - - # Parse instrument filters - filters: dict[BinanceSymbolFilterType, BinanceSymbolFilter] = { - f.filterType: f for f in symbol_info.filters - } - price_filter: BinanceSymbolFilter = filters[BinanceSymbolFilterType.PRICE_FILTER] - lot_size_filter: BinanceSymbolFilter = filters[BinanceSymbolFilterType.LOT_SIZE] - min_notional_filter: BinanceSymbolFilter = filters[BinanceSymbolFilterType.MIN_NOTIONAL] - - tick_size = price_filter.tickSize.rstrip("0") - step_size = lot_size_filter.stepSize.rstrip("0") - 
PyCondition.in_range(float(tick_size), PRICE_MIN, PRICE_MAX, "tick_size") - PyCondition.in_range(float(step_size), QUANTITY_MIN, QUANTITY_MAX, "step_size") - - price_precision = abs(Decimal(tick_size).as_tuple().exponent) - size_precision = abs(Decimal(step_size).as_tuple().exponent) - price_increment = Price.from_str(tick_size) - size_increment = Quantity.from_str(step_size) - max_quantity = Quantity(float(lot_size_filter.maxQty), precision=size_precision) - min_quantity = Quantity(float(lot_size_filter.minQty), precision=size_precision) - min_notional = None - if filters.get(BinanceSymbolFilterType.MIN_NOTIONAL): - min_notional = Money(min_notional_filter.minNotional, currency=quote_currency) - max_price = Price(float(price_filter.maxPrice), precision=price_precision) - min_price = Price(float(price_filter.minPrice), precision=price_precision) - - # Futures commissions - maker_fee = Decimal("0.000200") # TODO - taker_fee = Decimal("0.000400") # TODO - - if symbol_info.marginAsset == symbol_info.baseAsset: - settlement_currency = base_currency - elif symbol_info.marginAsset == symbol_info.quoteAsset: - settlement_currency = quote_currency - else: - raise ValueError(f"Unrecognized margin asset {symbol_info.marginAsset}") - - # Create instrument - return CryptoPerpetual( - instrument_id=instrument_id, - native_symbol=native_symbol, - base_currency=base_currency, - quote_currency=quote_currency, - settlement_currency=settlement_currency, - is_inverse=False, # No inverse instruments trade on Binance - price_precision=price_precision, - size_precision=size_precision, - price_increment=price_increment, - size_increment=size_increment, - max_quantity=max_quantity, - min_quantity=min_quantity, - max_notional=None, - min_notional=min_notional, - max_price=max_price, - min_price=min_price, - margin_init=Decimal(float(symbol_info.requiredMarginPercent) / 100), - margin_maint=Decimal(float(symbol_info.maintMarginPercent) / 100), - maker_fee=maker_fee, - taker_fee=taker_fee, - ts_event=ts_event, - ts_init=ts_init, - info=msgspec.json.decode(msgspec.json.encode(symbol_info)), - ) - - -def parse_futures_instrument_http( - symbol_info: BinanceFuturesSymbolInfo, - ts_event: int, - ts_init: int, -) -> CryptoFuture: - # Create base asset - base_currency = Currency( - code=symbol_info.baseAsset, - precision=symbol_info.baseAssetPrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.baseAsset, - currency_type=CurrencyType.CRYPTO, - ) - - # Create quote asset - quote_currency = Currency( - code=symbol_info.quoteAsset, - precision=symbol_info.quotePrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.quoteAsset, - currency_type=CurrencyType.CRYPTO, - ) - - native_symbol = Symbol(symbol_info.symbol) - symbol = parse_symbol(symbol_info.symbol, BinanceAccountType.FUTURES_USDT) - instrument_id = InstrumentId(symbol=Symbol(symbol), venue=BINANCE_VENUE) - - # Parse instrument filters - filters: dict[BinanceSymbolFilterType, BinanceSymbolFilter] = { - f.filterType: f for f in symbol_info.filters - } - price_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.PRICE_FILTER) - lot_size_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.LOT_SIZE) - min_notional_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.MIN_NOTIONAL) - - tick_size = price_filter.tickSize.rstrip("0") - step_size = lot_size_filter.stepSize.rstrip("0") - PyCondition.in_range(float(tick_size), PRICE_MIN, PRICE_MAX, "tick_size") - 
PyCondition.in_range(float(step_size), QUANTITY_MIN, QUANTITY_MAX, "step_size") - - price_precision = abs(Decimal(tick_size).as_tuple().exponent) - size_precision = abs(Decimal(step_size).as_tuple().exponent) - price_increment = Price.from_str(tick_size) - size_increment = Quantity.from_str(step_size) - max_quantity = Quantity(float(lot_size_filter.maxQty), precision=size_precision) - min_quantity = Quantity(float(lot_size_filter.minQty), precision=size_precision) - min_notional = None - if filters.get(BinanceSymbolFilterType.MIN_NOTIONAL): - min_notional = Money(min_notional_filter.minNotional, currency=quote_currency) - max_price = Price(float(price_filter.maxPrice), precision=price_precision) - min_price = Price(float(price_filter.minPrice), precision=price_precision) - - # Futures commissions - maker_fee = Decimal("0.000200") # TODO - taker_fee = Decimal("0.000400") # TODO - - if symbol_info.marginAsset == symbol_info.baseAsset: - settlement_currency = base_currency - elif symbol_info.marginAsset == symbol_info.quoteAsset: - settlement_currency = quote_currency - else: - raise ValueError(f"Unrecognized margin asset {symbol_info.marginAsset}") - - # Create instrument - return CryptoFuture( - instrument_id=instrument_id, - native_symbol=native_symbol, - underlying=base_currency, - quote_currency=quote_currency, - settlement_currency=settlement_currency, - expiry_date=dt.strptime(symbol_info.symbol.partition("_")[2], "%y%m%d").date(), - price_precision=price_precision, - size_precision=size_precision, - price_increment=price_increment, - size_increment=size_increment, - max_quantity=max_quantity, - min_quantity=min_quantity, - max_notional=None, - min_notional=min_notional, - max_price=max_price, - min_price=min_price, - margin_init=Decimal(float(symbol_info.requiredMarginPercent) / 100), - margin_maint=Decimal(float(symbol_info.maintMarginPercent) / 100), - maker_fee=maker_fee, - taker_fee=taker_fee, - ts_event=ts_event, - ts_init=ts_init, - info=msgspec.json.decode(msgspec.json.encode(symbol_info)), - ) - - -def parse_futures_book_snapshot( - instrument_id: InstrumentId, - data: BinanceOrderBookData, - ts_init: int, -) -> OrderBookSnapshot: - return OrderBookSnapshot( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - bids=[[float(o[0]), float(o[1])] for o in data.b], - asks=[[float(o[0]), float(o[1])] for o in data.a], - ts_event=millis_to_nanos(data.T), - ts_init=ts_init, - sequence=data.u, - ) - - -def parse_futures_mark_price_ws( - instrument_id: InstrumentId, - data: BinanceFuturesMarkPriceData, - ts_init: int, -) -> BinanceFuturesMarkPriceUpdate: - return BinanceFuturesMarkPriceUpdate( - instrument_id=instrument_id, - mark=Price.from_str(data.p), - index=Price.from_str(data.i), - estimated_settle=Price.from_str(data.P), - funding_rate=Decimal(data.r), - ts_next_funding=millis_to_nanos(data.T), - ts_event=millis_to_nanos(data.E), - ts_init=ts_init, - ) - - -def parse_futures_trade_tick_ws( - instrument_id: InstrumentId, - data: BinanceFuturesTradeData, - ts_init: int, -) -> TradeTick: - return TradeTick( - instrument_id=instrument_id, - price=Price.from_str(data.p), - size=Quantity.from_str(data.q), - aggressor_side=AggressorSide.SELLER if data.m else AggressorSide.BUYER, - trade_id=TradeId(str(data.t)), - ts_event=millis_to_nanos(data.T), - ts_init=ts_init, - ) diff --git a/nautilus_trader/adapters/binance/futures/parsing/execution.py b/nautilus_trader/adapters/binance/futures/parsing/execution.py deleted file mode 100644 index 76cd590f8586..000000000000 --- 
a/nautilus_trader/adapters/binance/futures/parsing/execution.py +++ /dev/null @@ -1,207 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal - -from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesOrderType -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesTimeInForce -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesWorkingType -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesAccountTrade -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesOrder -from nautilus_trader.adapters.binance.futures.schemas.account import BinanceFuturesPositionRisk -from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.execution.reports import OrderStatusReport -from nautilus_trader.execution.reports import PositionStatusReport -from nautilus_trader.execution.reports import TradeReport -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.enums import LiquiditySide -from nautilus_trader.model.enums import OrderSide -from nautilus_trader.model.enums import OrderStatus -from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import PositionSide -from nautilus_trader.model.enums import TimeInForce -from nautilus_trader.model.enums import TrailingOffsetType -from nautilus_trader.model.enums import TriggerType -from nautilus_trader.model.identifiers import AccountId -from nautilus_trader.model.identifiers import ClientOrderId -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import PositionId -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.identifiers import VenueOrderId -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity -from nautilus_trader.model.orders.base import Order - - -def binance_order_type(order: Order) -> BinanceFuturesOrderType: - if order.order_type == OrderType.MARKET: - return BinanceFuturesOrderType.MARKET - elif order.order_type == OrderType.LIMIT: - return BinanceFuturesOrderType.LIMIT - elif order.order_type == OrderType.STOP_MARKET: - return BinanceFuturesOrderType.STOP_MARKET - elif order.order_type == OrderType.STOP_LIMIT: - return BinanceFuturesOrderType.STOP - elif order.order_type == OrderType.MARKET_IF_TOUCHED: - return BinanceFuturesOrderType.TAKE_PROFIT_MARKET - elif order.order_type == OrderType.LIMIT_IF_TOUCHED: - return 
BinanceFuturesOrderType.TAKE_PROFIT - elif order.order_type == OrderType.TRAILING_STOP_MARKET: - return BinanceFuturesOrderType.TRAILING_STOP_MARKET - else: - raise RuntimeError("invalid `OrderType`") # pragma: no cover (design-time error) - - -def parse_order_type(order_type: BinanceFuturesOrderType) -> OrderType: - if order_type == BinanceFuturesOrderType.STOP: - return OrderType.STOP_LIMIT - elif order_type == BinanceFuturesOrderType.STOP_MARKET: - return OrderType.STOP_MARKET - elif order_type == BinanceFuturesOrderType.TAKE_PROFIT: - return OrderType.LIMIT_IF_TOUCHED - elif order_type == BinanceFuturesOrderType.TAKE_PROFIT_MARKET: - return OrderType.MARKET_IF_TOUCHED - else: - return OrderType[order_type.value] - - -def parse_order_status(status: BinanceOrderStatus) -> OrderStatus: - if status == BinanceOrderStatus.NEW: - return OrderStatus.ACCEPTED - elif status == BinanceOrderStatus.CANCELED: - return OrderStatus.CANCELED - elif status == BinanceOrderStatus.PARTIALLY_FILLED: - return OrderStatus.PARTIALLY_FILLED - elif status == BinanceOrderStatus.FILLED: - return OrderStatus.FILLED - elif status == BinanceOrderStatus.NEW_ADL: - return OrderStatus.FILLED - elif status == BinanceOrderStatus.NEW_INSURANCE: - return OrderStatus.FILLED - elif status == BinanceOrderStatus.EXPIRED: - return OrderStatus.EXPIRED - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"unrecognized order status, was {status}", # pragma: no cover - ) - - -def parse_time_in_force(time_in_force: BinanceFuturesTimeInForce) -> TimeInForce: - if time_in_force == BinanceFuturesTimeInForce.GTX: - return TimeInForce.GTC - else: - return TimeInForce[time_in_force.value] - - -def parse_trigger_type(working_type: BinanceFuturesWorkingType) -> TriggerType: - if working_type == BinanceFuturesWorkingType.CONTRACT_PRICE: - return TriggerType.LAST_TRADE - elif working_type == BinanceFuturesWorkingType.MARK_PRICE: - return TriggerType.MARK_PRICE - else: - return TriggerType.NO_TRIGGER # pragma: no cover (design-time error) - - -def parse_order_report_http( - account_id: AccountId, - instrument_id: InstrumentId, - data: BinanceFuturesOrder, - report_id: UUID4, - ts_init: int, -) -> OrderStatusReport: - price = Decimal(data.price) - trigger_price = Decimal(data.stopPrice) - avg_px = Decimal(data.avgPrice) - time_in_force = BinanceFuturesTimeInForce(data.timeInForce.upper()) - return OrderStatusReport( - account_id=account_id, - instrument_id=instrument_id, - client_order_id=ClientOrderId(data.clientOrderId) if data.clientOrderId != "" else None, - venue_order_id=VenueOrderId(str(data.orderId)), - order_side=OrderSide[data.side.upper()], - order_type=parse_order_type(data.type), - time_in_force=parse_time_in_force(time_in_force), - order_status=parse_order_status(data.status), - price=Price.from_str(data.price) if price is not None else None, - quantity=Quantity.from_str(data.origQty), - filled_qty=Quantity.from_str(data.executedQty), - avg_px=avg_px if avg_px > 0 else None, - post_only=time_in_force == BinanceFuturesTimeInForce.GTX, - reduce_only=data.reduceOnly, - report_id=report_id, - ts_accepted=millis_to_nanos(data.time), - ts_last=millis_to_nanos(data.updateTime), - ts_init=ts_init, - trigger_price=Price.from_str(str(trigger_price)) if trigger_price > 0 else None, - trigger_type=parse_trigger_type(data.workingType), - trailing_offset=Decimal(data.priceRate) * 100 if data.priceRate is not None else None, - trailing_offset_type=TrailingOffsetType.BASIS_POINTS - if data.priceRate is not None - else 
TrailingOffsetType.NO_TRAILING_OFFSET, - ) - - -def parse_trade_report_http( - account_id: AccountId, - instrument_id: InstrumentId, - data: BinanceFuturesAccountTrade, - report_id: UUID4, - ts_init: int, -) -> TradeReport: - return TradeReport( - account_id=account_id, - instrument_id=instrument_id, - venue_order_id=VenueOrderId(str(data.orderId)), - venue_position_id=PositionId(f"{instrument_id}-{data.positionSide.value}"), - trade_id=TradeId(str(data.id)), - order_side=OrderSide[data.side.value], - last_qty=Quantity.from_str(data.qty), - last_px=Price.from_str(data.price), - commission=Money(data.commission, Currency.from_str(data.commissionAsset)), - liquidity_side=LiquiditySide.MAKER if data.maker else LiquiditySide.TAKER, - report_id=report_id, - ts_event=millis_to_nanos(data.time), - ts_init=ts_init, - ) - - -def parse_position_report_http( - account_id: AccountId, - instrument_id: InstrumentId, - data: BinanceFuturesPositionRisk, - report_id: UUID4, - ts_init: int, -) -> PositionStatusReport: - net_size = Decimal(data.positionAmt) - - if net_size > 0: - position_side = PositionSide.LONG - elif net_size < 0: - position_side = PositionSide.SHORT - else: - position_side = PositionSide.FLAT - - return PositionStatusReport( - account_id=account_id, - instrument_id=instrument_id, - position_side=position_side, - quantity=Quantity.from_str(str(abs(net_size))), - report_id=report_id, - ts_last=ts_init, - ts_init=ts_init, - ) diff --git a/nautilus_trader/adapters/binance/futures/providers.py b/nautilus_trader/adapters/binance/futures/providers.py index 1674339b3120..c8df2fb6504e 100644 --- a/nautilus_trader/adapters/binance/futures/providers.py +++ b/nautilus_trader/adapters/binance/futures/providers.py @@ -13,26 +13,42 @@ # limitations under the License. 
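Note on the removed `parsing/execution.py` above: the deleted `parse_position_report_http` derived the position side from the signed `positionAmt` and reported the quantity as its absolute value. That logic now lives on the schema structs themselves (see `BinanceFuturesPositionRisk.parse_to_position_status_report` further down). A minimal sketch of the mapping, assuming `nautilus_trader`'s `PositionSide` enum:

```python
# Sketch of the signed-position mapping from the removed
# parse_position_report_http (the logic now lives on the schema structs).
from decimal import Decimal

from nautilus_trader.model.enums import PositionSide


def position_side(position_amt: str) -> PositionSide:
    net_size = Decimal(position_amt)
    if net_size > 0:
        return PositionSide.LONG
    elif net_size < 0:
        return PositionSide.SHORT
    return PositionSide.FLAT


assert position_side("-2.5") == PositionSide.SHORT  # quantity reported as abs(net_size)
```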
# ------------------------------------------------------------------------------------------------- -import time -from typing import Any, Optional +from datetime import datetime as dt +from decimal import Decimal +from typing import Optional + +import msgspec from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType +from nautilus_trader.adapters.binance.common.schemas.market import BinanceSymbolFilter +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesContractStatus from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesContractType from nautilus_trader.adapters.binance.futures.http.market import BinanceFuturesMarketHttpAPI from nautilus_trader.adapters.binance.futures.http.wallet import BinanceFuturesWalletHttpAPI -from nautilus_trader.adapters.binance.futures.parsing.data import parse_futures_instrument_http -from nautilus_trader.adapters.binance.futures.parsing.data import parse_perpetual_instrument_http -from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesExchangeInfo from nautilus_trader.adapters.binance.futures.schemas.market import BinanceFuturesSymbolInfo +from nautilus_trader.adapters.binance.futures.schemas.wallet import BinanceFuturesCommissionRate from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.error import BinanceClientError +from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.logging import Logger from nautilus_trader.common.providers import InstrumentProvider from nautilus_trader.config import InstrumentProviderConfig from nautilus_trader.core.correctness import PyCondition from nautilus_trader.core.datetime import millis_to_nanos from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import Symbol +from nautilus_trader.model.instruments.crypto_future import CryptoFuture +from nautilus_trader.model.instruments.crypto_perpetual import CryptoPerpetual +from nautilus_trader.model.objects import PRICE_MAX +from nautilus_trader.model.objects import PRICE_MIN +from nautilus_trader.model.objects import QUANTITY_MAX +from nautilus_trader.model.objects import QUANTITY_MIN +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity class BinanceFuturesInstrumentProvider(InstrumentProvider): @@ -53,6 +69,7 @@ def __init__( self, client: BinanceHttpClient, logger: Logger, + clock: LiveClock, account_type: BinanceAccountType = BinanceAccountType.FUTURES_USDT, config: Optional[InstrumentProviderConfig] = None, ): @@ -64,22 +81,49 @@ def __init__( self._client = client self._account_type = account_type + self._clock = clock - self._http_wallet = BinanceFuturesWalletHttpAPI(self._client) + self._http_wallet = BinanceFuturesWalletHttpAPI( + self._client, + clock=self._clock, + account_type=account_type, + ) self._http_market = BinanceFuturesMarketHttpAPI(self._client, account_type=account_type) self._log_warnings = config.log_warnings if config else True + self._decoder = msgspec.json.Decoder() + self._encoder = msgspec.json.Encoder() + async def load_all_async(self, filters: Optional[dict] = None) -> None: filters_str = "..." 
if not filters else f" with filters {filters}..." self._log.info(f"Loading all instruments{filters_str}") # Get exchange info for all assets - exchange_info: BinanceFuturesExchangeInfo = await self._http_market.exchange_info() + exchange_info = await self._http_market.query_futures_exchange_info() + + self._log.warning( + "Currently not requesting actual trade fees. All instruments will have zero fees.", + ) for symbol_info in exchange_info.symbols: + fee: Optional[BinanceFuturesCommissionRate] = None + # TODO(cs): This won't work for 174 instruments, we'll have to pre-request these + # in some other way. + # if not self._client.base_url.__contains__("testnet.binancefuture.com"): + # try: + # # Get current commission rates for the symbol + # fee = await self._http_wallet.query_futures_commission_rate(symbol_info.symbol) + # print(fee) + # except BinanceClientError as e: + # self._log.error( + # "Cannot load instruments: API key authentication failed " + # f"(this is needed to fetch the applicable account fee tier). {e.message}", + # ) + # return + self._parse_instrument( symbol_info=symbol_info, - fees=None, + fee=fee, ts_event=millis_to_nanos(exchange_info.serverTime), ) @@ -100,18 +144,36 @@ async def load_ids_async( self._log.info(f"Loading instruments {instrument_ids}{filters_str}.") # Extract all symbol strings - symbols: list[str] = [ - instrument_id.symbol.value.replace("-PERP", "") for instrument_id in instrument_ids + symbols = [ + str(BinanceSymbol(instrument_id.symbol.value)) for instrument_id in instrument_ids ] # Get exchange info for all assets - exchange_info: BinanceFuturesExchangeInfo = await self._http_market.exchange_info( - symbols=symbols, + exchange_info = await self._http_market.query_futures_exchange_info() + symbol_info_dict: dict[str, BinanceFuturesSymbolInfo] = { + info.symbol: info for info in exchange_info.symbols + } + + self._log.warning( + "Currently not requesting actual trade fees. All instruments will have zero fees.", ) - for symbol_info in exchange_info.symbols: + for symbol in symbols: + fee: Optional[BinanceFuturesCommissionRate] = None + # TODO(cs): This won't work for 174 instruments, we'll have to pre-request these + # in some other way. + # if not self._client.base_url.__contains__("testnet.binancefuture.com"): + # try: + # # Get current commission rates for the symbol + # fee = await self._http_wallet.query_futures_commission_rate(symbol) + # except BinanceClientError as e: + # self._log.error( + # "Cannot load instruments: API key authentication failed " + # f"(this is needed to fetch the applicable account fee tier). {e.message}", + # ) + self._parse_instrument( - symbol_info=symbol_info, - fees=None, + symbol_info=symbol_info_dict[symbol], + fee=fee, ts_event=millis_to_nanos(exchange_info.serverTime), ) @@ -122,24 +184,36 @@ async def load_async(self, instrument_id: InstrumentId, filters: Optional[dict] filters_str = "..." if not filters else f" with filters {filters}..." 
self._log.debug(f"Loading instrument {instrument_id}{filters_str}.") - symbol = instrument_id.symbol.value.replace("-PERP", "") + symbol = str(BinanceSymbol(instrument_id.symbol.value)) # Get exchange info for all assets - exchange_info: BinanceFuturesExchangeInfo = await self._http_market.exchange_info( - symbol=symbol, + exchange_info = await self._http_market.query_futures_exchange_info() + symbol_info_dict: dict[str, BinanceFuturesSymbolInfo] = { + info.symbol: info for info in exchange_info.symbols + } + + fee: Optional[BinanceFuturesCommissionRate] = None + if not self._client.base_url.__contains__("testnet.binancefuture.com"): + try: + # Get current commission rates for the symbol + fee = await self._http_wallet.query_futures_commission_rate(symbol) + except BinanceClientError as e: + self._log.error( + "Cannot load instruments: API key authentication failed " + f"(this is needed to fetch the applicable account fee tier). {e.message}", + ) + + self._parse_instrument( + symbol_info=symbol_info_dict[symbol], + ts_event=millis_to_nanos(exchange_info.serverTime), + fee=fee, ) - for symbol_info in exchange_info.symbols: - self._parse_instrument( - symbol_info=symbol_info, - fees=None, - ts_event=millis_to_nanos(exchange_info.serverTime), - ) - def _parse_instrument( + def _parse_instrument( # noqa (C901 too complex) self, symbol_info: BinanceFuturesSymbolInfo, - fees: Optional[dict[str, Any]], ts_event: int, + fee: Optional[BinanceFuturesCommissionRate] = None, ) -> None: contract_type_str = symbol_info.contractType @@ -147,15 +221,89 @@ def _parse_instrument( contract_type_str == "" or symbol_info.status == BinanceFuturesContractStatus.PENDING_TRADING ): + self._log.debug(f"Instrument not yet defined: {symbol_info.symbol}") return # Not yet defined + ts_init = self._clock.timestamp_ns() try: + # Create quote and base assets + base_currency = symbol_info.parse_to_base_currency() + quote_currency = symbol_info.parse_to_quote_currency() + + binance_symbol = BinanceSymbol(symbol_info.symbol).parse_binance_to_internal( + self._account_type, + ) + native_symbol = Symbol(binance_symbol) + instrument_id = InstrumentId(symbol=native_symbol, venue=BINANCE_VENUE) + + # Parse instrument filters + filters: dict[BinanceSymbolFilterType, BinanceSymbolFilter] = { + f.filterType: f for f in symbol_info.filters + } + price_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.PRICE_FILTER) + lot_size_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.LOT_SIZE) + min_notional_filter: BinanceSymbolFilter = filters.get( + BinanceSymbolFilterType.MIN_NOTIONAL, + ) + + tick_size = price_filter.tickSize.rstrip("0") + step_size = lot_size_filter.stepSize.rstrip("0") + PyCondition.in_range(float(tick_size), PRICE_MIN, PRICE_MAX, "tick_size") + PyCondition.in_range(float(step_size), QUANTITY_MIN, QUANTITY_MAX, "step_size") + + price_precision = abs(int(Decimal(tick_size).as_tuple().exponent)) + size_precision = abs(int(Decimal(step_size).as_tuple().exponent)) + price_increment = Price.from_str(tick_size) + size_increment = Quantity.from_str(step_size) + max_quantity = Quantity(float(lot_size_filter.maxQty), precision=size_precision) + min_quantity = Quantity(float(lot_size_filter.minQty), precision=size_precision) + min_notional = None + if filters.get(BinanceSymbolFilterType.MIN_NOTIONAL): + min_notional = Money(min_notional_filter.minNotional, currency=quote_currency) + max_price = Price(float(price_filter.maxPrice), precision=price_precision) + min_price = 
Price(float(price_filter.minPrice), precision=price_precision) + + # Futures commissions + maker_fee = Decimal(0) + taker_fee = Decimal(0) + if fee: + assert fee.symbol == symbol_info.symbol + maker_fee = Decimal(fee.makerCommissionRate) + taker_fee = Decimal(fee.takerCommissionRate) + + if symbol_info.marginAsset == symbol_info.baseAsset: + settlement_currency = base_currency + elif symbol_info.marginAsset == symbol_info.quoteAsset: + settlement_currency = quote_currency + else: + raise ValueError(f"Unrecognized margin asset {symbol_info.marginAsset}") + contract_type = BinanceFuturesContractType(contract_type_str) if contract_type == BinanceFuturesContractType.PERPETUAL: - instrument = parse_perpetual_instrument_http( - symbol_info=symbol_info, + instrument = CryptoPerpetual( + instrument_id=instrument_id, + native_symbol=native_symbol, + base_currency=base_currency, + quote_currency=quote_currency, + settlement_currency=settlement_currency, + is_inverse=False, # No inverse instruments trade on Binance + price_precision=price_precision, + size_precision=size_precision, + price_increment=price_increment, + size_increment=size_increment, + max_quantity=max_quantity, + min_quantity=min_quantity, + max_notional=None, + min_notional=min_notional, + max_price=max_price, + min_price=min_price, + margin_init=Decimal(float(symbol_info.requiredMarginPercent) / 100), + margin_maint=Decimal(float(symbol_info.maintMarginPercent) / 100), + maker_fee=maker_fee, + taker_fee=taker_fee, ts_event=ts_event, - ts_init=time.time_ns(), + ts_init=ts_init, + info=self._decoder.decode(self._encoder.encode(symbol_info)), ) self.add_currency(currency=instrument.base_currency) elif contract_type in ( @@ -164,10 +312,30 @@ def _parse_instrument( BinanceFuturesContractType.NEXT_MONTH, BinanceFuturesContractType.NEXT_QUARTER, ): - instrument = parse_futures_instrument_http( - symbol_info=symbol_info, + instrument = CryptoFuture( + instrument_id=instrument_id, + native_symbol=native_symbol, + underlying=base_currency, + quote_currency=quote_currency, + settlement_currency=settlement_currency, + expiry_date=dt.strptime(symbol_info.symbol.partition("_")[2], "%y%m%d").date(), + price_precision=price_precision, + size_precision=size_precision, + price_increment=price_increment, + size_increment=size_increment, + max_quantity=max_quantity, + min_quantity=min_quantity, + max_notional=None, + min_notional=min_notional, + max_price=max_price, + min_price=min_price, + margin_init=Decimal(float(symbol_info.requiredMarginPercent) / 100), + margin_maint=Decimal(float(symbol_info.maintMarginPercent) / 100), + maker_fee=maker_fee, + taker_fee=taker_fee, ts_event=ts_event, - ts_init=time.time_ns(), + ts_init=ts_init, + info=self._decoder.decode(self._encoder.encode(symbol_info)), ) self.add_currency(currency=instrument.underlying) else: diff --git a/nautilus_trader/adapters/binance/futures/rules.py b/nautilus_trader/adapters/binance/futures/rules.py deleted file mode 100644 index 2145847a58ee..000000000000 --- a/nautilus_trader/adapters/binance/futures/rules.py +++ /dev/null @@ -1,35 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. 
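The inlined `_parse_instrument` above keeps the same precision derivation the deleted parsers used: Binance reports `tickSize`/`stepSize` as decimal strings, and the precision is the absolute `Decimal` exponent after stripping trailing zeros. A worked example with an illustrative filter value:

```python
# Worked sketch of the precision derivation in _parse_instrument.
# "0.00010000" is an illustrative tickSize, not taken from the diff.
from decimal import Decimal

tick_size = "0.00010000".rstrip("0")  # -> "0.0001"
price_precision = abs(int(Decimal(tick_size).as_tuple().exponent))
assert price_precision == 4  # four decimal places of price precision
```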
-# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import TimeInForce - - -BINANCE_FUTURES_VALID_TIF = ( - TimeInForce.GTC, - TimeInForce.GTD, # Will be transformed to GTC with warning - TimeInForce.FOK, - TimeInForce.IOC, -) - -BINANCE_FUTURES_VALID_ORDER_TYPES = ( - OrderType.MARKET, - OrderType.LIMIT, - OrderType.STOP_MARKET, - OrderType.STOP_LIMIT, - OrderType.MARKET_IF_TOUCHED, - OrderType.LIMIT_IF_TOUCHED, - OrderType.TRAILING_STOP_MARKET, -) diff --git a/nautilus_trader/adapters/binance/futures/schemas/__init__.py b/nautilus_trader/adapters/binance/futures/schemas/__init__.py index e69de29bb2d1..ca16b56e4794 100644 --- a/nautilus_trader/adapters/binance/futures/schemas/__init__.py +++ b/nautilus_trader/adapters/binance/futures/schemas/__init__.py @@ -0,0 +1,14 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- diff --git a/nautilus_trader/adapters/binance/futures/schemas/account.py b/nautilus_trader/adapters/binance/futures/schemas/account.py index c9dbee5ffccc..3d9dbdf2534a 100644 --- a/nautilus_trader/adapters/binance/futures/schemas/account.py +++ b/nautilus_trader/adapters/binance/futures/schemas/account.py @@ -13,15 +13,22 @@ # limitations under the License. 
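The schema changes that follow repeat one pattern: the structs become frozen `msgspec.Struct` types and gain `parse_to_*` methods, replacing the free functions deleted from the `parsing` package. A minimal sketch of the pattern, with a hypothetical struct and field names chosen only for illustration:

```python
# Minimal sketch of the "parse on the schema" pattern used below.
# ExampleBalance and its fields are hypothetical, not from the diff.
from decimal import Decimal

import msgspec


class ExampleBalance(msgspec.Struct, frozen=True):
    asset: str
    walletBalance: str

    def parse_total(self) -> Decimal:
        # Parsing lives on the struct, next to the fields it reads
        return Decimal(self.walletBalance)


raw = b'{"asset": "USDT", "walletBalance": "1250.50"}'
balance = msgspec.json.decode(raw, type=ExampleBalance)
assert balance.parse_total() == Decimal("1250.50")
```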
# ------------------------------------------------------------------------------------------------- +from decimal import Decimal from typing import Optional import msgspec -from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide -from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesOrderType +from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesEnumParser from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesPositionSide -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesWorkingType +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.execution.reports import PositionStatusReport +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.identifiers import AccountId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.objects import AccountBalance +from nautilus_trader.model.objects import MarginBalance +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Quantity ################################################################################ @@ -29,7 +36,7 @@ ################################################################################ -class BinanceFuturesAssetInfo(msgspec.Struct): +class BinanceFuturesBalanceInfo(msgspec.Struct, frozen=True): """ HTTP response 'inner struct' from `Binance Futures` GET /fapi/v2/account (HMAC SHA256). """ @@ -50,8 +57,26 @@ class BinanceFuturesAssetInfo(msgspec.Struct): marginAvailable: Optional[bool] = None updateTime: Optional[int] = None # last update time - -class BinanceFuturesAccountInfo(msgspec.Struct, kw_only=True): + def parse_to_account_balance(self) -> AccountBalance: + currency = Currency.from_str(self.asset) + total = Decimal(self.walletBalance) + locked = Decimal(self.initialMargin) + Decimal(self.maintMargin) + free = total - locked + return AccountBalance( + total=Money(total, currency), + locked=Money(locked, currency), + free=Money(free, currency), + ) + + def parse_to_margin_balance(self) -> MarginBalance: + currency: Currency = Currency.from_str(self.asset) + return MarginBalance( + initial=Money(Decimal(self.initialMargin), currency), + maintenance=Money(Decimal(self.maintMargin), currency), + ) + + +class BinanceFuturesAccountInfo(msgspec.Struct, kw_only=True, frozen=True): """ HTTP response from `Binance Futures` GET /fapi/v2/account (HMAC SHA256). """ @@ -77,61 +102,16 @@ class BinanceFuturesAccountInfo(msgspec.Struct, kw_only=True): totalCrossUnPnl: Optional[str] = None availableBalance: Optional[str] = None # available balance, only for USDT asset maxWithdrawAmount: Optional[str] = None # maximum amount for transfer out, only for USDT asset - assets: list[BinanceFuturesAssetInfo] + assets: list[BinanceFuturesBalanceInfo] + def parse_to_account_balances(self) -> list[AccountBalance]: + return [asset.parse_to_account_balance() for asset in self.assets] -class BinanceFuturesOrder(msgspec.Struct, kw_only=True): - """ - HTTP response from `Binance Futures` GET /fapi/v1/order (HMAC SHA256). 
- """ + def parse_to_margin_balances(self) -> list[MarginBalance]: + return [asset.parse_to_margin_balance() for asset in self.assets] - avgPrice: str - clientOrderId: str - cumQuote: str - executedQty: str - orderId: int - origQty: str - origType: str - price: str - reduceOnly: bool - side: str - positionSide: str - status: BinanceOrderStatus - stopPrice: str - closePosition: bool - symbol: str - time: int - timeInForce: str - type: BinanceFuturesOrderType - activatePrice: Optional[str] = None - priceRate: Optional[str] = None - updateTime: int - workingType: BinanceFuturesWorkingType - priceProtect: bool - -class BinanceFuturesAccountTrade(msgspec.Struct): - """ - HTTP response from ` Binance Futures` GET /fapi/v1/userTrades (HMAC SHA256). - """ - - buyer: bool - commission: str - commissionAsset: str - id: int - maker: bool - orderId: int - price: str - qty: str - quoteQty: str - realizedPnl: str - side: BinanceOrderSide - positionSide: BinanceFuturesPositionSide - symbol: str - time: int - - -class BinanceFuturesPositionRisk(msgspec.Struct, kw_only=True): +class BinanceFuturesPositionRisk(msgspec.Struct, kw_only=True, frozen=True): """ HTTP response from ` Binance Futures` GET /fapi/v2/positionRisk (HMAC SHA256). """ @@ -149,3 +129,33 @@ class BinanceFuturesPositionRisk(msgspec.Struct, kw_only=True): unRealizedProfit: str positionSide: BinanceFuturesPositionSide updateTime: int + + def parse_to_position_status_report( + self, + account_id: AccountId, + instrument_id: InstrumentId, + enum_parser: BinanceFuturesEnumParser, + report_id: UUID4, + ts_init: int, + ) -> PositionStatusReport: + position_side = enum_parser.parse_futures_position_side( + self.positionSide, + ) + net_size = Decimal(self.positionAmt) + return PositionStatusReport( + account_id=account_id, + instrument_id=instrument_id, + position_side=position_side, + quantity=Quantity.from_str(str(abs(net_size))), + report_id=report_id, + ts_last=ts_init, + ts_init=ts_init, + ) + + +class BinanceFuturesDualSidePosition(msgspec.Struct, frozen=True): + """ + HTTP response from `Binance Futures` GET /fapi/v1/positionSide/dual (HMAC SHA256). + """ + + dualSidePosition: bool diff --git a/nautilus_trader/adapters/binance/futures/schemas/market.py b/nautilus_trader/adapters/binance/futures/schemas/market.py index 07a1e87bf163..d99c9b07d68a 100644 --- a/nautilus_trader/adapters/binance/futures/schemas/market.py +++ b/nautilus_trader/adapters/binance/futures/schemas/market.py @@ -13,17 +13,27 @@ # limitations under the License. 
# ------------------------------------------------------------------------------------------------- +from decimal import Decimal from typing import Optional import msgspec -from nautilus_trader.adapters.binance.common.enums import BinanceExchangeFilterType -from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitInterval -from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitType -from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.schemas.market import BinanceExchangeFilter +from nautilus_trader.adapters.binance.common.schemas.market import BinanceRateLimit +from nautilus_trader.adapters.binance.common.schemas.market import BinanceSymbolFilter from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesContractStatus -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesOrderType -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesTimeInForce +from nautilus_trader.adapters.binance.futures.types import BinanceFuturesMarkPriceUpdate +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.enums import AggressorSide +from nautilus_trader.model.enums import CurrencyType +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity ################################################################################ @@ -31,50 +41,7 @@ ################################################################################ -class BinanceExchangeFilter(msgspec.Struct): - """HTTP response 'inner struct' from `Binance Futures` GET /fapi/v1/exchangeInfo.""" - - filterType: BinanceExchangeFilterType - maxNumOrders: Optional[int] = None - maxNumAlgoOrders: Optional[int] = None - - -class BinanceSymbolFilter(msgspec.Struct): - """HTTP response 'inner struct' from `Binance Futures` GET /fapi/v1/exchangeInfo.""" - - filterType: BinanceSymbolFilterType - minPrice: Optional[str] = None - maxPrice: Optional[str] = None - tickSize: Optional[str] = None - multiplierUp: Optional[str] = None - multiplierDown: Optional[str] = None - avgPriceMins: Optional[int] = None - bidMultiplierUp: Optional[str] = None - bidMultiplierDown: Optional[str] = None - askMultiplierUp: Optional[str] = None - askMultiplierDown: Optional[str] = None - minQty: Optional[str] = None - maxQty: Optional[str] = None - stepSize: Optional[str] = None - minNotional: Optional[str] = None - applyToMarket: Optional[bool] = None - limit: Optional[int] = None - maxNumOrders: Optional[int] = None - maxNumAlgoOrders: Optional[int] = None - maxNumIcebergOrders: Optional[int] = None - maxPosition: Optional[str] = None - - -class BinanceRateLimit(msgspec.Struct): - """HTTP response 'inner struct' from `Binance Futures` GET /fapi/v1/exchangeInfo.""" - - rateLimitType: BinanceRateLimitType - interval: BinanceRateLimitInterval - intervalNum: int - limit: int - - -class BinanceFuturesAsset(msgspec.Struct): +class BinanceFuturesAsset(msgspec.Struct, frozen=True): """HTTP response 'inner struct' from `Binance Futures` GET /fapi/v1/exchangeInfo.""" asset: str @@ -82,7 
+49,7 @@ class BinanceFuturesAsset(msgspec.Struct): autoAssetExchange: str -class BinanceFuturesSymbolInfo(msgspec.Struct, kw_only=True): +class BinanceFuturesSymbolInfo(msgspec.Struct, kw_only=True, frozen=True): """HTTP response 'inner struct' from `Binance Futures` GET /fapi/v1/exchangeInfo.""" symbol: str @@ -107,11 +74,29 @@ class BinanceFuturesSymbolInfo(msgspec.Struct, kw_only=True): liquidationFee: str marketTakeBound: str filters: list[BinanceSymbolFilter] - orderTypes: list[BinanceFuturesOrderType] - timeInForce: list[BinanceFuturesTimeInForce] - - -class BinanceFuturesExchangeInfo(msgspec.Struct, kw_only=True): + orderTypes: list[BinanceOrderType] + timeInForce: list[BinanceTimeInForce] + + def parse_to_base_currency(self): + return Currency( + code=self.baseAsset, + precision=self.baseAssetPrecision, + iso4217=0, # Currently undetermined for crypto assets + name=self.baseAsset, + currency_type=CurrencyType.CRYPTO, + ) + + def parse_to_quote_currency(self): + return Currency( + code=self.quoteAsset, + precision=self.quotePrecision, + iso4217=0, # Currently undetermined for crypto assets + name=self.quoteAsset, + currency_type=CurrencyType.CRYPTO, + ) + + +class BinanceFuturesExchangeInfo(msgspec.Struct, kw_only=True, frozen=True): """HTTP response from `Binance Futures` GET /fapi/v1/exchangeInfo.""" timezone: str @@ -122,7 +107,7 @@ class BinanceFuturesExchangeInfo(msgspec.Struct, kw_only=True): symbols: list[BinanceFuturesSymbolInfo] -class BinanceFuturesMarkFunding(msgspec.Struct): +class BinanceFuturesMarkFunding(msgspec.Struct, frozen=True): """HTTP response from `Binance Future` GET /fapi/v1/premiumIndex.""" symbol: str @@ -135,7 +120,7 @@ class BinanceFuturesMarkFunding(msgspec.Struct): time: int -class BinanceFuturesFundRate(msgspec.Struct): +class BinanceFuturesFundRate(msgspec.Struct, frozen=True): """HTTP response from `Binance Future` GET /fapi/v1/fundingRate.""" symbol: str @@ -148,7 +133,7 @@ class BinanceFuturesFundRate(msgspec.Struct): ################################################################################ -class BinanceFuturesTradeData(msgspec.Struct): +class BinanceFuturesTradeData(msgspec.Struct, frozen=True): """ WebSocket message 'inner struct' for `Binance Futures` Trade Streams. @@ -173,18 +158,33 @@ class BinanceFuturesTradeData(msgspec.Struct): t: int # Trade ID p: str # Price q: str # Quantity - X: BinanceFuturesOrderType # Buyer order type + X: BinanceOrderType # Buyer order type m: bool # Is the buyer the market maker? 
- -class BinanceFuturesTradeMsg(msgspec.Struct): + def parse_to_trade_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> TradeTick: + return TradeTick( + instrument_id=instrument_id, + price=Price.from_str(self.p), + size=Quantity.from_str(self.q), + aggressor_side=AggressorSide.SELLER if self.m else AggressorSide.BUYER, + trade_id=TradeId(str(self.t)), + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + ) + + +class BinanceFuturesTradeMsg(msgspec.Struct, frozen=True): """WebSocket message from `Binance Futures` Trade Streams.""" stream: str data: BinanceFuturesTradeData -class BinanceFuturesMarkPriceData(msgspec.Struct): +class BinanceFuturesMarkPriceData(msgspec.Struct, frozen=True): """WebSocket message 'inner struct' for `Binance Futures` Mark Price Update events.""" e: str # Event type @@ -196,8 +196,24 @@ class BinanceFuturesMarkPriceData(msgspec.Struct): r: str # Funding rate T: int # Next funding time - -class BinanceFuturesMarkPriceMsg(msgspec.Struct): + def parse_to_binance_futures_mark_price_update( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> BinanceFuturesMarkPriceUpdate: + return BinanceFuturesMarkPriceUpdate( + instrument_id=instrument_id, + mark=Price.from_str(self.p), + index=Price.from_str(self.i), + estimated_settle=Price.from_str(self.P), + funding_rate=Decimal(self.r), + ts_next_funding=millis_to_nanos(self.T), + ts_event=millis_to_nanos(self.E), + ts_init=ts_init, + ) + + +class BinanceFuturesMarkPriceMsg(msgspec.Struct, frozen=True): """WebSocket message from `Binance Futures` Mark Price Update events.""" stream: str diff --git a/nautilus_trader/adapters/binance/futures/schemas/user.py b/nautilus_trader/adapters/binance/futures/schemas/user.py index 4ecd5f2430d2..bf89283a48bf 100644 --- a/nautilus_trader/adapters/binance/futures/schemas/user.py +++ b/nautilus_trader/adapters/binance/futures/schemas/user.py @@ -13,19 +13,40 @@ # limitations under the License. 
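`parse_to_trade_tick` above maps Binance's `m` flag (is the buyer the market maker?) to the aggressor side: if the buyer made the market, the seller crossed the spread. A sketch of the same rule using the enum from the diff:

```python
# Sketch of the aggressor-side rule in parse_to_trade_tick: a True
# buyer-is-maker flag means the seller was the aggressor.
from nautilus_trader.model.enums import AggressorSide


def aggressor_side(buyer_is_maker: bool) -> AggressorSide:
    return AggressorSide.SELLER if buyer_is_maker else AggressorSide.BUYER


assert aggressor_side(True) == AggressorSide.SELLER
assert aggressor_side(False) == AggressorSide.BUYER
```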
# ------------------------------------------------------------------------------------------------- +from decimal import Decimal from typing import Optional import msgspec +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser from nautilus_trader.adapters.binance.common.enums import BinanceExecutionType from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.execution import BinanceCommonExecutionClient from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesEventType -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesOrderType from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesPositionSide from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesPositionUpdateReason -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesTimeInForce from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesWorkingType +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.execution.reports import OrderStatusReport +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.enums import LiquiditySide +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.enums import OrderStatus +from nautilus_trader.model.enums import TrailingOffsetType +from nautilus_trader.model.identifiers import AccountId +from nautilus_trader.model.identifiers import ClientOrderId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import PositionId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.identifiers import VenueOrderId +from nautilus_trader.model.objects import AccountBalance +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity ################################################################################ @@ -33,24 +54,20 @@ ################################################################################ -class BinanceFuturesUserMsgData(msgspec.Struct): - """ - Inner struct for execution WebSocket messages from `Binance` - """ +class BinanceFuturesUserMsgData(msgspec.Struct, frozen=True): + """Inner struct for execution WebSocket messages from `Binance`.""" e: BinanceFuturesEventType -class BinanceFuturesUserMsgWrapper(msgspec.Struct): - """ - Provides a wrapper for execution WebSocket messages from `Binance`. 
- """ +class BinanceFuturesUserMsgWrapper(msgspec.Struct, frozen=True): + """Provides a wrapper for execution WebSocket messages from `Binance`.""" stream: str data: BinanceFuturesUserMsgData -class MarginCallPosition(msgspec.Struct): +class MarginCallPosition(msgspec.Struct, frozen=True): """Inner struct position for `Binance Futures` Margin Call events.""" s: str # Symbol @@ -63,7 +80,7 @@ class MarginCallPosition(msgspec.Struct): mm: str # Maintenance Margin Required -class BinanceFuturesMarginCallMsg(msgspec.Struct): +class BinanceFuturesMarginCallMsg(msgspec.Struct, frozen=True): """WebSocket message for `Binance Futures` Margin Call events.""" e: str # Event Type @@ -72,7 +89,7 @@ class BinanceFuturesMarginCallMsg(msgspec.Struct): p: list[MarginCallPosition] -class BinanceFuturesBalance(msgspec.Struct): +class BinanceFuturesBalance(msgspec.Struct, frozen=True): """Inner struct balance for `Binance Futures` Balance and Position update event.""" a: str # Asset @@ -80,8 +97,20 @@ class BinanceFuturesBalance(msgspec.Struct): cw: str # Cross Wallet Balance bc: str # Balance Change except PnL and Commission + def parse_to_account_balance(self) -> AccountBalance: + currency = Currency.from_str(self.a) + free = Decimal(self.wb) + locked = Decimal(0) # TODO(cs): Pending refactoring of accounting + total: Decimal = free + locked + + return AccountBalance( + total=Money(total, currency), + locked=Money(locked, currency), + free=Money(free, currency), + ) -class BinanceFuturesPosition(msgspec.Struct): + +class BinanceFuturesPosition(msgspec.Struct, frozen=True): """Inner struct position for `Binance Futures` Balance and Position update event.""" s: str # Symbol @@ -94,15 +123,18 @@ class BinanceFuturesPosition(msgspec.Struct): ps: BinanceFuturesPositionSide -class BinanceFuturesAccountUpdateData(msgspec.Struct): +class BinanceFuturesAccountUpdateData(msgspec.Struct, frozen=True): """WebSocket message for `Binance Futures` Balance and Position Update events.""" m: BinanceFuturesPositionUpdateReason B: list[BinanceFuturesBalance] P: list[BinanceFuturesPosition] + def parse_to_account_balances(self) -> list[AccountBalance]: + return [balance.parse_to_account_balance() for balance in self.B] + -class BinanceFuturesAccountUpdateMsg(msgspec.Struct): +class BinanceFuturesAccountUpdateMsg(msgspec.Struct, frozen=True): """WebSocket message for `Binance Futures` Balance and Position Update events.""" e: str # Event Type @@ -110,15 +142,24 @@ class BinanceFuturesAccountUpdateMsg(msgspec.Struct): T: int # Transaction Time a: BinanceFuturesAccountUpdateData + def handle_account_update(self, exec_client: BinanceCommonExecutionClient): + """Handle BinanceFuturesAccountUpdateMsg as payload of ACCOUNT_UPDATE.""" + exec_client.generate_account_state( + balances=self.a.parse_to_account_balances(), + margins=[], + reported=True, + ts_event=millis_to_nanos(self.T), + ) -class BinanceFuturesAccountUpdateWrapper(msgspec.Struct): + +class BinanceFuturesAccountUpdateWrapper(msgspec.Struct, frozen=True): """WebSocket message wrapper for `Binance Futures` Balance and Position Update events.""" stream: str data: BinanceFuturesAccountUpdateMsg -class BinanceFuturesOrderData(msgspec.Struct, kw_only=True): +class BinanceFuturesOrderData(msgspec.Struct, kw_only=True, frozen=True): """ WebSocket message 'inner struct' for `Binance Futures` Order Update events. 
@@ -130,8 +171,8 @@ class BinanceFuturesOrderData(msgspec.Struct, kw_only=True): s: str # Symbol c: str # Client Order ID S: BinanceOrderSide - o: BinanceFuturesOrderType - f: BinanceFuturesTimeInForce + o: BinanceOrderType + f: BinanceTimeInForce q: str # Original Quantity p: str # Original Price ap: str # Average Price @@ -151,7 +192,7 @@ class BinanceFuturesOrderData(msgspec.Struct, kw_only=True): m: bool # Is trade the maker side R: bool # Is reduce only wt: BinanceFuturesWorkingType - ot: BinanceFuturesOrderType + ot: BinanceOrderType ps: BinanceFuturesPositionSide cp: Optional[bool] = None # If Close-All, pushed with conditional order AP: Optional[str] = None # Activation Price, only pushed with TRAILING_STOP_MARKET order @@ -161,8 +202,126 @@ class BinanceFuturesOrderData(msgspec.Struct, kw_only=True): ss: int # ignore rp: str # Realized Profit of the trade - -class BinanceFuturesOrderUpdateMsg(msgspec.Struct): + def parse_to_order_status_report( + self, + account_id: AccountId, + instrument_id: InstrumentId, + client_order_id: ClientOrderId, + venue_order_id: VenueOrderId, + ts_event: int, + ts_init: int, + enum_parser: BinanceEnumParser, + ) -> OrderStatusReport: + price = Price.from_str(self.p) if self.p is not None else None + trigger_price = Price.from_str(self.sp) if self.sp is not None else None + trailing_offset = Decimal(self.cr) * 100 if self.cr is not None else None + order_side = OrderSide.BUY if self.S == BinanceOrderSide.BUY else OrderSide.SELL + post_only = self.f == BinanceTimeInForce.GTX + + return OrderStatusReport( + account_id=account_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + order_side=order_side, + order_type=enum_parser.parse_binance_order_type(self.o), + time_in_force=enum_parser.parse_binance_time_in_force(self.f), + order_status=OrderStatus.ACCEPTED, + price=price, + trigger_price=trigger_price, + trigger_type=enum_parser.parse_binance_trigger_type(self.wt.value), + trailing_offset=trailing_offset, + trailing_offset_type=TrailingOffsetType.BASIS_POINTS, + quantity=Quantity.from_str(self.q), + filled_qty=Quantity.from_str(self.z), + avg_px=None, + post_only=post_only, + reduce_only=self.R, + report_id=UUID4(), + ts_accepted=ts_event, + ts_last=ts_event, + ts_init=ts_init, + ) + + def handle_order_trade_update( + self, + exec_client: BinanceCommonExecutionClient, + ): + """Handle BinanceFuturesOrderData as payload of ORDER_TRADE_UPDATE event.""" + client_order_id = ClientOrderId(self.c) if self.c != "" else None + ts_event = millis_to_nanos(self.T) + venue_order_id = VenueOrderId(str(self.i)) + instrument_id = exec_client._get_cached_instrument_id(self.s) + strategy_id = exec_client._cache.strategy_id_for_order(client_order_id) + if strategy_id is None: + report = self.parse_to_order_status_report( + account_id=exec_client.account_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ts_init=exec_client._clock.timestamp_ns(), + enum_parser=exec_client._enum_parser, + ) + exec_client._send_order_status_report(report) + elif self.x == BinanceExecutionType.NEW: + exec_client.generate_order_accepted( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.TRADE: + instrument = exec_client._instrument_provider.find(instrument_id=instrument_id) + + # Determine commission + commission_asset: str = 
self.N + commission_amount: str = self.n + if commission_asset is not None: + commission = Money.from_str(f"{commission_amount} {commission_asset}") + else: + # Commission in margin collateral currency + commission = Money(0, instrument.quote_currency) + + exec_client.generate_order_filled( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + venue_position_id=PositionId(f"{instrument_id}-{self.ps.value}"), + trade_id=TradeId(str(self.t)), # Trade ID + order_side=exec_client._enum_parser.parse_binance_order_side(self.S), + order_type=exec_client._enum_parser.parse_binance_order_type(self.o), + last_qty=Quantity.from_str(self.l), + last_px=Price.from_str(self.L), + quote_currency=instrument.quote_currency, + commission=commission, + liquidity_side=LiquiditySide.MAKER if self.m else LiquiditySide.TAKER, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.CANCELED: + exec_client.generate_order_canceled( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.EXPIRED: + exec_client.generate_order_expired( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + else: + # Event not handled + exec_client._log.warning(f"Received unhandled {self}") + + +class BinanceFuturesOrderUpdateMsg(msgspec.Struct, frozen=True): """WebSocket message for `Binance Futures` Order Update events.""" e: str # Event Type @@ -171,7 +330,7 @@ class BinanceFuturesOrderUpdateMsg(msgspec.Struct): o: BinanceFuturesOrderData -class BinanceFuturesOrderUpdateWrapper(msgspec.Struct): +class BinanceFuturesOrderUpdateWrapper(msgspec.Struct, frozen=True): """WebSocket message wrapper for `Binance Futures` Order Update events.""" stream: str diff --git a/nautilus_trader/adapters/binance/spot/rules.py b/nautilus_trader/adapters/binance/futures/schemas/wallet.py similarity index 68% rename from nautilus_trader/adapters/binance/spot/rules.py rename to nautilus_trader/adapters/binance/futures/schemas/wallet.py index dd6c51006254..d1931fb82d94 100644 --- a/nautilus_trader/adapters/binance/spot/rules.py +++ b/nautilus_trader/adapters/binance/futures/schemas/wallet.py @@ -13,20 +13,17 @@ # limitations under the License. 
# ------------------------------------------------------------------------------------------------- -from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import TimeInForce +import msgspec -BINANCE_SPOT_VALID_TIF = ( - TimeInForce.GTC, - TimeInForce.GTD, # Will be transformed to GTC with warning - TimeInForce.FOK, - TimeInForce.IOC, -) +################################################################################ +# HTTP responses +################################################################################ -BINANCE_SPOT_VALID_ORDER_TYPES = ( - OrderType.MARKET, - OrderType.LIMIT, - OrderType.LIMIT_IF_TOUCHED, - OrderType.STOP_LIMIT, -) + +class BinanceFuturesCommissionRate(msgspec.Struct, frozen=True): + """Schema of a single `Binance Futures` commissionRate.""" + + symbol: str + makerCommissionRate: str + takerCommissionRate: str diff --git a/nautilus_trader/adapters/binance/http/account.py b/nautilus_trader/adapters/binance/http/account.py new file mode 100644 index 000000000000..999ff248d9b6 --- /dev/null +++ b/nautilus_trader/adapters/binance/http/account.py @@ -0,0 +1,653 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +from typing import Optional + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceNewOrderRespType +from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.schemas.account import BinanceOrder +from nautilus_trader.adapters.binance.common.schemas.account import BinanceUserTrade +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.common.clock import LiveClock +from nautilus_trader.core.correctness import PyCondition + + +class BinanceOrderHttp(BinanceHttpEndpoint): + """ + Endpoint for managing orders. 
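+
+    A single endpoint class serves the SPOT/MARGIN, USD-M futures, and COIN-M
+    futures APIs; the `base_endpoint` prefix passed in by the parent API class
+    selects which venue is targeted.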
+
+    `GET /api/v3/order`
+    `GET /api/v3/order/test`
+    `GET /fapi/v1/order`
+    `GET /dapi/v1/order`
+
+    `POST /api/v3/order`
+    `POST /fapi/v1/order`
+    `POST /dapi/v1/order`
+
+    `DELETE /api/v3/order`
+    `DELETE /fapi/v1/order`
+    `DELETE /dapi/v1/order`
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#new-order-trade
+    https://binance-docs.github.io/apidocs/futures/en/#new-order-trade
+    https://binance-docs.github.io/apidocs/delivery/en/#new-order-trade
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        base_endpoint: str,
+        testing_endpoint: Optional[bool] = False,
+    ):
+        methods = {
+            BinanceMethodType.GET: BinanceSecurityType.USER_DATA,
+            BinanceMethodType.POST: BinanceSecurityType.TRADE,
+            BinanceMethodType.DELETE: BinanceSecurityType.TRADE,
+        }
+        url_path = base_endpoint + "order"
+        if testing_endpoint:
+            url_path = url_path + "/test"
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._resp_decoder = msgspec.json.Decoder(BinanceOrder)
+
+    class GetDeleteParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        Order management GET & DELETE endpoint parameters.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The symbol of the order.
+        timestamp : str
+            The millisecond timestamp of the request.
+        orderId : str, optional
+            The order identifier.
+        origClientOrderId : str, optional
+            The client-specified order identifier.
+        recvWindow : str, optional
+            The millisecond timeout window.
+
+        Warnings
+        --------
+        Either orderId or origClientOrderId must be sent.
+        """
+
+        symbol: BinanceSymbol
+        timestamp: str
+        orderId: Optional[str] = None
+        origClientOrderId: Optional[str] = None
+        recvWindow: Optional[str] = None
+
+    class PostParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        Order creation POST endpoint parameters.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The symbol of the order.
+        timestamp : str
+            The millisecond timestamp of the request.
+        side : BinanceOrderSide
+            The market side of the order (BUY, SELL).
+        type : BinanceOrderType
+            The type of the order (LIMIT, STOP_LOSS, ...).
+        timeInForce : BinanceTimeInForce, optional
+            Mandatory for LIMIT, STOP_LOSS_LIMIT, TAKE_PROFIT_LIMIT orders.
+            The time in force of the order (GTC, IOC, ...).
+        quantity : str, optional
+            Mandatory for all order types, except STOP_MARKET/TAKE_PROFIT_MARKET
+            and TRAILING_STOP_MARKET orders.
+            The order quantity in base asset units for the request.
+        quoteOrderQty : str, optional
+            Only for SPOT/MARGIN orders.
+            Can be used alternatively to `quantity` for MARKET orders.
+            The order quantity in quote asset units for the request.
+        price : str, optional
+            Mandatory for LIMIT, STOP_LOSS_LIMIT, TAKE_PROFIT_LIMIT, LIMIT_MAKER,
+            STOP, TAKE_PROFIT orders.
+            The order price for the request.
+        newClientOrderId : str, optional
+            The client order ID for the request. A unique ID among open orders.
+            Automatically generated if not provided.
+        strategyId : int, optional
+            Only for SPOT/MARGIN orders.
+            The client strategy ID for the request.
+        strategyType : int, optional
+            Only for SPOT/MARGIN orders.
+            The client strategy type for the request. Cannot be less than 1000000.
+        stopPrice : str, optional
+            Mandatory for STOP_LOSS, STOP_LOSS_LIMIT, TAKE_PROFIT, TAKE_PROFIT_LIMIT,
+            STOP, STOP_MARKET, TAKE_PROFIT_MARKET.
+            The order stop price for the request.
+        trailingDelta : str, optional
+            Only for SPOT/MARGIN orders.
+            Can be used instead of or in addition to stopPrice for STOP_LOSS,
+            STOP_LOSS_LIMIT, TAKE_PROFIT, TAKE_PROFIT_LIMIT orders.
+            The order trailing delta of the request.
+        icebergQty : str, optional
+            Only for SPOT/MARGIN orders.
+            Can be used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to
+            create an iceberg order.
+        reduceOnly : str ('true', 'false'), optional
+            Only for FUTURES orders.
+            Cannot be sent in Hedge Mode, and cannot be sent with closePosition = 'true'.
+        closePosition : str ('true', 'false'), optional
+            Only for FUTURES orders.
+            Can be used with STOP_MARKET or TAKE_PROFIT_MARKET orders.
+            Whether to close all open positions for the given symbol.
+        activationPrice : str, optional
+            Only for FUTURES orders.
+            Can be used with TRAILING_STOP_MARKET orders.
+            Defaults to the latest price.
+        callbackRate : str, optional
+            Only for FUTURES orders.
+            Mandatory for TRAILING_STOP_MARKET orders.
+            The order trailing delta of the request.
+        workingType : str ("MARK_PRICE", "CONTRACT_PRICE"), optional
+            Only for FUTURES orders.
+            The trigger type for the order.
+            Defaults to "CONTRACT_PRICE".
+        priceProtect : str ('true', 'false'), optional
+            Only for FUTURES orders.
+            Whether price protection is active.
+            Defaults to 'false'.
+        newOrderRespType : BinanceNewOrderRespType, optional
+            The response type for the order request.
+            SPOT/MARGIN MARKET, LIMIT orders default to FULL.
+            All others default to ACK.
+            FULL response only for SPOT/MARGIN orders.
+        recvWindow : str, optional
+            The response receive window in milliseconds for the request.
+            Cannot exceed 60000.
+        """
+
+        symbol: BinanceSymbol
+        timestamp: str
+        side: BinanceOrderSide
+        type: BinanceOrderType
+        timeInForce: Optional[BinanceTimeInForce] = None
+        quantity: Optional[str] = None
+        quoteOrderQty: Optional[str] = None
+        price: Optional[str] = None
+        newClientOrderId: Optional[str] = None
+        strategyId: Optional[int] = None
+        strategyType: Optional[int] = None
+        stopPrice: Optional[str] = None
+        trailingDelta: Optional[str] = None
+        icebergQty: Optional[str] = None
+        reduceOnly: Optional[str] = None
+        closePosition: Optional[str] = None
+        activationPrice: Optional[str] = None
+        callbackRate: Optional[str] = None
+        workingType: Optional[str] = None
+        priceProtect: Optional[str] = None
+        newOrderRespType: Optional[BinanceNewOrderRespType] = None
+        recvWindow: Optional[str] = None
+
+    async def _get(self, parameters: GetDeleteParameters) -> BinanceOrder:
+        method_type = BinanceMethodType.GET
+        raw = await self._method(method_type, parameters)
+        return self._resp_decoder.decode(raw)
+
+    async def _delete(self, parameters: GetDeleteParameters) -> BinanceOrder:
+        method_type = BinanceMethodType.DELETE
+        raw = await self._method(method_type, parameters)
+        return self._resp_decoder.decode(raw)
+
+    async def _post(self, parameters: PostParameters) -> BinanceOrder:
+        method_type = BinanceMethodType.POST
+        raw = await self._method(method_type, parameters)
+        return self._resp_decoder.decode(raw)
+
+
+class BinanceAllOrdersHttp(BinanceHttpEndpoint):
+    """
+    Endpoint of all account orders, active, cancelled or filled.
+
+    `GET /api/v3/allOrders`
+    `GET /fapi/v1/allOrders`
+    `GET /dapi/v1/allOrders`
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#all-orders-user_data
+    https://binance-docs.github.io/apidocs/futures/en/#all-orders-user_data
+    https://binance-docs.github.io/apidocs/delivery/en/#all-orders-user_data
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        base_endpoint: str,
+    ):
+        methods = {
+            BinanceMethodType.GET: BinanceSecurityType.USER_DATA,
+        }
+        url_path = base_endpoint + "allOrders"
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._get_resp_decoder = msgspec.json.Decoder(list[BinanceOrder])
+
+    class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        Parameters of allOrders GET request.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The symbol of the orders.
+        timestamp : str
+            The millisecond timestamp of the request.
+        orderId : str, optional
+            The order ID for the request.
+            If included, the request will return orders from this orderId INCLUSIVE.
+        startTime : str, optional
+            The start time (UNIX milliseconds) filter for the request.
+        endTime : str, optional
+            The end time (UNIX milliseconds) filter for the request.
+        limit : int, optional
+            The limit for the response.
+            Default 500; max 1000.
+        recvWindow : str, optional
+            The response receive window for the request (cannot be greater than 60000).
+        """
+
+        symbol: BinanceSymbol
+        timestamp: str
+        orderId: Optional[str] = None
+        startTime: Optional[str] = None
+        endTime: Optional[str] = None
+        limit: Optional[int] = None
+        recvWindow: Optional[str] = None
+
+    async def _get(self, parameters: GetParameters) -> list[BinanceOrder]:
+        method_type = BinanceMethodType.GET
+        raw = await self._method(method_type, parameters)
+        return self._get_resp_decoder.decode(raw)
+
+
+class BinanceOpenOrdersHttp(BinanceHttpEndpoint):
+    """
+    Endpoint of all open orders on a symbol.
+
+    `GET /api/v3/openOrders`
+    `GET /fapi/v1/openOrders`
+    `GET /dapi/v1/openOrders`
+
+    Warnings
+    --------
+    Care should be taken when accessing this endpoint with no symbol specified.
+    The weight usage can be very large, which may cause rate limits to be hit.
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#current-open-orders-user_data
+    https://binance-docs.github.io/apidocs/futures/en/#current-all-open-orders-user_data
+    https://binance-docs.github.io/apidocs/delivery/en/#current-all-open-orders-user_data
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        base_endpoint: str,
+        methods: Optional[dict[BinanceMethodType, BinanceSecurityType]] = None,
+    ):
+        if methods is None:
+            methods = {
+                BinanceMethodType.GET: BinanceSecurityType.USER_DATA,
+            }
+        url_path = base_endpoint + "openOrders"
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._get_resp_decoder = msgspec.json.Decoder(list[BinanceOrder])
+
+    class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        Parameters of openOrders GET request.
+
+        Parameters
+        ----------
+        timestamp : str
+            The millisecond timestamp of the request.
+        symbol : BinanceSymbol, optional
+            The symbol of the orders.
+        recvWindow : str, optional
+            The response receive window for the request (cannot be greater than 60000).
+ """ + + timestamp: str + symbol: Optional[BinanceSymbol] = None + recvWindow: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceOrder]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceUserTradesHttp(BinanceHttpEndpoint): + """ + Endpoint of trades for a specific account and symbol. + + `GET /api/v3/myTrades` + `GET /fapi/v1/userTrades` + `GET /dapi/v1/userTrades` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#account-trade-list-user_data + https://binance-docs.github.io/apidocs/futures/en/#account-trade-list-user_data + https://binance-docs.github.io/apidocs/delivery/en/#account-trade-list-user_data + """ + + def __init__( + self, + client: BinanceHttpClient, + url_path: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceUserTrade]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + Parameters of allOrders GET request. + + Parameters + ---------- + symbol : BinanceSymbol + The symbol of the orders + timestamp : str + The millisecond timestamp of the request + orderId : str, optional + The order ID for the request. + If included, request will return orders from this orderId INCLUSIVE + startTime : str, optional + The start time (UNIX milliseconds) filter for the request. + endTime : str, optional + The end time (UNIX milliseconds) filter for the request. + fromId : str, optional + TradeId to fetch from. Default gets most recent trades. + limit : int, optional + The limit for the response. + Default 500, max 1000 + recvWindow : str, optional + The response receive window for the request (cannot be greater than 60000). + """ + + symbol: BinanceSymbol + timestamp: str + orderId: Optional[str] = None + startTime: Optional[str] = None + endTime: Optional[str] = None + fromId: Optional[str] = None + limit: Optional[int] = None + recvWindow: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceUserTrade]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceAccountHttpAPI: + """ + Provides access to the Binance Account/Trade HTTP REST API. + + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint prefix + + Warnings + -------- + This class should not be used directly, but through a concrete subclass. 
+ """ + + def __init__( + self, + client: BinanceHttpClient, + clock: LiveClock, + account_type: BinanceAccountType, + ): + PyCondition.not_none(client, "client") + self.client = client + self._clock = clock + + if account_type.is_spot_or_margin: + self.base_endpoint = "/api/v3/" + user_trades_url = self.base_endpoint + "myTrades" + elif account_type == BinanceAccountType.FUTURES_USDT: + self.base_endpoint = "/fapi/v1/" + user_trades_url = self.base_endpoint + "userTrades" + elif account_type == BinanceAccountType.FUTURES_COIN: + self.base_endpoint = "/dapi/v1/" + user_trades_url = self.base_endpoint + "userTrades" + else: + raise RuntimeError( # pragma: no cover (design-time error) + f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover + ) + + # Create endpoints + self._endpoint_order = BinanceOrderHttp(client, self.base_endpoint) + self._endpoint_all_orders = BinanceAllOrdersHttp(client, self.base_endpoint) + self._endpoint_open_orders = BinanceOpenOrdersHttp(client, self.base_endpoint) + self._endpoint_user_trades = BinanceUserTradesHttp(client, user_trades_url) + + def _timestamp(self) -> str: + """Create Binance timestamp from internal clock.""" + return str(self._clock.timestamp_ms()) + + async def query_order( + self, + symbol: str, + order_id: Optional[str] = None, + orig_client_order_id: Optional[str] = None, + recv_window: Optional[str] = None, + ) -> BinanceOrder: + """Check an order status.""" + if order_id is None and orig_client_order_id is None: + raise RuntimeError( + "Either orderId or origClientOrderId must be sent.", + ) + binance_order = await self._endpoint_order._get( + parameters=self._endpoint_order.GetDeleteParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + orderId=order_id, + origClientOrderId=orig_client_order_id, + recvWindow=recv_window, + ), + ) + return binance_order + + async def cancel_all_open_orders( + self, + symbol: str, + recv_window: Optional[str] = None, + ) -> bool: + # Implement in child class + raise NotImplementedError + + async def cancel_order( + self, + symbol: str, + order_id: Optional[str] = None, + orig_client_order_id: Optional[str] = None, + recv_window: Optional[str] = None, + ) -> BinanceOrder: + """Cancel an active order.""" + if order_id is None and orig_client_order_id is None: + raise RuntimeError( + "Either orderId or origClientOrderId must be sent.", + ) + binance_order = await self._endpoint_order._delete( + parameters=self._endpoint_order.GetDeleteParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + orderId=order_id, + origClientOrderId=orig_client_order_id, + recvWindow=recv_window, + ), + ) + return binance_order + + async def new_order( + self, + symbol: str, + side: BinanceOrderSide, + order_type: BinanceOrderType, + time_in_force: Optional[BinanceTimeInForce] = None, + quantity: Optional[str] = None, + quote_order_qty: Optional[str] = None, + price: Optional[str] = None, + new_client_order_id: Optional[str] = None, + strategy_id: Optional[int] = None, + strategy_type: Optional[int] = None, + stop_price: Optional[str] = None, + trailing_delta: Optional[str] = None, + iceberg_qty: Optional[str] = None, + reduce_only: Optional[str] = None, + close_position: Optional[str] = None, + activation_price: Optional[str] = None, + callback_rate: Optional[str] = None, + working_type: Optional[str] = None, + price_protect: Optional[str] = None, + new_order_resp_type: Optional[BinanceNewOrderRespType] = None, + recv_window: Optional[str] = None, + ) -> BinanceOrder: + 
"""Send in a new order to Binance.""" + binance_order = await self._endpoint_order._post( + parameters=self._endpoint_order.PostParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + side=side, + type=order_type, + timeInForce=time_in_force, + quantity=quantity, + quoteOrderQty=quote_order_qty, + price=price, + newClientOrderId=new_client_order_id, + strategyId=strategy_id, + strategyType=strategy_type, + stopPrice=stop_price, + trailingDelta=trailing_delta, + icebergQty=iceberg_qty, + reduceOnly=reduce_only, + closePosition=close_position, + activationPrice=activation_price, + callbackRate=callback_rate, + workingType=working_type, + priceProtect=price_protect, + newOrderRespType=new_order_resp_type, + recvWindow=recv_window, + ), + ) + return binance_order + + async def query_all_orders( + self, + symbol: str, + order_id: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + limit: Optional[int] = None, + recv_window: Optional[str] = None, + ) -> list[BinanceOrder]: + """Query all orders, active or filled.""" + return await self._endpoint_all_orders._get( + parameters=self._endpoint_all_orders.GetParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + orderId=order_id, + startTime=start_time, + endTime=end_time, + limit=limit, + recvWindow=recv_window, + ), + ) + + async def query_open_orders( + self, + symbol: Optional[str] = None, + recv_window: Optional[str] = None, + ) -> list[BinanceOrder]: + """Query open orders.""" + return await self._endpoint_open_orders._get( + parameters=self._endpoint_open_orders.GetParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + recvWindow=recv_window, + ), + ) + + async def query_user_trades( + self, + symbol: str, + order_id: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + from_id: Optional[str] = None, + limit: Optional[int] = None, + recv_window: Optional[str] = None, + ) -> list[BinanceUserTrade]: + """Query user's trade history for a symbol, with provided filters.""" + if (order_id or from_id) is not None and (start_time or end_time) is not None: + raise RuntimeError( + "Cannot specify both order_id/from_id AND start_time/end_time parameters.", + ) + return await self._endpoint_user_trades._get( + parameters=self._endpoint_user_trades.GetParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + orderId=order_id, + startTime=start_time, + endTime=end_time, + fromId=from_id, + limit=limit, + recvWindow=recv_window, + ), + ) diff --git a/nautilus_trader/adapters/binance/http/client.py b/nautilus_trader/adapters/binance/http/client.py index 94eaf684968a..79fe2d914190 100644 --- a/nautilus_trader/adapters/binance/http/client.py +++ b/nautilus_trader/adapters/binance/http/client.py @@ -106,7 +106,6 @@ async def sign_request( ) -> Any: if payload is None: payload = {} - payload["timestamp"] = str(self._clock.timestamp_ms()) query_string = self._prepare_params(payload) signature = self._get_sign(query_string) payload["signature"] = signature @@ -130,7 +129,6 @@ async def limited_encoded_sign_request( """ if payload is None: payload = {} - payload["timestamp"] = str(self._clock.timestamp_ms()) query_string = self._prepare_params(payload) signature = self._get_sign(query_string) url_path = url_path + "?" 
+ query_string + "&signature=" + signature diff --git a/nautilus_trader/adapters/binance/http/endpoint.py b/nautilus_trader/adapters/binance/http/endpoint.py new file mode 100644 index 000000000000..10bd625a9eeb --- /dev/null +++ b/nautilus_trader/adapters/binance/http/endpoint.py @@ -0,0 +1,78 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +from typing import Any + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbols +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient + + +def enc_hook(obj: Any) -> Any: + if isinstance(obj, BinanceSymbol): + return str(obj) # serialize BinanceSymbol as string. + elif isinstance(obj, BinanceSymbols): + return str(obj) # serialize BinanceSymbol as string. + else: + raise TypeError(f"Objects of type {type(obj)} are not supported") + + +class BinanceHttpEndpoint: + """ + Base functionality of endpoints connecting to the Binance REST API. + + Warnings + -------- + This class should not be used directly, but through a concrete subclass. 
+ """ + + def __init__( + self, + client: BinanceHttpClient, + methods_desc: dict[BinanceMethodType, BinanceSecurityType], + url_path: str, + ): + self.client = client + self.methods_desc = methods_desc + self.url_path = url_path + + self.decoder = msgspec.json.Decoder() + self.encoder = msgspec.json.Encoder(enc_hook=enc_hook) + + self._method_request = { + BinanceSecurityType.NONE: self.client.send_request, + BinanceSecurityType.USER_STREAM: self.client.send_request, + BinanceSecurityType.MARKET_DATA: self.client.send_request, + BinanceSecurityType.TRADE: self.client.sign_request, + BinanceSecurityType.MARGIN: self.client.sign_request, + BinanceSecurityType.USER_DATA: self.client.sign_request, + } + + async def _method(self, method_type: BinanceMethodType, parameters: Any) -> bytes: + payload: dict = self.decoder.decode(self.encoder.encode(parameters)) + if self.methods_desc[method_type] is None: + raise RuntimeError( + f"{method_type.name} not available for {self.url_path}", + ) + raw: bytes = await self._method_request[self.methods_desc[method_type]]( + http_method=method_type.name, + url_path=self.url_path, + payload=payload, + ) + return raw diff --git a/nautilus_trader/adapters/binance/http/error.py b/nautilus_trader/adapters/binance/http/error.py index 3394263be60b..32929af9bdc8 100644 --- a/nautilus_trader/adapters/binance/http/error.py +++ b/nautilus_trader/adapters/binance/http/error.py @@ -15,9 +15,7 @@ class BinanceError(Exception): - """ - The base class for all `Binance` specific errors. - """ + """The base class for all `Binance` specific errors.""" def __init__(self, status, message, headers): self.status = status @@ -26,18 +24,14 @@ def __init__(self, status, message, headers): class BinanceServerError(BinanceError): - """ - Represents an `Binance` specific 500 series HTTP error. - """ + """Represents an `Binance` specific 500 series HTTP error.""" def __init__(self, status, message, headers): super().__init__(status, message, headers) class BinanceClientError(BinanceError): - """ - Represents an `Binance` specific 400 series HTTP error. - """ + """Represents an `Binance` specific 400 series HTTP error.""" def __init__(self, status, message, headers): super().__init__(status, message, headers) diff --git a/nautilus_trader/adapters/binance/http/market.py b/nautilus_trader/adapters/binance/http/market.py new file mode 100644 index 000000000000..62ab8bcaf424 --- /dev/null +++ b/nautilus_trader/adapters/binance/http/market.py @@ -0,0 +1,918 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +from typing import Optional + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceKlineInterval +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.market import BinanceAggTrade +from nautilus_trader.adapters.binance.common.schemas.market import BinanceDepth +from nautilus_trader.adapters.binance.common.schemas.market import BinanceKline +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTicker24hr +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTickerBook +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTickerPrice +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTime +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTrade +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbols +from nautilus_trader.adapters.binance.common.types import BinanceBar +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.core.correctness import PyCondition +from nautilus_trader.model.data.bar import BarType +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.orderbook.data import OrderBookSnapshot + + +class BinancePingHttp(BinanceHttpEndpoint): + """ + Endpoint for testing connectivity to the REST API. + + `GET /api/v3/ping` + `GET /fapi/v1/ping` + `GET /dapi/v1/ping` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#test-connectivity + https://binance-docs.github.io/apidocs/futures/en/#test-connectivity + https://binance-docs.github.io/apidocs/delivery/en/#test-connectivity + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "ping" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder() + + async def _get(self) -> dict: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, None) + return self._get_resp_decoder.decode(raw) + + +class BinanceTimeHttp(BinanceHttpEndpoint): + """ + Endpoint for testing connectivity to the REST API and receiving current server time. 
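+
+    The returned `serverTime` is a UNIX timestamp in milliseconds, which can be
+    compared against the local clock to estimate drift.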
+ + `GET /api/v3/time` + `GET /fapi/v1/time` + `GET /dapi/v1/time` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#check-server-time + https://binance-docs.github.io/apidocs/futures/en/#check-server-time + https://binance-docs.github.io/apidocs/delivery/en/#check-server-time + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "time" + super().__init__(client, methods, url_path) + self._get_resp_decoder = msgspec.json.Decoder(BinanceTime) + + async def _get(self) -> BinanceTime: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, None) + return self._get_resp_decoder.decode(raw) + + +class BinanceDepthHttp(BinanceHttpEndpoint): + """ + Endpoint of orderbook depth. + + `GET /api/v3/depth` + `GET /fapi/v1/depth` + `GET /dapi/v1/depth` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#order-book + https://binance-docs.github.io/apidocs/futures/en/#order-book + https://binance-docs.github.io/apidocs/delivery/en/#order-book + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "depth" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceDepth) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + Orderbook depth GET endpoint parameters. + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. + limit : int, optional, default 100 + The limit for the response. + SPOT/MARGIN (GET /api/v3/depth) + Default 100; max 5000. + FUTURES (GET /*api/v1/depth) + Default 500; max 1000. + Valid limits:[5, 10, 20, 50, 100, 500, 1000]. + """ + + symbol: BinanceSymbol + limit: Optional[int] = None + + async def _get(self, parameters: GetParameters) -> BinanceDepth: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceTradesHttp(BinanceHttpEndpoint): + """ + Endpoint of recent market trades. + + `GET /api/v3/trades` + `GET /fapi/v1/trades` + `GET /dapi/v1/trades` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#recent-trades-list + https://binance-docs.github.io/apidocs/futures/en/#recent-trades-list + https://binance-docs.github.io/apidocs/delivery/en/#recent-trades-list + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "trades" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceTrade]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for recent trades + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. + limit : int, optional + The limit for the response. Default 500; max 1000. 
+ """ + + symbol: BinanceSymbol + limit: Optional[int] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceTrade]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceHistoricalTradesHttp(BinanceHttpEndpoint): + """ + Endpoint of older market historical trades + + `GET /api/v3/historicalTrades` + `GET /fapi/v1/historicalTrades` + `GET /dapi/v1/historicalTrades` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#old-trade-lookup-market_data + https://binance-docs.github.io/apidocs/futures/en/#old-trades-lookup-market_data + https://binance-docs.github.io/apidocs/delivery/en/#old-trades-lookup-market_data + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.MARKET_DATA, + } + url_path = base_endpoint + "historicalTrades" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceTrade]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for historical trades + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. + limit : int, optional + The limit for the response. Default 500; max 1000. + fromId : str, optional + Trade id to fetch from. Default gets most recent trades + """ + + symbol: BinanceSymbol + limit: Optional[int] = None + fromId: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceTrade]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceAggTradesHttp(BinanceHttpEndpoint): + """ + Endpoint of compressed and aggregated market trades. + Market trades that fill in 100ms with the same price and same taking side + will have the quantity aggregated. + + `GET /api/v3/aggTrades` + `GET /fapi/v1/aggTrades` + `GET /dapi/v1/aggTrades` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#compressed-aggregate-trades-list + https://binance-docs.github.io/apidocs/futures/en/#compressed-aggregate-trades-list + https://binance-docs.github.io/apidocs/delivery/en/#compressed-aggregate-trades-list + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "aggTrades" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceAggTrade]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for aggregate trades. + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. + limit : int, optional + The limit for the response. Default 500; max 1000. + fromId : str, optional + Trade id to fetch from INCLUSIVE. + startTime : str, optional + Timestamp in ms to get aggregate trades from INCLUSIVE. + endTime : str, optional + Timestamp in ms to get aggregate trades until INCLUSIVE. 
+ """ + + symbol: BinanceSymbol + limit: Optional[int] = None + fromId: Optional[str] = None + startTime: Optional[str] = None + endTime: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceAggTrade]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceKlinesHttp(BinanceHttpEndpoint): + """ + Endpoint of Kline/candlestick bars for a symbol. + Klines are uniquely identified by their open time. + + `GET /api/v3/klines` + `GET /fapi/v1/klines` + `GET /dapi/v1/klines` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data + https://binance-docs.github.io/apidocs/futures/en/#kline-candlestick-data + https://binance-docs.github.io/apidocs/delivery/en/#kline-candlestick-data + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "klines" + super().__init__( + client, + methods, + url_path, + ) + self._get_resp_decoder = msgspec.json.Decoder(list[BinanceKline]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for klines. + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. + interval : str + The interval of kline, e.g 1m, 5m, 1h, 1d, etc. + limit : int, optional + The limit for the response. Default 500; max 1000. + startTime : str, optional + Timestamp in ms to get klines from INCLUSIVE. + endTime : str, optional + Timestamp in ms to get klines until INCLUSIVE. + """ + + symbol: BinanceSymbol + interval: BinanceKlineInterval + limit: Optional[int] = None + startTime: Optional[str] = None + endTime: Optional[str] = None + + async def _get(self, parameters: GetParameters) -> list[BinanceKline]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) + + +class BinanceTicker24hrHttp(BinanceHttpEndpoint): + """ + Endpoint of 24-hour rolling window price change statistics. + + `GET /api/v3/ticker/24hr` + `GET /fapi/v1/ticker/24hr` + `GET /dapi/v1/ticker/24hr` + + Warnings + -------- + Care should be taken when accessing this endpoint with no symbol specified. + The weight usage can be very large, which may cause rate limits to be hit. + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#24hr-ticker-price-change-statistics + https://binance-docs.github.io/apidocs/futures/en/#24hr-ticker-price-change-statistics + https://binance-docs.github.io/apidocs/delivery/en/#24hr-ticker-price-change-statistics + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "ticker/24hr" + super().__init__( + client, + methods, + url_path, + ) + self._get_obj_resp_decoder = msgspec.json.Decoder(BinanceTicker24hr) + self._get_arr_resp_decoder = msgspec.json.Decoder(list[BinanceTicker24hr]) + + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for 24hr ticker. + + Parameters + ---------- + symbol : BinanceSymbol + The trading pair. When given, endpoint will return a single BinanceTicker24hr + When omitted, endpoint will return a list of BinanceTicker24hr for all trading pairs. + symbols : BinanceSymbols + SPOT/MARGIN only! + List of trading pairs. 
When given, endpoint will return a list of BinanceTicker24hr.
+        type : str
+            SPOT/MARGIN only!
+            Select between FULL and MINI 24hr ticker responses to save bandwidth.
+        """
+
+        symbol: Optional[BinanceSymbol] = None
+        symbols: Optional[BinanceSymbols] = None  # SPOT/MARGIN only
+        type: Optional[str] = None  # SPOT/MARGIN only
+
+    async def _get(self, parameters: GetParameters) -> list[BinanceTicker24hr]:
+        method_type = BinanceMethodType.GET
+        raw = await self._method(method_type, parameters)
+        if parameters.symbol is not None:
+            return [self._get_obj_resp_decoder.decode(raw)]
+        else:
+            return self._get_arr_resp_decoder.decode(raw)
+
+
+class BinanceTickerPriceHttp(BinanceHttpEndpoint):
+    """
+    Endpoint of latest price for a symbol or symbols.
+
+    `GET /api/v3/ticker/price`
+    `GET /fapi/v1/ticker/price`
+    `GET /dapi/v1/ticker/price`
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#symbol-price-ticker
+    https://binance-docs.github.io/apidocs/futures/en/#symbol-price-ticker
+    https://binance-docs.github.io/apidocs/delivery/en/#symbol-price-ticker
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        base_endpoint: str,
+    ):
+        methods = {
+            BinanceMethodType.GET: BinanceSecurityType.NONE,
+        }
+        url_path = base_endpoint + "ticker/price"
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._get_obj_resp_decoder = msgspec.json.Decoder(BinanceTickerPrice)
+        self._get_arr_resp_decoder = msgspec.json.Decoder(list[BinanceTickerPrice])
+
+    class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        GET parameters for price ticker.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The trading pair. When given, endpoint will return a single BinanceTickerPrice.
+            When omitted, endpoint will return a list of BinanceTickerPrice for all trading pairs.
+        symbols : BinanceSymbols
+            SPOT/MARGIN only!
+            List of trading pairs. When given, endpoint will return a list of BinanceTickerPrice.
+        """
+
+        symbol: Optional[BinanceSymbol] = None
+        symbols: Optional[BinanceSymbols] = None  # SPOT/MARGIN only
+
+    async def _get(self, parameters: GetParameters) -> list[BinanceTickerPrice]:
+        method_type = BinanceMethodType.GET
+        raw = await self._method(method_type, parameters)
+        if parameters.symbol is not None:
+            return [self._get_obj_resp_decoder.decode(raw)]
+        else:
+            return self._get_arr_resp_decoder.decode(raw)
+
+
+class BinanceTickerBookHttp(BinanceHttpEndpoint):
+    """
+    Endpoint of best price/qty on the order book for a symbol or symbols.
+
+    `GET /api/v3/ticker/bookTicker`
+    `GET /fapi/v1/ticker/bookTicker`
+    `GET /dapi/v1/ticker/bookTicker`
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
+    https://binance-docs.github.io/apidocs/futures/en/#symbol-order-book-ticker
+    https://binance-docs.github.io/apidocs/delivery/en/#symbol-order-book-ticker
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        base_endpoint: str,
+    ):
+        methods = {
+            BinanceMethodType.GET: BinanceSecurityType.NONE,
+        }
+        url_path = base_endpoint + "ticker/bookTicker"
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._get_arr_resp_decoder = msgspec.json.Decoder(list[BinanceTickerBook])
+        self._get_obj_resp_decoder = msgspec.json.Decoder(BinanceTickerBook)
+
+    class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        GET parameters for order book ticker.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The trading pair.
When given, endpoint will return a single BinanceTickerBook + When omitted, endpoint will return a list of BinanceTickerBook for all trading pairs. + symbols : str + SPOT/MARGIN only! + List of trading pairs. When given, endpoint will return a list of BinanceTickerBook. + """ + + symbol: Optional[BinanceSymbol] = None + symbols: Optional[BinanceSymbols] = None # SPOT/MARGIN only + + async def _get(self, parameters: GetParameters) -> list[BinanceTickerBook]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + if parameters.symbol is not None: + return [self._get_obj_resp_decoder.decode(raw)] + else: + return self._get_arr_resp_decoder.decode(raw) + + +class BinanceMarketHttpAPI: + """ + Provides access to the Binance Market HTTP REST API. + + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint prefix. + + Warnings + -------- + This class should not be used directly, but through a concrete subclass. + """ + + def __init__( + self, + client: BinanceHttpClient, + account_type: BinanceAccountType, + ): + PyCondition.not_none(client, "client") + self.client = client + + if account_type.is_spot_or_margin: + self.base_endpoint = "/api/v3/" + elif account_type == BinanceAccountType.FUTURES_USDT: + self.base_endpoint = "/fapi/v1/" + elif account_type == BinanceAccountType.FUTURES_COIN: + self.base_endpoint = "/dapi/v1/" + else: + raise RuntimeError( # pragma: no cover (design-time error) + f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover + ) + + # Create Endpoints + self._endpoint_ping = BinancePingHttp(client, self.base_endpoint) + self._endpoint_time = BinanceTimeHttp(client, self.base_endpoint) + self._endpoint_depth = BinanceDepthHttp(client, self.base_endpoint) + self._endpoint_trades = BinanceTradesHttp(client, self.base_endpoint) + self._endpoint_historical_trades = BinanceHistoricalTradesHttp(client, self.base_endpoint) + self._endpoint_agg_trades = BinanceAggTradesHttp(client, self.base_endpoint) + self._endpoint_klines = BinanceKlinesHttp(client, self.base_endpoint) + self._endpoint_ticker_24hr = BinanceTicker24hrHttp(client, self.base_endpoint) + self._endpoint_ticker_price = BinanceTickerPriceHttp(client, self.base_endpoint) + self._endpoint_ticker_book = BinanceTickerBookHttp(client, self.base_endpoint) + + async def ping(self) -> dict: + """Ping Binance REST API.""" + return await self._endpoint_ping._get() + + async def request_server_time(self) -> int: + """Request server time from Binance.""" + response = await self._endpoint_time._get() + return response.serverTime + + async def query_depth( + self, + symbol: str, + limit: Optional[int] = None, + ) -> BinanceDepth: + """Query order book depth for a symbol.""" + return await self._endpoint_depth._get( + parameters=self._endpoint_depth.GetParameters( + symbol=BinanceSymbol(symbol), + limit=limit, + ), + ) + + async def request_order_book_snapshot( + self, + instrument_id: InstrumentId, + ts_init: int, + limit: Optional[int] = None, + ) -> OrderBookSnapshot: + """Request snapshot of order book depth.""" + depth = await self.query_depth(instrument_id.symbol.value, limit) + return depth.parse_to_order_book_snapshot( + instrument_id=instrument_id, + ts_init=ts_init, + ) + + async def query_trades( + self, + symbol: str, + limit: Optional[int] = None, + ) -> list[BinanceTrade]: + """Query trades for symbol.""" + return await self._endpoint_trades._get( + 
parameters=self._endpoint_trades.GetParameters( + symbol=BinanceSymbol(symbol), + limit=limit, + ), + ) + + async def request_trade_ticks( + self, + instrument_id: InstrumentId, + ts_init: int, + limit: Optional[int] = None, + ) -> list[TradeTick]: + """Request TradeTicks from Binance.""" + trades = await self.query_trades(instrument_id.symbol.value, limit) + return [ + trade.parse_to_trade_tick( + instrument_id=instrument_id, + ts_init=ts_init, + ) + for trade in trades + ] + + async def query_agg_trades( + self, + symbol: str, + limit: Optional[int] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + from_id: Optional[str] = None, + ) -> list[BinanceAggTrade]: + """Query aggregated trades for symbol.""" + return await self._endpoint_agg_trades._get( + parameters=self._endpoint_agg_trades.GetParameters( + symbol=BinanceSymbol(symbol), + limit=limit, + startTime=start_time, + endTime=end_time, + fromId=from_id, + ), + ) + + async def request_agg_trade_ticks( + self, + instrument_id: InstrumentId, + ts_init: int, + limit: int = 1000, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + from_id: Optional[str] = None, + ) -> list[TradeTick]: + """ + Request TradeTicks from Binance aggregated trades. + If start_time and end_time are both specified, will fetch *all* TradeTicks + in the interval, making multiple requests if necessary. + """ + ticks: list[TradeTick] = [] + next_start_time = start_time + + if from_id is not None and (start_time or end_time) is not None: + raise RuntimeError( + "Cannot specify both fromId and startTime or endTime.", + ) + + # Only split into separate requests if both start_time and end_time are specified + should_loop = (start_time is not None and end_time is not None) is True + max_interval = (1000 * 60 * 60) - 1 # 1ms under an hour, as specified in Futures docs. 
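+        # Page the requested interval in windows of at most `max_interval`
+        # milliseconds: `next_start_time` advances to the timestamp of the last
+        # trade in each response, and `last_id` de-duplicates trades that fall
+        # on window boundaries.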
+ last_id = 0 + interval_limited = False + + def _calculate_next_end_time(start_time: str, end_time: str): + next_interval = int(start_time) + max_interval + interval_limited = next_interval < int(end_time) + next_end_time = str(next_interval) if interval_limited is True else end_time + return next_end_time, interval_limited + + if should_loop: + next_end_time, interval_limited = _calculate_next_end_time(start_time, end_time) + else: + next_end_time = end_time + + while True: + response = await self.query_agg_trades( + instrument_id.symbol.value, + limit, + start_time=next_start_time, + end_time=next_end_time, + from_id=from_id, + ) + + for trade in response: + if not trade.a > last_id: + # Skip duplicate trades + continue + ticks.append( + trade.parse_to_trade_tick( + instrument_id=instrument_id, + ts_init=ts_init, + ), + ) + + if len(response) < limit and interval_limited is False: + # end loop regardless when limit is not hit + break + if not should_loop: + break + else: + last = response[-1] + last_id = last.a + next_start_time = str(last.T) + next_end_time, interval_limited = _calculate_next_end_time( + next_start_time, + end_time, + ) + continue + + return ticks + + async def query_historical_trades( + self, + symbol: str, + limit: Optional[int] = None, + from_id: Optional[str] = None, + ) -> list[BinanceTrade]: + """Query historical trades for symbol.""" + return await self._endpoint_historical_trades._get( + parameters=self._endpoint_historical_trades.GetParameters( + symbol=BinanceSymbol(symbol), + limit=limit, + fromId=from_id, + ), + ) + + async def request_historical_trade_ticks( + self, + instrument_id: InstrumentId, + ts_init: int, + limit: Optional[int] = None, + from_id: Optional[str] = None, + ) -> list[TradeTick]: + """Request historical TradeTicks from Binance.""" + historical_trades = await self.query_historical_trades( + symbol=instrument_id.symbol.value, + limit=limit, + from_id=from_id, + ) + return [ + trade.parse_to_trade_tick( + instrument_id=instrument_id, + ts_init=ts_init, + ) + for trade in historical_trades + ] + + async def query_klines( + self, + symbol: str, + interval: BinanceKlineInterval, + limit: Optional[int] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + ) -> list[BinanceKline]: + """Query klines for a symbol over an interval.""" + return await self._endpoint_klines._get( + parameters=self._endpoint_klines.GetParameters( + symbol=BinanceSymbol(symbol), + interval=interval, + limit=limit, + startTime=start_time, + endTime=end_time, + ), + ) + + async def request_binance_bars( + self, + bar_type: BarType, + ts_init: int, + interval: BinanceKlineInterval, + limit: Optional[int] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + ) -> list[BinanceBar]: + """Request Binance Bars from Klines.""" + klines = await self.query_klines( + symbol=bar_type.instrument_id.symbol.value, + interval=interval, + limit=limit, + start_time=start_time, + end_time=end_time, + ) + bars: list[BinanceBar] = [kline.parse_to_binance_bar(bar_type, ts_init) for kline in klines] + return bars + + async def query_ticker_24hr( + self, + symbol: Optional[str] = None, + symbols: Optional[list[str]] = None, + response_type: Optional[str] = None, + ) -> list[BinanceTicker24hr]: + """Query 24hr ticker for symbol or symbols.""" + if symbol is not None and symbols is not None: + raise RuntimeError( + "Cannot specify both symbol and symbols parameters.", + ) + return await self._endpoint_ticker_24hr._get( + 
parameters=self._endpoint_ticker_24hr.GetParameters( + symbol=BinanceSymbol(symbol), + symbols=BinanceSymbols(symbols), + type=response_type, + ), + ) + + async def query_ticker_price( + self, + symbol: Optional[str] = None, + symbols: Optional[list[str]] = None, + ) -> list[BinanceTickerPrice]: + """Query price ticker for symbol or symbols.""" + if symbol is not None and symbols is not None: + raise RuntimeError( + "Cannot specify both symbol and symbols parameters.", + ) + return await self._endpoint_ticker_price._get( + parameters=self._endpoint_ticker_price.GetParameters( + symbol=BinanceSymbol(symbol), + symbols=BinanceSymbols(symbols), + ), + ) + + async def query_ticker_book( + self, + symbol: Optional[str] = None, + symbols: Optional[list[str]] = None, + ) -> list[BinanceTickerBook]: + """Query book ticker for symbol or symbols.""" + if symbol is not None and symbols is not None: + raise RuntimeError( + "Cannot specify both symbol and symbols parameters.", + ) + return await self._endpoint_ticker_book._get( + parameters=self._endpoint_ticker_book.GetParameters( + symbol=BinanceSymbol(symbol), + symbols=BinanceSymbols(symbols), + ), + ) diff --git a/nautilus_trader/adapters/binance/http/user.py b/nautilus_trader/adapters/binance/http/user.py new file mode 100644 index 000000000000..ff8d27f37d23 --- /dev/null +++ b/nautilus_trader/adapters/binance/http/user.py @@ -0,0 +1,205 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +from typing import Optional + +import msgspec + +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.user import BinanceListenKey +from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.core.correctness import PyCondition + + +class BinanceListenKeyHttp(BinanceHttpEndpoint): + """ + Endpoint for managing user data streams (listenKey). 
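+
+    A listen key identifies the account's user data stream. Binance closes the
+    stream after 60 minutes unless a keepalive is sent, which is what the PUT
+    methods below are for.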
+
+    `POST /api/v3/userDataStream`
+    `POST /sapi/v1/userDataStream`
+    `POST /sapi/v1/userDataStream/isolated`
+    `POST /fapi/v1/listenKey`
+    `POST /dapi/v1/listenKey`
+
+    `PUT /api/v3/userDataStream`
+    `PUT /sapi/v1/userDataStream`
+    `PUT /sapi/v1/userDataStream/isolated`
+    `PUT /fapi/v1/listenKey`
+    `PUT /dapi/v1/listenKey`
+
+    `DELETE /api/v3/userDataStream`
+    `DELETE /sapi/v1/userDataStream`
+    `DELETE /sapi/v1/userDataStream/isolated`
+    `DELETE /fapi/v1/listenKey`
+    `DELETE /dapi/v1/listenKey`
+
+    References
+    ----------
+    https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot
+    https://binance-docs.github.io/apidocs/spot/en/#listen-key-margin
+    https://binance-docs.github.io/apidocs/futures/en/#start-user-data-stream-user_stream
+    https://binance-docs.github.io/apidocs/delivery/en/#start-user-data-stream-user_stream
+    """
+
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        url_path: str,
+    ):
+        methods = {
+            BinanceMethodType.POST: BinanceSecurityType.USER_STREAM,
+            BinanceMethodType.PUT: BinanceSecurityType.USER_STREAM,
+            BinanceMethodType.DELETE: BinanceSecurityType.USER_STREAM,
+        }
+        super().__init__(
+            client,
+            methods,
+            url_path,
+        )
+        self._post_resp_decoder = msgspec.json.Decoder(BinanceListenKey)
+        self._put_resp_decoder = msgspec.json.Decoder()
+        self._delete_resp_decoder = msgspec.json.Decoder()
+
+    class PostParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        POST parameters for creating listen keys.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The trading pair. Mandatory for ISOLATED MARGIN accounts only.
+        """
+
+        symbol: Optional[BinanceSymbol] = None  # MARGIN_ISOLATED only, mandatory
+
+    class PutDeleteParameters(msgspec.Struct, omit_defaults=True, frozen=True):
+        """
+        PUT & DELETE parameters for managing listen keys.
+
+        Parameters
+        ----------
+        symbol : BinanceSymbol
+            The trading pair. Mandatory for ISOLATED MARGIN accounts only.
+        listenKey : str
+            The listen key to manage. Mandatory for SPOT/MARGIN accounts only.
+        """
+
+        symbol: Optional[BinanceSymbol] = None  # MARGIN_ISOLATED only, mandatory
+        listenKey: Optional[str] = None  # SPOT/MARGIN only, mandatory
+
+    async def _post(self, parameters: Optional[PostParameters] = None) -> BinanceListenKey:
+        method_type = BinanceMethodType.POST
+        raw = await self._method(method_type, parameters)
+        return self._post_resp_decoder.decode(raw)
+
+    async def _put(self, parameters: Optional[PutDeleteParameters] = None) -> dict:
+        method_type = BinanceMethodType.PUT
+        raw = await self._method(method_type, parameters)
+        return self._put_resp_decoder.decode(raw)
+
+    async def _delete(self, parameters: Optional[PutDeleteParameters] = None) -> dict:
+        method_type = BinanceMethodType.DELETE
+        raw = await self._method(method_type, parameters)
+        return self._delete_resp_decoder.decode(raw)
+
+
+class BinanceUserDataHttpAPI:
+    """
+    Provides access to the `Binance` User HTTP REST API.
+
+    Parameters
+    ----------
+    client : BinanceHttpClient
+        The Binance REST API client.
+    account_type : BinanceAccountType
+        The Binance account type, used to select the endpoint.
+
+    Warnings
+    --------
+    This class should not be used directly, but through a concrete subclass.
+ """ + + def __init__( + self, + client: BinanceHttpClient, + account_type: BinanceAccountType, + ): + PyCondition.not_none(client, "client") + self.client = client + self.account_type = account_type + + if account_type == BinanceAccountType.SPOT: + self.base_endpoint = "/api/v3/" + listen_key_url = self.base_endpoint + "userDataStream" + elif account_type == BinanceAccountType.MARGIN_CROSS: + self.base_endpoint = "/sapi/v1/" + listen_key_url = self.base_endpoint + "userDataStream" + elif account_type == BinanceAccountType.MARGIN_ISOLATED: + self.base_endpoint = "/sapi/v1/" + listen_key_url = self.base_endpoint + "userDataStream/isolated" + elif account_type == BinanceAccountType.FUTURES_USDT: + self.base_endpoint = "/fapi/v1/" + listen_key_url = self.base_endpoint + "listenKey" + elif account_type == BinanceAccountType.FUTURES_COIN: + self.base_endpoint = "/dapi/v1/" + listen_key_url = self.base_endpoint + "listenKey" + else: + raise RuntimeError( # pragma: no cover (design-time error) + f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover (design-time error) # noqa + ) + + self._endpoint_listenkey = BinanceListenKeyHttp(client, listen_key_url) + + async def create_listen_key( + self, + symbol: Optional[str] = None, + ) -> BinanceListenKey: + """Create Binance ListenKey.""" + key = await self._endpoint_listenkey._post( + parameters=self._endpoint_listenkey.PostParameters( + symbol=BinanceSymbol(symbol), + ), + ) + return key + + async def keepalive_listen_key( + self, + symbol: Optional[str] = None, + listen_key: Optional[str] = None, + ): + """Ping/Keepalive Binance ListenKey.""" + await self._endpoint_listenkey._put( + parameters=self._endpoint_listenkey.PutDeleteParameters( + symbol=BinanceSymbol(symbol), + listenKey=listen_key, + ), + ) + + async def delete_listen_key( + self, + symbol: Optional[str] = None, + listen_key: Optional[str] = None, + ): + """Delete Binance ListenKey.""" + await self._endpoint_listenkey._delete( + parameters=self._endpoint_listenkey.PutDeleteParameters( + symbol=BinanceSymbol(symbol), + listenKey=listen_key, + ), + ) diff --git a/nautilus_trader/adapters/binance/spot/data.py b/nautilus_trader/adapters/binance/spot/data.py index ea03e0670483..3d33068c462b 100644 --- a/nautilus_trader/adapters/binance/spot/data.py +++ b/nautilus_trader/adapters/binance/spot/data.py @@ -14,63 +14,30 @@ # ------------------------------------------------------------------------------------------------- import asyncio -from typing import Any, Optional +from typing import Optional import msgspec -import pandas as pd -from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE +from nautilus_trader.adapters.binance.common.data import BinanceCommonDataClient from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import parse_symbol -from nautilus_trader.adapters.binance.common.parsing.data import parse_bar_http -from nautilus_trader.adapters.binance.common.parsing.data import parse_bar_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_diff_depth_stream_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_quote_tick_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_ticker_24hr_ws -from nautilus_trader.adapters.binance.common.parsing.data import parse_trade_tick_http -from nautilus_trader.adapters.binance.common.schemas import BinanceCandlestickMsg -from nautilus_trader.adapters.binance.common.schemas 
import BinanceDataMsgWrapper -from nautilus_trader.adapters.binance.common.schemas import BinanceOrderBookMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceQuoteMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceTickerMsg -from nautilus_trader.adapters.binance.common.schemas import BinanceTrade -from nautilus_trader.adapters.binance.common.types import BinanceBar -from nautilus_trader.adapters.binance.common.types import BinanceTicker from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.spot.enums import BinanceSpotEnumParser from nautilus_trader.adapters.binance.spot.http.market import BinanceSpotMarketHttpAPI -from nautilus_trader.adapters.binance.spot.parsing.data import parse_spot_book_snapshot -from nautilus_trader.adapters.binance.spot.parsing.data import parse_spot_trade_tick_ws -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotOrderBookMsg +from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotOrderBookPartialDepthMsg from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotTradeMsg -from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.enums import LogColor from nautilus_trader.common.logging import Logger from nautilus_trader.common.providers import InstrumentProvider -from nautilus_trader.core.asynchronous import sleep0 -from nautilus_trader.core.datetime import secs_to_millis -from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.live.data_client import LiveMarketDataClient -from nautilus_trader.model.data.bar import BarType -from nautilus_trader.model.data.base import DataType -from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.core.correctness import PyCondition from nautilus_trader.model.data.tick import TradeTick -from nautilus_trader.model.enums import BarAggregation -from nautilus_trader.model.enums import BookType -from nautilus_trader.model.enums import PriceType -from nautilus_trader.model.enums import bar_aggregation_to_str -from nautilus_trader.model.identifiers import ClientId from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.instruments.base import Instrument from nautilus_trader.model.orderbook.data import OrderBookData -from nautilus_trader.model.orderbook.data import OrderBookDeltas from nautilus_trader.model.orderbook.data import OrderBookSnapshot from nautilus_trader.msgbus.bus import MessageBus -class BinanceSpotDataClient(LiveMarketDataClient): +class BinanceSpotDataClient(BinanceCommonDataClient): """ Provides a data client for the `Binance Spot/Margin` exchange. @@ -94,6 +61,9 @@ class BinanceSpotDataClient(LiveMarketDataClient): The account type for the client. base_url_ws : str, optional The base URL for the WebSocket client. + use_agg_trade_ticks : bool, default False + Whether to use aggregated trade tick endpoints instead of raw trade ticks. + TradeId of ticks will be the Aggregate tradeId returned by Binance. 
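A pattern worth noting throughout this refactor: the rewritten clients build their msgspec decoders once in the constructor (e.g. `msgspec.json.Decoder(BinanceSpotTradeMsg)`) and reuse them on every websocket frame, rather than calling `msgspec.json.decode(raw, type=...)` per message, which re-resolves the target type each time. A minimal standalone illustration:

```python
import msgspec


class TradeData(msgspec.Struct):
    s: str  # symbol
    p: str  # price


# Build the typed decoder once...
decoder = msgspec.json.Decoder(TradeData)

# ...then reuse it on every frame (cheaper than a per-message typed decode)
raw = b'{"s": "ETHUSDT", "p": "1650.01"}'
msg = decoder.decode(raw)
assert msg.s == "ETHUSDT"
```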
""" def __init__( @@ -107,471 +77,49 @@ def __init__( instrument_provider: InstrumentProvider, account_type: BinanceAccountType = BinanceAccountType.SPOT, base_url_ws: Optional[str] = None, + use_agg_trade_ticks: bool = False, ): - super().__init__( - loop=loop, - client_id=ClientId(BINANCE_VENUE.value), - venue=BINANCE_VENUE, - instrument_provider=instrument_provider, - msgbus=msgbus, - cache=cache, - clock=clock, - logger=logger, + PyCondition.true( + account_type.is_spot_or_margin, + "account_type was not SPOT, MARGIN_CROSS or MARGIN_ISOLATED", ) - assert account_type.is_spot or account_type.is_margin, "account type is not for spot/margin" - self._binance_account_type = account_type - self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE) - - self._update_instruments_interval: int = 60 * 60 # Once per hour (hardcode) - self._update_instruments_task: Optional[asyncio.Task] = None + # Spot HTTP API + self._spot_http_market = BinanceSpotMarketHttpAPI(client, account_type) - # HTTP API - self._http_client = client - self._http_market = BinanceSpotMarketHttpAPI(client=self._http_client) + # Spot enum parser + self._spot_enum_parser = BinanceSpotEnumParser() - # WebSocket API - self._ws_client = BinanceWebSocketClient( + super().__init__( loop=loop, + client=client, + market=self._spot_http_market, + enum_parser=self._spot_enum_parser, + msgbus=msgbus, + cache=cache, clock=clock, logger=logger, - handler=self._handle_ws_message, - base_url=base_url_ws, - ) - - # Hot caches - self._instrument_ids: dict[str, InstrumentId] = {} - self._book_buffer: dict[InstrumentId, list[OrderBookData]] = {} - - self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) - self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) - - async def _connect(self) -> None: - # Connect HTTP client - if not self._http_client.connected: - await self._http_client.connect() - - await self._instrument_provider.initialize() - - self._send_all_instruments_to_data_engine() - self._update_instruments_task = self.create_task(self._update_instruments()) - - # Connect WebSocket clients - self.create_task(self._connect_websockets()) - - async def _connect_websockets(self) -> None: - self._log.info("Awaiting subscriptions...") - await asyncio.sleep(4) - if self._ws_client.has_subscriptions: - await self._ws_client.connect() - - async def _update_instruments(self) -> None: - try: - while True: - self._log.debug( - f"Scheduled `update_instruments` to run in " - f"{self._update_instruments_interval}s.", - ) - await asyncio.sleep(self._update_instruments_interval) - await self._instrument_provider.load_all_async() - self._send_all_instruments_to_data_engine() - except asyncio.CancelledError: - self._log.debug("`update_instruments` task was canceled.") - - async def _disconnect(self) -> None: - # Cancel tasks - if self._update_instruments_task: - self._log.debug("Canceling `update_instruments` task...") - self._update_instruments_task.cancel() - self._update_instruments_task.done() - - # Disconnect WebSocket client - if self._ws_client.is_connected: - await self._ws_client.disconnect() - - # Disconnect HTTP client - if self._http_client.connected: - await self._http_client.disconnect() - - # -- SUBSCRIPTIONS ---------------------------------------------------------------------------- - - async def _subscribe_instruments(self) -> None: - pass # Do nothing further - - async def _subscribe_instrument(self, instrument_id: InstrumentId) -> None: - pass # Do nothing further - - async 
def _subscribe_order_book_deltas( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - kwargs: Optional[dict] = None, - ) -> None: - await self._subscribe_order_book( - instrument_id=instrument_id, - book_type=book_type, - depth=depth, - ) - - async def _subscribe_order_book_snapshots( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - kwargs: Optional[dict] = None, - ) -> None: - await self._subscribe_order_book( - instrument_id=instrument_id, - book_type=book_type, - depth=depth, - ) - - async def _subscribe_order_book( - self, - instrument_id: InstrumentId, - book_type: BookType, - depth: Optional[int] = None, - ) -> None: - if book_type == BookType.L3_MBO: - self._log.error( - "Cannot subscribe to order book deltas: " - "L3_MBO data is not published by Binance. " - "Valid book types are L1_TBBO, L2_MBP.", - ) - return - - if depth is None or depth == 0: - depth = 20 - - # Add delta stream buffer - self._book_buffer[instrument_id] = [] - - if 0 < depth <= 20: - if depth not in (5, 10, 20): - self._log.error( - "Cannot subscribe to order book snapshots: " - f"invalid `depth`, was {depth}. " - "Valid depths are 5, 10 or 20.", - ) - return - self._ws_client.subscribe_partial_book_depth( - symbol=instrument_id.symbol.value, - depth=depth, - speed=100, - ) - else: - self._ws_client.subscribe_diff_book_depth( - symbol=instrument_id.symbol.value, - speed=100, - ) - - while not self._ws_client.is_connected: - await sleep0() - - data: dict[str, Any] = await self._http_market.depth( - symbol=instrument_id.symbol.value, - limit=depth, - ) - - ts_event: int = self._clock.timestamp_ns() - last_update_id: int = data.get("lastUpdateId", 0) - - snapshot = OrderBookSnapshot( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - bids=[[float(o[0]), float(o[1])] for o in data.get("bids", [])], - asks=[[float(o[0]), float(o[1])] for o in data.get("asks", [])], - ts_event=ts_event, - ts_init=ts_event, - sequence=last_update_id, - ) - - self._handle_data(snapshot) - - book_buffer = self._book_buffer.pop(instrument_id, []) - for deltas in book_buffer: - if deltas.sequence <= last_update_id: - continue - self._handle_data(deltas) - - async def _subscribe_ticker(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_ticker(instrument_id.symbol.value) - - async def _subscribe_quote_ticks(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_book_ticker(instrument_id.symbol.value) - - async def _subscribe_trade_ticks(self, instrument_id: InstrumentId) -> None: - self._ws_client.subscribe_trades(instrument_id.symbol.value) - - async def _subscribe_bars(self, bar_type: BarType) -> None: - if not bar_type.spec.is_time_aggregated(): - self._log.error( - f"Cannot subscribe to {bar_type}: only time bars are aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation == BarAggregation.MILLISECOND: - self._log.error( - f"Cannot subscribe to {bar_type}: " - f"{bar_aggregation_to_str(bar_type.spec.aggregation)} " - f"bars are not aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation == BarAggregation.SECOND: - resolution = "s" - elif bar_type.spec.aggregation == BarAggregation.MINUTE: - resolution = "m" - elif bar_type.spec.aggregation == BarAggregation.HOUR: - resolution = "h" - elif bar_type.spec.aggregation == BarAggregation.DAY: - resolution = "d" - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BarAggregation`, " # pragma: no cover 
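The removed `_subscribe_order_book` above (now handled by the common client) still documents the snapshot/delta synchronization Binance requires: buffer websocket deltas while the REST depth snapshot is in flight, then replay only the deltas sequenced after the snapshot's `lastUpdateId`. A simplified sketch of that handshake, with stand-in types:

```python
from dataclasses import dataclass, field
from typing import Callable


@dataclass
class Delta:
    sequence: int
    payload: dict


@dataclass
class BookSync:
    buffer: list[Delta] = field(default_factory=list)
    buffering: bool = True

    def on_delta(self, delta: Delta, apply: Callable[[Delta], None]) -> None:
        if self.buffering:
            self.buffer.append(delta)  # snapshot not yet applied
        else:
            apply(delta)

    def on_snapshot(self, last_update_id: int, apply: Callable[[Delta], None]) -> None:
        self.buffering = False
        for delta in self.buffer:
            if delta.sequence <= last_update_id:
                continue  # already reflected in the snapshot
            apply(delta)
        self.buffer.clear()
```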
- f"was {bar_aggregation_to_str(bar_type.spec.aggregation)}", # pragma: no cover - ) - - self._ws_client.subscribe_bars( - symbol=bar_type.instrument_id.symbol.value, - interval=f"{bar_type.spec.step}{resolution}", - ) - - async def _unsubscribe_instruments(self) -> None: - pass # Do nothing further - - async def _unsubscribe_instrument(self, instrument_id: InstrumentId) -> None: - pass # Do nothing further - - async def _unsubscribe_order_book_deltas(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_order_book_snapshots(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_ticker(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_quote_ticks(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_trade_ticks(self, instrument_id: InstrumentId) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - async def _unsubscribe_bars(self, bar_type: BarType) -> None: - pass # TODO: Unsubscribe from Binance if no other subscriptions - - # -- REQUESTS --------------------------------------------------------------------------------- - - async def _request_instrument(self, instrument_id: InstrumentId, correlation_id: UUID4) -> None: - instrument: Optional[Instrument] = self._instrument_provider.find(instrument_id) - if instrument is None: - self._log.error(f"Cannot find instrument for {instrument_id}.") - return - - data_type = DataType( - type=Instrument, - metadata={"instrument_id": instrument_id}, - ) - - self._handle_data_response( - data_type=data_type, - data=[instrument], # Data engine handles lists of instruments - correlation_id=correlation_id, - ) - - async def _request_quote_ticks( - self, - instrument_id: InstrumentId, - limit: int, - correlation_id: UUID4, - from_datetime: Optional[pd.Timestamp] = None, - to_datetime: Optional[pd.Timestamp] = None, - ) -> None: - self._log.error( - "Cannot request historical quote ticks: not published by Binance.", - ) - - async def _request_trade_ticks( - self, - instrument_id: InstrumentId, - limit: int, - correlation_id: UUID4, - from_datetime: Optional[pd.Timestamp] = None, - to_datetime: Optional[pd.Timestamp] = None, - ) -> None: - if limit == 0 or limit > 1000: - limit = 1000 - - if from_datetime is not None or to_datetime is not None: - self._log.warning( - "Trade ticks have been requested with a from/to time range, " - f"however the request will be for the most recent {limit}.", - ) - - response: list[BinanceTrade] = await self._http_market.trades( - instrument_id.symbol.value, - limit, + instrument_provider=instrument_provider, + account_type=account_type, + base_url_ws=base_url_ws, + use_agg_trade_ticks=use_agg_trade_ticks, ) - ticks: list[TradeTick] = [ - parse_trade_tick_http( - trade=trade, - instrument_id=instrument_id, - ts_init=self._clock.timestamp_ns(), - ) - for trade in response - ] - - self._handle_trade_ticks(instrument_id, ticks, correlation_id) - - async def _request_bars( # noqa (too complex) - self, - bar_type: BarType, - limit: int, - correlation_id: UUID4, - from_datetime: Optional[pd.Timestamp] = None, - to_datetime: Optional[pd.Timestamp] = None, - ) -> None: - if bar_type.is_internally_aggregated(): - self._log.error( - f"Cannot request {bar_type}: " - f"only historical 
bars with EXTERNAL aggregation available from Binance.", - ) - return - - if not bar_type.spec.is_time_aggregated(): - self._log.error( - f"Cannot request {bar_type}: only time bars are aggregated by Binance.", - ) - return - - if bar_type.spec.aggregation == BarAggregation.MILLISECOND: - self._log.error( - f"Cannot request {bar_type}: " - f"{bar_aggregation_to_str(bar_type.spec.aggregation)} " - f"bars are not aggregated by Binance.", - ) - return - - if bar_type.spec.price_type != PriceType.LAST: - self._log.error( - f"Cannot request {bar_type}: " - f"only historical bars for LAST price type available from Binance.", - ) - return - - if limit == 0 or limit > 1000: - limit = 1000 - - if bar_type.spec.aggregation == BarAggregation.SECOND: - resolution = "s" - elif bar_type.spec.aggregation == BarAggregation.MINUTE: - resolution = "m" - elif bar_type.spec.aggregation == BarAggregation.HOUR: - resolution = "h" - elif bar_type.spec.aggregation == BarAggregation.DAY: - resolution = "d" - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BarAggregation`, " # pragma: no cover - f"was {bar_aggregation_to_str(bar_type.spec.aggregation)}", # pragma: no cover - ) - - start_time_ms = None - if from_datetime is not None: - start_time_ms = secs_to_millis(from_datetime.timestamp()) - - end_time_ms = None - if to_datetime is not None: - end_time_ms = secs_to_millis(to_datetime.timestamp()) - - data: list[list[Any]] = await self._http_market.klines( - symbol=bar_type.instrument_id.symbol.value, - interval=f"{bar_type.spec.step}{resolution}", - start_time_ms=start_time_ms, - end_time_ms=end_time_ms, - limit=limit, + # Websocket msgspec decoders + self._decoder_spot_trade = msgspec.json.Decoder(BinanceSpotTradeMsg) + self._decoder_spot_order_book_partial_depth = msgspec.json.Decoder( + BinanceSpotOrderBookPartialDepthMsg, ) - bars: list[BinanceBar] = [ - parse_bar_http( - bar_type, - values=b, - ts_init=self._clock.timestamp_ns(), - ) - for b in data - ] - partial: BinanceBar = bars.pop() - - self._handle_bars(bar_type, bars, partial, correlation_id) + # -- WEBSOCKET HANDLERS --------------------------------------------------------------------------------- - def _send_all_instruments_to_data_engine(self) -> None: - for instrument in self._instrument_provider.get_all().values(): - self._handle_data(instrument) - - for currency in self._instrument_provider.currencies().values(): - self._cache.add_currency(currency) - - def _get_cached_instrument_id(self, symbol: str) -> InstrumentId: - # Parse instrument ID - nautilus_symbol: str = parse_symbol(symbol, account_type=self._binance_account_type) - instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol) - if not instrument_id: - instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE) - self._instrument_ids[nautilus_symbol] = instrument_id - return instrument_id - - def _handle_ws_message(self, raw: bytes) -> None: - # TODO(cs): Uncomment for development - # self._log.info(str(raw), LogColor.CYAN) - - wrapper = msgspec.json.decode(raw, type=BinanceDataMsgWrapper) - - try: - if "@depth@" in wrapper.stream: - self._handle_book_diff_update(raw) - elif "@depth" in wrapper.stream: - self._handle_book_update(raw) - elif "@bookTicker" in wrapper.stream: - self._handle_book_ticker(raw) - elif "@trade" in wrapper.stream: - self._handle_trade(raw) - elif "@ticker" in wrapper.stream: - self._handle_ticker(raw) - elif "@kline" in wrapper.stream: - self._handle_kline(raw) - else: - self._log.error( - 
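The `_handle_ws_message` dispatcher being removed here routed frames by substring of the stream name, and the ordering is load-bearing: a diff-depth stream name (`<symbol>@depth@100ms`) also contains the partial-depth marker `@depth`, so the more specific marker must be tested first. A self-contained sketch of the same routing:

```python
def route(stream: str) -> str:
    if "@depth@" in stream:
        return "book_diff"       # must come before the "@depth" check
    if "@depth" in stream:
        return "book_partial"    # e.g. "<symbol>@depth20@100ms"
    if "@bookTicker" in stream:
        return "quote"
    if "@trade" in stream:
        return "trade"
    if "@ticker" in stream:
        return "ticker"
    if "@kline" in stream:
        return "bar"
    raise ValueError(f"unrecognized stream: {stream}")


assert route("btcusdt@depth@100ms") == "book_diff"
assert route("btcusdt@depth20@100ms") == "book_partial"
```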
f"Unrecognized websocket message type: {msgspec.json.decode(raw)['stream']}", - ) - return - except Exception as e: - self._log.error(f"Error handling websocket message, {e}") - - def _handle_book_diff_update(self, raw: bytes) -> None: - msg: BinanceOrderBookMsg = msgspec.json.decode(raw, type=BinanceOrderBookMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - book_deltas: OrderBookDeltas = parse_diff_depth_stream_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - book_buffer: Optional[list[OrderBookData]] = self._book_buffer.get(instrument_id) - if book_buffer is not None: - book_buffer.append(book_deltas) - else: - self._handle_data(book_deltas) - - def _handle_book_update(self, raw: bytes) -> None: - msg: BinanceSpotOrderBookMsg = msgspec.json.decode(raw, type=BinanceSpotOrderBookMsg) + def _handle_book_partial_update(self, raw: bytes) -> None: + msg = self._decoder_spot_order_book_partial_depth.decode(raw) instrument_id: InstrumentId = self._get_cached_instrument_id( - msg.stream.partition("@")[0].upper(), + msg.stream.partition("@")[0], ) - book_snapshot: OrderBookSnapshot = parse_spot_book_snapshot( + book_snapshot: OrderBookSnapshot = msg.data.parse_to_order_book_snapshot( instrument_id=instrument_id, - data=msg.data, ts_init=self._clock.timestamp_ns(), ) # Check if book buffer active @@ -581,45 +129,11 @@ def _handle_book_update(self, raw: bytes) -> None: else: self._handle_data(book_snapshot) - def _handle_book_ticker(self, raw: bytes) -> None: - msg: BinanceQuoteMsg = msgspec.json.decode(raw, type=BinanceQuoteMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - quote_tick: QuoteTick = parse_quote_tick_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(quote_tick) - def _handle_trade(self, raw: bytes) -> None: - msg: BinanceSpotTradeMsg = msgspec.json.decode(raw, type=BinanceSpotTradeMsg) + msg = self._decoder_spot_trade.decode(raw) instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - trade_tick: TradeTick = parse_spot_trade_tick_ws( + trade_tick: TradeTick = msg.data.parse_to_trade_tick( instrument_id=instrument_id, - data=msg.data, ts_init=self._clock.timestamp_ns(), ) self._handle_data(trade_tick) - - def _handle_ticker(self, raw: bytes) -> None: - msg: BinanceTickerMsg = msgspec.json.decode(raw, type=BinanceTickerMsg) - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - ticker: BinanceTicker = parse_ticker_24hr_ws( - instrument_id=instrument_id, - data=msg.data, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(ticker) - - def _handle_kline(self, raw: bytes) -> None: - msg: BinanceCandlestickMsg = msgspec.json.decode(raw, type=BinanceCandlestickMsg) - if not msg.data.k.x: - return # Not closed yet - - instrument_id: InstrumentId = self._get_cached_instrument_id(msg.data.s) - bar: BinanceBar = parse_bar_ws( - instrument_id=instrument_id, - data=msg.data.k, - ts_init=self._clock.timestamp_ns(), - ) - self._handle_data(bar) diff --git a/nautilus_trader/adapters/binance/spot/enums.py b/nautilus_trader/adapters/binance/spot/enums.py index e2e65b0d26d3..452169a8e6fb 100644 --- a/nautilus_trader/adapters/binance/spot/enums.py +++ b/nautilus_trader/adapters/binance/spot/enums.py @@ -16,6 +16,12 @@ from enum import Enum from enum import unique +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser +from 
nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.model.enums import OrderType +from nautilus_trader.model.enums import TimeInForce +from nautilus_trader.model.orders.base import Order + """ Defines `Binance` Spot/Margin specific enums. @@ -56,15 +62,6 @@ class BinanceSpotSymbolStatus(Enum): BREAK = "BREAK" -@unique -class BinanceSpotTimeInForce(Enum): - """Represents a `Binance Spot/Margin` order time in force.""" - - GTC = "GTC" - IOC = "IOC" - FOK = "FOK" - - @unique class BinanceSpotEventType(Enum): """Represents a `Binance Spot/Margin` event type.""" @@ -75,27 +72,61 @@ class BinanceSpotEventType(Enum): listStatus = "listStatus" -@unique -class BinanceSpotOrderType(Enum): - """Represents a `Binance Spot/Margin` order type.""" - - LIMIT = "LIMIT" - MARKET = "MARKET" - STOP = "STOP" - STOP_LOSS = "STOP_LOSS" - STOP_LOSS_LIMIT = "STOP_LOSS_LIMIT" - TAKE_PROFIT = "TAKE_PROFIT" - TAKE_PROFIT_LIMIT = "TAKE_PROFIT_LIMIT" - LIMIT_MAKER = "LIMIT_MAKER" - - -@unique -class BinanceSpotOrderStatus(Enum): - """Represents a `Binance` order status.""" - - NEW = "NEW" - PARTIALLY_FILLED = "PARTIALLY_FILLED" - FILLED = "FILLED" - CANCELED = "CANCELED" - REJECTED = "REJECTED" - EXPIRED = "EXPIRED" +class BinanceSpotEnumParser(BinanceEnumParser): + """ + Provides parsing methods for enums used by the 'Binance Spot/Margin' exchange. + """ + + def __init__(self) -> None: + super().__init__() + + # Spot specific order type conversion + self.spot_ext_to_int_order_type = { + BinanceOrderType.LIMIT: OrderType.LIMIT, + BinanceOrderType.MARKET: OrderType.MARKET, + BinanceOrderType.STOP: OrderType.STOP_MARKET, + BinanceOrderType.STOP_LOSS: OrderType.STOP_MARKET, + BinanceOrderType.STOP_LOSS_LIMIT: OrderType.STOP_LIMIT, + BinanceOrderType.TAKE_PROFIT: OrderType.LIMIT, + BinanceOrderType.TAKE_PROFIT_LIMIT: OrderType.STOP_LIMIT, + BinanceOrderType.LIMIT_MAKER: OrderType.LIMIT, + } + + self.spot_valid_time_in_force = { + TimeInForce.GTC, + TimeInForce.GTD, # Will be transformed to GTC with warning + TimeInForce.FOK, + TimeInForce.IOC, + } + + self.spot_valid_order_types = { + OrderType.MARKET, + OrderType.LIMIT, + OrderType.LIMIT_IF_TOUCHED, + OrderType.STOP_LIMIT, + } + + def parse_binance_order_type(self, order_type: BinanceOrderType) -> OrderType: + try: + return self.spot_ext_to_int_order_type[order_type] + except KeyError: + raise RuntimeError( # pragma: no cover (design-time error) + f"unrecognized Binance Spot/Margin order type, was {order_type}", # pragma: no cover + ) + + def parse_internal_order_type(self, order: Order) -> BinanceOrderType: + if order.order_type == OrderType.MARKET: + return BinanceOrderType.MARKET + elif order.order_type == OrderType.LIMIT: + if order.is_post_only: + return BinanceOrderType.LIMIT_MAKER + else: + return BinanceOrderType.LIMIT + elif order.order_type == OrderType.STOP_LIMIT: + return BinanceOrderType.STOP_LOSS_LIMIT + elif order.order_type == OrderType.LIMIT_IF_TOUCHED: + return BinanceOrderType.TAKE_PROFIT_LIMIT + else: + raise RuntimeError( # pragma: no cover (design-time error) + f"invalid or unsupported `OrderType`, was {order.order_type}", # pragma: no cover + ) diff --git a/nautilus_trader/adapters/binance/spot/execution.py b/nautilus_trader/adapters/binance/spot/execution.py index 7986412bc7d7..e191e8a6aed3 100644 --- a/nautilus_trader/adapters/binance/spot/execution.py +++ b/nautilus_trader/adapters/binance/spot/execution.py @@ -14,92 +14,38 @@ # 
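The new `BinanceSpotEnumParser` above centralizes order-type conversion in both directions. Note the external-to-internal table is many-to-one (`STOP` and `STOP_LOSS` both become `STOP_MARKET`; `TAKE_PROFIT` and `LIMIT_MAKER` collapse to `LIMIT`), so the reverse direction is explicit branching rather than an inverted dict. A sketch of the shape with stand-in enums:

```python
from enum import Enum


class ExtType(Enum):
    LIMIT = "LIMIT"
    LIMIT_MAKER = "LIMIT_MAKER"
    STOP_LOSS_LIMIT = "STOP_LOSS_LIMIT"


class IntType(Enum):
    LIMIT = 1
    STOP_LIMIT = 2


EXT_TO_INT = {
    ExtType.LIMIT: IntType.LIMIT,
    ExtType.LIMIT_MAKER: IntType.LIMIT,  # many-to-one
    ExtType.STOP_LOSS_LIMIT: IntType.STOP_LIMIT,
}


def to_external(order_type: IntType, post_only: bool) -> ExtType:
    # The reverse mapping needs order context (post-only) to pick a variant
    if order_type == IntType.LIMIT:
        return ExtType.LIMIT_MAKER if post_only else ExtType.LIMIT
    if order_type == IntType.STOP_LIMIT:
        return ExtType.STOP_LOSS_LIMIT
    raise ValueError(f"unsupported order type: {order_type}")
```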
------------------------------------------------------------------------------------------------- import asyncio -from decimal import Decimal -from typing import Any, Optional +from typing import Optional import msgspec -import pandas as pd -from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.enums import BinanceExecutionType -from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide -from nautilus_trader.adapters.binance.common.functions import format_symbol -from nautilus_trader.adapters.binance.common.functions import parse_symbol -from nautilus_trader.adapters.binance.futures.enums import BinanceFuturesTimeInForce +from nautilus_trader.adapters.binance.common.execution import BinanceCommonExecutionClient from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.http.error import BinanceError +from nautilus_trader.adapters.binance.spot.enums import BinanceSpotEnumParser from nautilus_trader.adapters.binance.spot.enums import BinanceSpotEventType from nautilus_trader.adapters.binance.spot.http.account import BinanceSpotAccountHttpAPI from nautilus_trader.adapters.binance.spot.http.market import BinanceSpotMarketHttpAPI from nautilus_trader.adapters.binance.spot.http.user import BinanceSpotUserDataHttpAPI -from nautilus_trader.adapters.binance.spot.parsing.account import parse_account_balances_http -from nautilus_trader.adapters.binance.spot.parsing.account import parse_account_balances_ws -from nautilus_trader.adapters.binance.spot.parsing.execution import binance_order_type -from nautilus_trader.adapters.binance.spot.parsing.execution import parse_order_report_http -from nautilus_trader.adapters.binance.spot.parsing.execution import parse_order_type -from nautilus_trader.adapters.binance.spot.parsing.execution import parse_time_in_force -from nautilus_trader.adapters.binance.spot.parsing.execution import parse_trade_report_http from nautilus_trader.adapters.binance.spot.providers import BinanceSpotInstrumentProvider -from nautilus_trader.adapters.binance.spot.rules import BINANCE_SPOT_VALID_ORDER_TYPES -from nautilus_trader.adapters.binance.spot.rules import BINANCE_SPOT_VALID_TIF from nautilus_trader.adapters.binance.spot.schemas.account import BinanceSpotAccountInfo -from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotAccountUpdateMsg from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotAccountUpdateWrapper -from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotOrderUpdateData from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotOrderUpdateWrapper from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotUserMsgWrapper -from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogColor from nautilus_trader.common.logging import Logger from nautilus_trader.core.correctness import PyCondition from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.core.datetime import secs_to_millis -from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.execution.messages import CancelAllOrders -from nautilus_trader.execution.messages import CancelOrder -from 
nautilus_trader.execution.messages import ModifyOrder -from nautilus_trader.execution.messages import SubmitOrder -from nautilus_trader.execution.messages import SubmitOrderList -from nautilus_trader.execution.reports import OrderStatusReport from nautilus_trader.execution.reports import PositionStatusReport -from nautilus_trader.execution.reports import TradeReport -from nautilus_trader.live.execution_client import LiveExecutionClient -from nautilus_trader.model.enums import AccountType -from nautilus_trader.model.enums import LiquiditySide -from nautilus_trader.model.enums import OmsType -from nautilus_trader.model.enums import OrderSide -from nautilus_trader.model.enums import OrderStatus from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import TimeInForce -from nautilus_trader.model.enums import TrailingOffsetType -from nautilus_trader.model.enums import TriggerType -from nautilus_trader.model.enums import order_side_from_str -from nautilus_trader.model.enums import order_side_to_str from nautilus_trader.model.enums import order_type_to_str from nautilus_trader.model.enums import time_in_force_to_str -from nautilus_trader.model.identifiers import AccountId -from nautilus_trader.model.identifiers import ClientId -from nautilus_trader.model.identifiers import ClientOrderId -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import StrategyId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.identifiers import VenueOrderId -from nautilus_trader.model.instruments.base import Instrument -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity from nautilus_trader.model.orders.base import Order -from nautilus_trader.model.orders.limit import LimitOrder -from nautilus_trader.model.orders.market import MarketOrder -from nautilus_trader.model.orders.stop_limit import StopLimitOrder from nautilus_trader.msgbus.bus import MessageBus -class BinanceSpotExecutionClient(LiveExecutionClient): +class BinanceSpotExecutionClient(BinanceCommonExecutionClient): """ Provides an execution client for the `Binance Spot/Margin` exchange. @@ -117,13 +63,13 @@ class BinanceSpotExecutionClient(LiveExecutionClient): The clock for the client. logger : Logger The logger for the client. - instrument_provider : BinanceInstrumentProvider + instrument_provider : BinanceSpotInstrumentProvider The instrument provider. account_type : BinanceAccountType The account type for the client. base_url_ws : str, optional The base URL for the WebSocket client. - clock_sync_interval_secs : int, default 900 + clock_sync_interval_secs : int, default 0 The interval (seconds) between syncing the Nautilus clock with the Binance server(s) clock. If zero, then will *not* perform syncing. 
warn_gtd_to_gtc : bool, default True @@ -141,373 +87,114 @@ def __init__( instrument_provider: BinanceSpotInstrumentProvider, account_type: BinanceAccountType = BinanceAccountType.SPOT, base_url_ws: Optional[str] = None, - clock_sync_interval_secs: int = 900, + clock_sync_interval_secs: int = 0, warn_gtd_to_gtc: bool = True, ): - super().__init__( - loop=loop, - client_id=ClientId(BINANCE_VENUE.value), - venue=BINANCE_VENUE, - oms_type=OmsType.NETTING, - instrument_provider=instrument_provider, - account_type=AccountType.CASH, - base_currency=None, - msgbus=msgbus, - cache=cache, - clock=clock, - logger=logger, + PyCondition.true( + account_type.is_spot_or_margin, + "account_type was not SPOT, MARGIN_CROSS or MARGIN_ISOLATED", ) - self._binance_account_type = account_type - self._log.info(f"Account type: {self._binance_account_type.value}.", LogColor.BLUE) - - self._set_account_id(AccountId(f"{BINANCE_VENUE.value}-spot-master")) - - # Settings - self._warn_gtd_to_gtc = warn_gtd_to_gtc + # Spot HTTP API + self._spot_http_account = BinanceSpotAccountHttpAPI(client, clock, account_type) + self._spot_http_market = BinanceSpotMarketHttpAPI(client, account_type) + self._spot_http_user = BinanceSpotUserDataHttpAPI(client, account_type) - # Clock sync - self._clock_sync_interval_secs = clock_sync_interval_secs + # Spot enum parser + self._spot_enum_parser = BinanceSpotEnumParser() - # Tasks - self._task_clock_sync: Optional[asyncio.Task] = None - - # HTTP API - self._http_client = client - self._http_account = BinanceSpotAccountHttpAPI(client=client) - self._http_market = BinanceSpotMarketHttpAPI(client=client) - self._http_user = BinanceSpotUserDataHttpAPI(client=client, account_type=account_type) - - # Listen keys - self._ping_listen_keys_interval: int = 60 * 5 # Once every 5 mins (hardcode) - self._ping_listen_keys_task: Optional[asyncio.Task] = None - self._listen_key: Optional[str] = None - - # WebSocket API - self._ws_client = BinanceWebSocketClient( + # Instantiate common base class + super().__init__( loop=loop, + client=client, + account=self._spot_http_account, + market=self._spot_http_market, + user=self._spot_http_user, + enum_parser=self._spot_enum_parser, + msgbus=msgbus, + cache=cache, clock=clock, logger=logger, - handler=self._handle_user_ws_message, - base_url=base_url_ws, - ) - - # Hot caches - self._instrument_ids: dict[str, InstrumentId] = {} - - self._log.info(f"Base URL HTTP {self._http_client.base_url}.", LogColor.BLUE) - self._log.info(f"Base URL WebSocket {base_url_ws}.", LogColor.BLUE) - - async def _connect(self) -> None: - # Connect HTTP client - if not self._http_client.connected: - await self._http_client.connect() - try: - await self._instrument_provider.initialize() - except BinanceError as e: - self._log.exception(f"Error on connect: {e.message}", e) - return - - # Authenticate API key and update account(s) - info: BinanceSpotAccountInfo = await self._http_account.account(recv_window=5000) - - self._authenticate_api_key(info=info) - self._update_account_state(info=info) - - # Get listen keys - response = await self._http_user.create_listen_key() - - self._listen_key = response["listenKey"] - self._log.info(f"Listen key {self._listen_key}") - self._ping_listen_keys_task = self.create_task(self._ping_listen_keys()) - - # Setup clock sync - if self._clock_sync_interval_secs > 0: - self._task_clock_sync = self.create_task(self._sync_clock_with_binance_server()) + instrument_provider=instrument_provider, + account_type=account_type, + base_url_ws=base_url_ws, + 
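The `warn_gtd_to_gtc` flag documented above covers Binance Spot's lack of a native GTD time in force: GTD orders are downgraded to GTC, optionally with a warning (the enum parser earlier in this diff lists GTD among the valid values for exactly this reason). A sketch of the substitution with string stand-ins:

```python
def convert_time_in_force(tif: str, warn_gtd_to_gtc: bool = True) -> str:
    if tif == "GTD":
        if warn_gtd_to_gtc:
            print("Converting GTD `time_in_force` to GTC.")  # stand-in for a log warning
        return "GTC"
    return tif
```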
clock_sync_interval_secs=clock_sync_interval_secs, + warn_gtd_to_gtc=warn_gtd_to_gtc, + ) + + # Register spot websocket user data event handlers + self._spot_user_ws_handlers = { + BinanceSpotEventType.outboundAccountPosition: self._handle_account_update, + BinanceSpotEventType.executionReport: self._handle_execution_report, + BinanceSpotEventType.listStatus: self._handle_list_status, + BinanceSpotEventType.balanceUpdate: self._handle_balance_update, + } - # Connect WebSocket client - self._ws_client.subscribe(key=self._listen_key) - await self._ws_client.connect() + # Websocket spot schema decoders + self._decoder_spot_user_msg_wrapper = msgspec.json.Decoder(BinanceSpotUserMsgWrapper) + self._decoder_spot_order_update_wrapper = msgspec.json.Decoder( + BinanceSpotOrderUpdateWrapper, + ) + self._decoder_spot_account_update_wrapper = msgspec.json.Decoder( + BinanceSpotAccountUpdateWrapper, + ) - def _authenticate_api_key(self, info: BinanceSpotAccountInfo) -> None: - if info.canTrade: + async def _update_account_state(self) -> None: + account_info: BinanceSpotAccountInfo = ( + await self._spot_http_account.query_spot_account_info( + recv_window=str(5000), + ) + ) + if account_info.canTrade: self._log.info("Binance API key authenticated.", LogColor.GREEN) self._log.info(f"API key {self._http_client.api_key} has trading permissions.") else: self._log.error("Binance API key does not have trading permissions.") - - def _update_account_state(self, info: BinanceSpotAccountInfo) -> None: self.generate_account_state( - balances=parse_account_balances_http(raw_balances=info.balances), + balances=account_info.parse_to_account_balances(), margins=[], reported=True, - ts_event=millis_to_nanos(info.updateTime), + ts_event=millis_to_nanos(account_info.updateTime), ) - - async def _update_account_state_async(self) -> None: - info: BinanceSpotAccountInfo = await self._http_account.account(recv_window=5000) - self._update_account_state(info=info) - - async def _ping_listen_keys(self) -> None: - try: - while True: - self._log.debug( - f"Scheduled `ping_listen_keys` to run in " - f"{self._ping_listen_keys_interval}s.", - ) - await asyncio.sleep(self._ping_listen_keys_interval) - if self._listen_key: - self._log.debug(f"Pinging WebSocket listen key {self._listen_key}...") - await self._http_user.ping_listen_key(self._listen_key) - except asyncio.CancelledError: - self._log.debug("`ping_listen_keys` task was canceled.") - - async def _sync_clock_with_binance_server(self) -> None: - try: - while True: - # self._log.debug( - # f"Syncing Nautilus clock with Binance server...", - # ) - response: dict[str, int] = await self._http_market.time() - server_time: int = response["serverTime"] - self._log.info(f"Binance server time {server_time} UNIX (ms).") - - nautilus_time = self._clock.timestamp_ms() - self._log.info(f"Nautilus clock time {nautilus_time} UNIX (ms).") - - # offset_ns = millis_to_nanos(nautilus_time - server_time) - # self._log.info(f"Setting Nautilus clock offset {offset_ns} (ns).") - # self._clock.set_offset(offset_ns) - - await asyncio.sleep(self._clock_sync_interval_secs) - except asyncio.CancelledError: - self._log.debug("`sync_clock_with_binance_server` task was canceled.") - - async def _disconnect(self) -> None: - # Cancel tasks - if self._ping_listen_keys_task: - self._log.debug("Canceling `ping_listen_keys` task...") - self._ping_listen_keys_task.cancel() - self._ping_listen_keys_task.done() - - if self._task_clock_sync: - self._log.debug("Canceling `task_clock_sync` task...") - 
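The removed clock-sync task above shows the measurement the optional `clock_sync_interval_secs` feature is built on: fetch the venue's `serverTime`, compare it with the local millisecond clock, and derive a nanosecond offset (note the offset application itself was still commented out upstream). In sketch form:

```python
def millis_to_nanos(ms: float) -> int:
    return int(ms * 1_000_000)


def clock_offset_ns(server_time_ms: int, local_time_ms: int) -> int:
    # Positive offset => the local clock is ahead of the venue's clock
    return millis_to_nanos(local_time_ms - server_time_ms)
```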
self._task_clock_sync.cancel() - self._task_clock_sync.done() - - # Disconnect WebSocket clients - if self._ws_client.is_connected: - await self._ws_client.disconnect() - - # Disconnect HTTP client - if self._http_client.connected: - await self._http_client.disconnect() + while self.get_account() is None: + await asyncio.sleep(0.1) # -- EXECUTION REPORTS ------------------------------------------------------------------------ - async def generate_order_status_report( - self, - instrument_id: InstrumentId, - client_order_id: Optional[ClientOrderId] = None, - venue_order_id: Optional[VenueOrderId] = None, - ) -> Optional[OrderStatusReport]: - PyCondition.false( - client_order_id is None and venue_order_id is None, - "both `client_order_id` and `venue_order_id` were `None`", - ) - - self._log.info( - f"Generating OrderStatusReport for " - f"{repr(client_order_id) if client_order_id else ''} " - f"{repr(venue_order_id) if venue_order_id else ''}...", - ) - - try: - if venue_order_id is not None: - response = await self._http_account.get_order( - symbol=instrument_id.symbol.value, - order_id=venue_order_id.value, - ) - else: - response = await self._http_account.get_order( - symbol=instrument_id.symbol.value, - orig_client_order_id=client_order_id.value, - ) - except BinanceError as e: - self._log.exception( - f"Cannot generate order status report for {venue_order_id}: {e.message}", - e, - ) - return None - - report: OrderStatusReport = parse_order_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(response["symbol"]), - data=response, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - return report - - async def generate_order_status_reports( # noqa (C901 too complex) - self, - instrument_id: InstrumentId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, - open_only: bool = False, - ) -> list[OrderStatusReport]: - self._log.info(f"Generating OrderStatusReports for {self.id}...") - - open_orders = self._cache.orders_open(venue=self.venue) - active_symbols: set[str] = { - format_symbol(o.instrument_id.symbol.value) for o in open_orders - } - - order_msgs = [] - reports: dict[VenueOrderId, OrderStatusReport] = {} - - try: - open_order_msgs: list[dict[str, Any]] = await self._http_account.get_open_orders( - symbol=instrument_id.symbol.value if instrument_id is not None else None, - ) - if open_order_msgs: - order_msgs.extend(open_order_msgs) - # Add to active symbols - for o in open_order_msgs: - active_symbols.add(o["symbol"]) - - for symbol in active_symbols: - response = await self._http_account.get_orders( - symbol=symbol, - start_time=secs_to_millis(start.timestamp()) if start is not None else None, - end_time=secs_to_millis(end.timestamp()) if end is not None else None, - ) - order_msgs.extend(response) - except BinanceError as e: - self._log.exception(f"Cannot generate order status report: {e.message}", e) - return [] - - for msg in order_msgs: - # Apply filter (always report open orders regardless of start, end filter) - # TODO(cs): Time filter is WIP - # timestamp = pd.to_datetime(data["time"], utc=True) - # if data["status"] not in ("NEW", "PARTIALLY_FILLED", "PENDING_CANCEL"): - # if start is not None and timestamp < start: - # continue - # if end is not None and timestamp > end: - # continue - - report: OrderStatusReport = parse_order_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(msg["symbol"]), - data=msg, - 
report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - reports[report.venue_order_id] = report # One report per order - - len_reports = len(reports) - plural = "" if len_reports == 1 else "s" - self._log.info(f"Generated {len(reports)} OrderStatusReport{plural}.") - - return list(reports.values()) - - async def generate_trade_reports( # noqa (C901 too complex) + async def _get_binance_position_status_reports( self, - instrument_id: InstrumentId = None, - venue_order_id: VenueOrderId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, - ) -> list[TradeReport]: - self._log.info(f"Generating TradeReports for {self.id}...") - - open_orders = self._cache.orders_open(venue=self.venue) - active_symbols: set[str] = { - format_symbol(o.instrument_id.symbol.value) for o in open_orders - } - - reports_raw: list[dict[str, Any]] = [] - reports: list[TradeReport] = [] - - try: - for symbol in active_symbols: - response = await self._http_account.get_account_trades( - symbol=symbol, - start_time=secs_to_millis(start.timestamp()) if start is not None else None, - end_time=secs_to_millis(end.timestamp()) if end is not None else None, - ) - reports_raw.extend(response) - except BinanceError as e: - self._log.exception(f"Cannot generate trade report: {e.message}", e) - return [] - - for data in reports_raw: - # Apply filter - # TODO(cs): Time filter is WIP - # timestamp = pd.to_datetime(data["time"], utc=True) - # if start is not None and timestamp < start: - # continue - # if end is not None and timestamp > end: - # continue - - report: TradeReport = parse_trade_report_http( - account_id=self.account_id, - instrument_id=self._get_cached_instrument_id(data["symbol"]), - data=data, - report_id=UUID4(), - ts_init=self._clock.timestamp_ns(), - ) - - self._log.debug(f"Received {report}.") - reports.append(report) - - # Sort in ascending order - reports = sorted(reports, key=lambda x: x.trade_id) - - len_reports = len(reports) - plural = "" if len_reports == 1 else "s" - self._log.info(f"Generated {len(reports)} TradeReport{plural}.") - - return reports - - async def generate_position_status_reports( - self, - instrument_id: InstrumentId = None, - start: Optional[pd.Timestamp] = None, - end: Optional[pd.Timestamp] = None, + symbol: Optional[str] = None, ) -> list[PositionStatusReport]: # Never cash positions + return [] + async def _get_binance_active_position_symbols( + self, + symbol: Optional[str] = None, + ) -> list[str]: + # Never cash positions return [] # -- COMMAND HANDLERS ------------------------------------------------------------------------- - async def _submit_order(self, command: SubmitOrder) -> None: - order: Order = command.order - + def _check_order_validity(self, order: Order): # Check order type valid - if order.order_type not in BINANCE_SPOT_VALID_ORDER_TYPES: + if order.order_type not in self._spot_enum_parser.spot_valid_order_types: self._log.error( f"Cannot submit order: {order_type_to_str(order.order_type)} " f"orders not supported by the Binance Spot/Margin exchange. 
" - f"Use any of {[order_type_to_str(t) for t in BINANCE_SPOT_VALID_ORDER_TYPES]}", + f"Use any of {[order_type_to_str(t) for t in self._spot_enum_parser.spot_valid_order_types]}", ) return - # Check time in force valid - if order.time_in_force not in BINANCE_SPOT_VALID_TIF: + if order.time_in_force not in self._spot_enum_parser.spot_valid_time_in_force: self._log.error( f"Cannot submit order: " f"{time_in_force_to_str(order.time_in_force)} " f"not supported by the Binance Spot/Margin exchange. " - f"Use any of {BINANCE_SPOT_VALID_TIF}.", + f"Use any of {[time_in_force_to_str(t) for t in self._spot_enum_parser.spot_valid_time_in_force]}.", ) return - # Check post-only if order.order_type == OrderType.STOP_LIMIT and order.is_post_only: self._log.error( @@ -517,321 +204,27 @@ async def _submit_order(self, command: SubmitOrder) -> None: ) return - self._log.debug(f"Submitting {order}.") - - # Generate event here to ensure correct ordering of events - self.generate_order_submitted( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - try: - if order.order_type == OrderType.MARKET: - await self._submit_market_order(order) - elif order.order_type == OrderType.LIMIT: - await self._submit_limit_order(order) - elif order.order_type in (OrderType.STOP_LIMIT, OrderType.LIMIT_IF_TOUCHED): - await self._submit_stop_limit_order(order) - except BinanceError as e: - self.generate_order_rejected( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - reason=e.message, - ts_event=self._clock.timestamp_ns(), - ) - - async def _submit_market_order(self, order: MarketOrder) -> None: - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type="MARKET", - quantity=str(order.quantity), - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_limit_order(self, order: LimitOrder) -> None: - time_in_force_str: Optional[str] = self._convert_time_in_force_to_str(order.time_in_force) - if order.is_post_only: - time_in_force_str = None - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - price=str(order.price), - iceberg_qty=str(order.display_qty) if order.display_qty is not None else None, - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_stop_limit_order(self, order: StopLimitOrder) -> None: - time_in_force_str: str = self._convert_time_in_force_to_str(order.time_in_force) - - await self._http_account.new_order( - symbol=format_symbol(order.instrument_id.symbol.value), - side=order_side_to_str(order.side), - type=binance_order_type(order).value, - time_in_force=time_in_force_str, - quantity=str(order.quantity), - price=str(order.price), - stop_price=str(order.trigger_price), - iceberg_qty=str(order.display_qty) if order.display_qty is not None else None, - new_client_order_id=order.client_order_id.value, - recv_window=5000, - ) - - async def _submit_order_list(self, command: SubmitOrderList) -> None: - for order in command.order_list: - self.generate_order_submitted( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - 
ts_event=self._clock.timestamp_ns(), - ) - - for order in command.order_list: - if order.linked_order_ids: # TODO(cs): Implement - self._log.warning(f"Cannot yet handle OCO conditional orders, {order}.") - await self._submit_order(order) - - async def _modify_order(self, command: ModifyOrder) -> None: - self._log.error( # pragma: no cover - "Cannot modify order: Not supported by the exchange.", # pragma: no cover - ) - - async def _cancel_order(self, command: CancelOrder) -> None: - self.generate_order_pending_cancel( - strategy_id=command.strategy_id, - instrument_id=command.instrument_id, - client_order_id=command.client_order_id, - venue_order_id=command.venue_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - await self._cancel_order_single( - instrument_id=command.instrument_id, - client_order_id=command.client_order_id, - venue_order_id=command.venue_order_id, - ) - - async def _cancel_all_orders(self, command: CancelAllOrders) -> None: - open_orders_strategy = self._cache.orders_open( - instrument_id=command.instrument_id, - strategy_id=command.strategy_id, - ) - for order in open_orders_strategy: - if order.is_pending_cancel: - continue # Already pending cancel - self.generate_order_pending_cancel( - strategy_id=order.strategy_id, - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - venue_order_id=order.venue_order_id, - ts_event=self._clock.timestamp_ns(), - ) - - # Check total orders for instrument - open_orders_total_count = self._cache.orders_open_count( - instrument_id=command.instrument_id, - ) - - try: - if open_orders_total_count == len(open_orders_strategy): - await self._http_account.cancel_open_orders( - symbol=format_symbol(command.instrument_id.symbol.value), - ) - else: - for order in open_orders_strategy: - await self._cancel_order_single( - instrument_id=order.instrument_id, - client_order_id=order.client_order_id, - venue_order_id=order.venue_order_id, - ) - except BinanceError as e: - self._log.exception(f"Cannot cancel open orders: {e.message}", e) - - async def _cancel_order_single( - self, - instrument_id: InstrumentId, - client_order_id: ClientOrderId, - venue_order_id: Optional[VenueOrderId], - ) -> None: - try: - if venue_order_id is not None: - await self._http_account.cancel_order( - symbol=format_symbol(instrument_id.symbol.value), - order_id=venue_order_id.value, - ) - else: - await self._http_account.cancel_order( - symbol=format_symbol(instrument_id.symbol.value), - orig_client_order_id=client_order_id.value, - ) - except BinanceError as e: - self._log.exception( - f"Cannot cancel order " - f"{repr(client_order_id)}, " - f"{repr(venue_order_id)}: " - f"{e.message}", - e, - ) - - def _convert_time_in_force_to_str(self, time_in_force: TimeInForce): - time_in_force_str: str = time_in_force_to_str(time_in_force) - if time_in_force_str == TimeInForce.GTD.name: - if self._warn_gtd_to_gtc: - self._log.warning("Converting GTD `time_in_force` to GTC.") - time_in_force_str = TimeInForce.GTC.name - return time_in_force_str - - def _get_cached_instrument_id(self, symbol: str) -> InstrumentId: - # Parse instrument ID - nautilus_symbol: str = parse_symbol(symbol, account_type=self._binance_account_type) - instrument_id: Optional[InstrumentId] = self._instrument_ids.get(nautilus_symbol) - if not instrument_id: - instrument_id = InstrumentId(Symbol(nautilus_symbol), BINANCE_VENUE) - self._instrument_ids[nautilus_symbol] = instrument_id - return instrument_id + # -- WEBSOCKET EVENT HANDLERS 
-------------------------------------------------------------------- def _handle_user_ws_message(self, raw: bytes) -> None: # TODO(cs): Uncomment for development # self._log.info(str(json.dumps(msgspec.json.decode(raw), indent=4)), color=LogColor.MAGENTA) - - wrapper = msgspec.json.decode(raw, type=BinanceSpotUserMsgWrapper) - + wrapper = self._decoder_spot_user_msg_wrapper.decode(raw) try: - if wrapper.data.e == BinanceSpotEventType.outboundAccountPosition: - account_msg = msgspec.json.decode(raw, type=BinanceSpotAccountUpdateWrapper) - self._handle_account_update(account_msg.data) - elif wrapper.data.e == BinanceSpotEventType.executionReport: - order_msg = msgspec.json.decode(raw, type=BinanceSpotOrderUpdateWrapper) - self._handle_execution_report(order_msg.data) - elif wrapper.data.e == BinanceSpotEventType.listStatus: - pass # Implement (OCO order status) - elif wrapper.data.e == BinanceSpotEventType.balanceUpdate: - self.create_task(self._update_account_state_async()) + self._spot_user_ws_handlers[wrapper.data.e](raw) except Exception as e: self._log.exception(f"Error on handling {repr(raw)}", e) - def _handle_account_update(self, msg: BinanceSpotAccountUpdateMsg) -> None: - self.generate_account_state( - balances=parse_account_balances_ws(raw_balances=msg.B), - margins=[], - reported=True, - ts_event=millis_to_nanos(msg.u), - ) - - def _handle_execution_report(self, data: BinanceSpotOrderUpdateData) -> None: - instrument_id: InstrumentId = self._get_cached_instrument_id(data.s) - venue_order_id = VenueOrderId(str(data.i)) - ts_event = millis_to_nanos(data.T) + def _handle_account_update(self, raw: bytes) -> None: + account_msg = self._decoder_spot_account_update_wrapper.decode(raw) + account_msg.data.handle_account_update(self) - # Parse client order ID - client_order_id_str: str = data.c - if not client_order_id_str or not client_order_id_str.startswith("O"): - client_order_id_str = data.C - client_order_id = ClientOrderId(client_order_id_str) + def _handle_execution_report(self, raw: bytes) -> None: + order_msg = self._decoder_spot_order_update_wrapper.decode(raw) + order_msg.data.handle_execution_report(self) - # Fetch strategy ID - strategy_id: StrategyId = self._cache.strategy_id_for_order(client_order_id) - if strategy_id is None: - if strategy_id is None: - self._generate_external_order_report( - instrument_id, - client_order_id, - venue_order_id, - data, - ts_event, - ) - return - - if data.x == BinanceExecutionType.NEW: - self.generate_order_accepted( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - ts_event=ts_event, - ) - elif data.x == BinanceExecutionType.TRADE: - instrument: Instrument = self._instrument_provider.find(instrument_id=instrument_id) - - # Determine commission - commission_asset: str = data.N - commission_amount: str = data.n - if commission_asset is not None: - commission = Money.from_str(f"{commission_amount} {commission_asset}") - else: - # Binance typically charges commission as base asset or BNB - commission = Money(0, instrument.base_currency) - - self.generate_order_filled( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - venue_position_id=None, # NETTING accounts - trade_id=TradeId(str(data.t)), # Trade ID - order_side=order_side_from_str(data.S.value), - order_type=parse_order_type(data.o), - last_qty=Quantity.from_str(data.l), - last_px=Price.from_str(data.L), - 
quote_currency=instrument.quote_currency, - commission=commission, - liquidity_side=LiquiditySide.MAKER if data.m else LiquiditySide.TAKER, - ts_event=ts_event, - ) - elif data.x in (BinanceExecutionType.CANCELED, BinanceExecutionType.EXPIRED): - self.generate_order_canceled( - strategy_id=strategy_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - ts_event=ts_event, - ) - else: - self._log.warning(f"Received unhandled {data}") - - def _generate_external_order_report( - self, - instrument_id: InstrumentId, - client_order_id: ClientOrderId, - venue_order_id: VenueOrderId, - data: BinanceSpotOrderUpdateData, - ts_event: int, - ) -> None: - report = OrderStatusReport( - account_id=self.account_id, - instrument_id=instrument_id, - client_order_id=client_order_id, - venue_order_id=venue_order_id, - order_side=OrderSide.BUY if data.S == BinanceOrderSide.BUY else OrderSide.SELL, - order_type=parse_order_type(data.o), - time_in_force=parse_time_in_force(data.f.value), - order_status=OrderStatus.ACCEPTED, - price=Price.from_str(data.p) if data.p is not None else None, - trigger_price=Price.from_str(data.P) if data.P is not None else None, - trigger_type=TriggerType.LAST_TRADE, - trailing_offset=None, - trailing_offset_type=TrailingOffsetType.NO_TRAILING_OFFSET, - quantity=Quantity.from_str(data.q), - filled_qty=Quantity.from_str(data.z), - display_qty=Quantity.from_str(str(Decimal(data.q) - Decimal(data.F))) - if data.F is not None - else None, - avg_px=None, - post_only=data.f == BinanceFuturesTimeInForce.GTX, - reduce_only=False, - report_id=UUID4(), - ts_accepted=ts_event, - ts_last=ts_event, - ts_init=self._clock.timestamp_ns(), - ) + def _handle_list_status(self, raw: bytes) -> None: + self._log.warning("List status (OCO) received.") # Implement - self._send_order_status_report(report) + def _handle_balance_update(self, raw: bytes) -> None: + self.create_task(self._update_account_state_async()) diff --git a/nautilus_trader/adapters/binance/spot/http/account.py b/nautilus_trader/adapters/binance/spot/http/account.py index 165ae4c34986..d6ffc3f2c263 100644 --- a/nautilus_trader/adapters/binance/spot/http/account.py +++ b/nautilus_trader/adapters/binance/spot/http/account.py @@ -17,879 +17,714 @@ import msgspec -from nautilus_trader.adapters.binance.common.functions import format_symbol +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceNewOrderRespType +from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.schemas.market import BinanceRateLimit +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.http.account import BinanceAccountHttpAPI +from nautilus_trader.adapters.binance.http.account import BinanceOpenOrdersHttp from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.http.enums import NewOrderRespType +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint from nautilus_trader.adapters.binance.spot.schemas.account import BinanceSpotAccountInfo +from nautilus_trader.adapters.binance.spot.schemas.account 
import BinanceSpotOrderOco +from nautilus_trader.common.clock import LiveClock -class BinanceSpotAccountHttpAPI: +class BinanceSpotOpenOrdersHttp(BinanceOpenOrdersHttp): """ - Provides access to the `Binance Spot/Margin` Account/Trade HTTP REST API. + Endpoint of all SPOT/MARGIN open orders on a symbol. - Parameters - ---------- - client : BinanceHttpClient - The Binance REST API client. - """ + `GET /api/v3/openOrders` (inherited) - BASE_ENDPOINT = "/api/v3/" + `DELETE /api/v3/openOrders` - def __init__(self, client: BinanceHttpClient): - self.client = client + Warnings + -------- + Care should be taken when accessing this endpoint with no symbol specified. + The weight usage can be very large, which may cause rate limits to be hit. - # Decoders - self._decoder_account_info = msgspec.json.Decoder(BinanceSpotAccountInfo) + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#current-open-orders-user_data + https://binance-docs.github.io/apidocs/spot/en/#cancel-all-open-orders-on-a-symbol-trade + """ - async def new_order_test( + def __init__( self, - symbol: str, - side: str, - type: str, - time_in_force: Optional[str] = None, - quantity: Optional[str] = None, - quote_order_qty: Optional[str] = None, - price: Optional[str] = None, - new_client_order_id: Optional[str] = None, - stop_price: Optional[str] = None, - iceberg_qty: Optional[str] = None, - new_order_resp_type: NewOrderRespType = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Test new order creation and signature/recvWindow. - - Creates and validates a new order but does not send it into the matching engine. + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + BinanceMethodType.DELETE: BinanceSecurityType.TRADE, + } + super().__init__( + client, + base_endpoint, + methods, + ) + self._delete_resp_decoder = msgspec.json.Decoder() - Test New Order (TRADE). - `POST /api/v3/order/test`. + class DeleteParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + Parameters of openOrders SPOT/MARGIN DELETE request. + Includes OCO orders. Parameters ---------- - symbol : str - The symbol for the request. - side : str - The order side for the request. - type : str - The order type for the request. - time_in_force : str, optional - The order time in force for the request. - quantity : str, optional - The order quantity in base asset units for the request. - quote_order_qty : str, optional - The order quantity in quote asset units for the request. - price : str, optional - The order price for the request. - new_client_order_id : str, optional - The client order ID for the request. A unique ID among open orders. - Automatically generated if not provided. - stop_price : str, optional - The order stop price for the request. - Used with STOP_LOSS, STOP_LOSS_LIMIT, TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders. - iceberg_qty : str, optional - The order iceberg (display) quantity for the request. - Used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to create an iceberg order. - new_order_resp_type : NewOrderRespType, optional - The response type for the order request. - MARKET and LIMIT order types default to FULL, all other orders default to ACK. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request + symbol : BinanceSymbol + The symbol of the orders + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
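The parameter structs throughout this file are frozen msgspec Structs declared with `omit_defaults=True`, which keeps optional fields that were never set out of the signed request entirely. A quick sketch of that behavior with a stripped-down stand-in for the struct being defined above:

```python
from typing import Optional

import msgspec


class DeleteParams(msgspec.Struct, omit_defaults=True, frozen=True):
    timestamp: str
    symbol: str
    recvWindow: Optional[str] = None


params = DeleteParams(timestamp="1676700000000", symbol="BTCUSDT")
print(msgspec.json.encode(params))
# b'{"timestamp":"1676700000000","symbol":"BTCUSDT"}' -- no recvWindow key
```

The real client presumably serializes these into a query string rather than JSON; the point is only that unset optionals never reach the wire.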
+ """ - Returns - ------- - dict[str, Any] + timestamp: str + symbol: BinanceSymbol + recvWindow: Optional[str] = None - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#test-new-order-trade + async def _delete(self, parameters: DeleteParameters) -> list[dict[str, Any]]: + method_type = BinanceMethodType.DELETE + raw = await self._method(method_type, parameters) + return self._delete_resp_decoder.decode(raw) - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "side": side, - "type": type, - } - if time_in_force is not None: - payload["timeInForce"] = time_in_force - if quantity is not None: - payload["quantity"] = quantity - if quote_order_qty is not None: - payload["quoteOrderQty"] = quote_order_qty - if price is not None: - payload["price"] = price - if new_client_order_id is not None: - payload["newClientOrderId"] = new_client_order_id - if stop_price is not None: - payload["stopPrice"] = stop_price - if iceberg_qty is not None: - payload["icebergQty"] = iceberg_qty - if new_order_resp_type is not None: - payload["newOrderRespType"] = new_order_resp_type.value - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "order/test", - payload=payload, - ) - return msgspec.json.decode(raw) +class BinanceSpotOrderOcoHttp(BinanceHttpEndpoint): + """ + Endpoint for creating SPOT/MARGIN OCO orders. + + `POST /api/v3/order/oco` + + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#new-oco-trade + """ - async def new_order( + def __init__( self, - symbol: str, - side: str, - type: str, - time_in_force: Optional[str] = None, - quantity: Optional[str] = None, - quote_order_qty: Optional[str] = None, - price: Optional[str] = None, - new_client_order_id: Optional[str] = None, - stop_price: Optional[str] = None, - iceberg_qty: Optional[str] = None, - new_order_resp_type: NewOrderRespType = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Submit a new order. + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.POST: BinanceSecurityType.TRADE, + } + url_path = base_endpoint + "order/oco" + super().__init__( + client, + methods, + url_path, + ) + self._resp_decoder = msgspec.json.Decoder(BinanceSpotOrderOco) - Submit New Order (TRADE). - `POST /api/v3/order`. + class PostParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + OCO order creation POST endpoint parameters. Parameters ---------- - symbol : str - The symbol for the request. - side : str - The order side for the request. - type : str - The order type for the request. - time_in_force : str, optional - The order time in force for the request. - quantity : str, optional + symbol : BinanceSymbol + The symbol of the order. + timestamp : str + The millisecond timestamp of the request. + side : BinanceOrderSide + The market side of the order (BUY, SELL). + quantity : str The order quantity in base asset units for the request. - quote_order_qty : str, optional - The order quantity in quote asset units for the request. - price : str, optional + price : str The order price for the request. - new_client_order_id : str, optional - The client order ID for the request. A unique ID among open orders. - Automatically generated if not provided. - stop_price : str, optional + stopPrice : str The order stop price for the request. 
- Used with STOP_LOSS, STOP_LOSS_LIMIT, TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders. - iceberg_qty : str, optional - The order iceberg (display) quantity for the request. - Used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to create an iceberg order. - new_order_resp_type : NewOrderRespType, optional + listClientOrderId : str, optional + A unique Id for the entire orderList + limitClientOrderId : str, optional + The client order ID for the limit request. A unique ID among open orders. + Automatically generated if not provided. + limitStrategyId : int, optional + The client strategy ID for the limit request. + limitStrategyType : int, optional + The client strategy type for the limit request. Cannot be less than 1000000 + limitIcebergQty : str, optional + Create a limit iceberg order. + trailingDelta : str, optional + Can be used in addition to stopPrice. + The order trailing delta of the request. + stopClientOrderId : str, optional + The client order ID for the stop request. A unique ID among open orders. + Automatically generated if not provided. + stopStrategyId : int, optional + The client strategy ID for the stop request. + stopStrategyType : int, optional + The client strategy type for the stop request. Cannot be less than 1000000. + stopLimitPrice : str, optional + Limit price for the stop order request. + If provided, stopLimitTimeInForce is required. + stopIcebergQty : str, optional + Create a stop iceberg order. + stopLimitTimeInForce : BinanceTimeInForce, optional + The time in force of the stop limit order. + Valid values: (GTC, FOK, IOC). + newOrderRespType : BinanceNewOrderRespType, optional The response type for the order request. - MARKET and LIMIT order types default to FULL, all other orders default to ACK. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). + recvWindow : str, optional + The response receive window in milliseconds for the request. + Cannot exceed 60000. + """ - Returns - ------- - dict[str, Any] + symbol: BinanceSymbol + timestamp: str + side: BinanceOrderSide + quantity: str + price: str + stopPrice: str + listClientOrderId: Optional[str] = None + limitClientOrderId: Optional[str] = None + limitStrategyId: Optional[int] = None + limitStrategyType: Optional[int] = None + limitIcebergQty: Optional[str] = None + trailingDelta: Optional[str] = None + stopClientOrderId: Optional[str] = None + stopStrategyId: Optional[int] = None + stopStrategyType: Optional[int] = None + stopLimitPrice: Optional[str] = None + stopIcebergQty: Optional[str] = None + stopLimitTimeInForce: Optional[BinanceTimeInForce] = None + newOrderRespType: Optional[BinanceNewOrderRespType] = None + recvWindow: Optional[str] = None + + async def _post(self, parameters: PostParameters) -> BinanceSpotOrderOco: + method_type = BinanceMethodType.POST + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) + + +class BinanceSpotOrderListHttp(BinanceHttpEndpoint): + """ + Endpoint for querying and deleting SPOT/MARGIN OCO orders. 
- References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#new-order-trade + `GET /api/v3/orderList` + `DELETE /api/v3/orderList` - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "side": side, - "type": type, + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#query-oco-user_data + https://binance-docs.github.io/apidocs/spot/en/#cancel-oco-trade + """ + + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + BinanceMethodType.DELETE: BinanceSecurityType.TRADE, } - if time_in_force is not None: - payload["timeInForce"] = time_in_force - if quantity is not None: - payload["quantity"] = quantity - if quote_order_qty is not None: - payload["quoteOrderQty"] = quote_order_qty - if price is not None: - payload["price"] = price - if new_client_order_id is not None: - payload["newClientOrderId"] = new_client_order_id - if stop_price is not None: - payload["stopPrice"] = stop_price - if iceberg_qty is not None: - payload["icebergQty"] = iceberg_qty - if new_order_resp_type is not None: - payload["newOrderRespType"] = new_order_resp_type.value - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, + url_path = base_endpoint + "orderList" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(BinanceSpotOrderOco) - return msgspec.json.decode(raw) - - async def cancel_order( - self, - symbol: str, - order_id: Optional[str] = None, - orig_client_order_id: Optional[str] = None, - new_client_order_id: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Cancel an open order. - - Cancel Order (TRADE). - `DELETE /api/v3/order`. + orderList (OCO) GET endpoint parameters. Parameters ---------- - symbol : str - The symbol for the request. - order_id : str, optional - The order ID to cancel. - orig_client_order_id : str, optional - The original client order ID to cancel. - new_client_order_id : str, optional - The new client order ID to uniquely identify this request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#cancel-order-trade - + timestamp : str + The millisecond timestamp of the request. + orderListId : str, optional + The unique identifier of the order list to retrieve. + origClientOrderId : str, optional + The client specified identifier of the order list to retrieve. + recvWindow : str, optional + The response receive window in milliseconds for the request. + Cannot exceed 60000. + + NOTE: Either orderListId or origClientOrderId must be provided. 
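The either/or note above is enforced client-side later in this diff (`query_spot_oco` raises before any request is sent). A hypothetical usage sketch, assuming an already-constructed `BinanceSpotAccountHttpAPI` named `account_api`:

```python
async def show_oco(account_api) -> None:
    # OK: exactly one identifier supplied
    oco = await account_api.query_spot_oco(orig_client_order_id="my-oco-1")
    print(oco)

    try:
        # Neither identifier supplied: rejected before hitting the wire
        await account_api.query_spot_oco()
    except RuntimeError as e:
        print(e)  # "Either orderListId or origClientOrderId must be provided."
```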
""" - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = str(order_id) - if orig_client_order_id is not None: - payload["origClientOrderId"] = str(orig_client_order_id) - if new_client_order_id is not None: - payload["newClientOrderId"] = str(new_client_order_id) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + orderListId: Optional[str] = None + origClientOrderId: Optional[str] = None + recvWindow: Optional[str] = None - async def cancel_open_orders( - self, - symbol: str, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: + class DeleteParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Cancel all open orders for a symbol. This includes OCO orders. - - Cancel all Open Orders for a Symbol (TRADE). - `DELETE api/v3/openOrders`. + orderList (OCO) DELETE endpoint parameters. Parameters ---------- - symbol : str - The symbol for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#cancel-all-open-orders-on-a-symbol-trade - + timestamp : str + The millisecond timestamp of the request. + symbol : BinanceSymbol + The symbol of the order. + orderListId : str, optional + The unique identifier of the order list to retrieve. + listClientOrderId : str, optional + The client specified identifier of the order list to retrieve. + newClientOrderId : str, optional + Used to uniquely identify this cancel. Automatically generated + by default. + recvWindow : str, optional + The response receive window in milliseconds for the request. + Cannot exceed 60000. + + NOTE: Either orderListId or listClientOrderId must be provided. """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "openOrders", - payload=payload, - ) - - return msgspec.json.decode(raw) - async def get_order( - self, - symbol: str, - order_id: Optional[str] = None, - orig_client_order_id: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Check an order's status. + timestamp: str + symbol: BinanceSymbol + orderListId: Optional[str] = None + listClientOrderId: Optional[str] = None + newClientOrderId: Optional[str] = None + recvWindow: Optional[str] = None - Query Order (USER_DATA). - `GET /api/v3/order`. + async def _get(self, parameters: GetParameters) -> BinanceSpotOrderOco: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Parameters - ---------- - symbol : str - The symbol for the request. - order_id : str, optional - The order ID for the request. - orig_client_order_id : str, optional - The original client order ID for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). 
+ async def _delete(self, parameters: DeleteParameters) -> BinanceSpotOrderOco: + method_type = BinanceMethodType.DELETE + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Returns - ------- - dict[str, Any] - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-order-user_data +class BinanceSpotAllOrderListHttp(BinanceHttpEndpoint): + """ + Endpoint for querying all SPOT/MARGIN OCO orders. - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = order_id - if orig_client_order_id is not None: - payload["origClientOrderId"] = orig_client_order_id - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "order", - payload=payload, - ) + `GET /api/v3/allOrderList` - return msgspec.json.decode(raw) + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#query-all-oco-user_data + """ - async def get_open_orders( + def __init__( self, - symbol: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> list[dict[str, Any]]: - """ - Get all open orders for a symbol. - - Query Current Open Orders (USER_DATA). - - Parameters - ---------- - symbol : str, optional - The symbol for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#current-open-orders-user_data - https://binance-docs.github.io/apidocs/futures/en/#current-open-orders-user_data - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "openOrders", - payload=payload, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + url_path = base_endpoint + "allOrderList" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(list[BinanceSpotOrderOco]) - return msgspec.json.decode(raw) - - async def get_orders( - self, - symbol: str, - order_id: Optional[str] = None, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = None, - recv_window: Optional[int] = None, - ) -> list[dict[str, Any]]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get all account orders (open, or closed). - - All Orders (USER_DATA). + Parameters of allOrderList GET request. Parameters ---------- - symbol : str - The symbol for the request. - order_id : str, optional + timestamp : str + The millisecond timestamp of the request. + fromId : str, optional The order ID for the request. - start_time : int, optional + If included, request will return orders from this orderId INCLUSIVE. + startTime : str, optional The start time (UNIX milliseconds) filter for the request. - end_time : int, optional + endTime : str, optional The end time (UNIX milliseconds) filter for the request. limit : int, optional The limit for the response. - recv_window : int, optional + Default 500, max 1000 + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
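The `fromId` cursor and the `startTime`/`endTime` window are mutually exclusive, as the Warnings entry just below states. A small sketch of the guard, mirroring the check `query_spot_all_oco` applies later in this diff:

```python
from typing import Optional


def check_all_order_list_filters(
    from_id: Optional[str],
    start_time: Optional[str],
    end_time: Optional[str],
) -> None:
    # fromId paginates by order ID; startTime/endTime filter by time window.
    # Binance rejects requests that mix the two, so fail fast client-side.
    if from_id is not None and (start_time is not None or end_time is not None):
        raise ValueError("Cannot specify both fromId and a startTime/endTime.")


check_all_order_list_filters("123", None, None)            # OK: cursor only
check_all_order_list_filters(None, "1676700000000", None)  # OK: window only
```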
- Returns - ------- - list[dict[str, Any]] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#all-orders-user_data - https://binance-docs.github.io/apidocs/futures/en/#all-orders-user_data - + Warnings + -------- + If fromId is specified, neither startTime nor endTime can be provided. """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_id is not None: - payload["orderId"] = order_id - if start_time is not None: - payload["startTime"] = str(start_time) - if end_time is not None: - payload["endTime"] = str(end_time) - if limit is not None: - payload["limit"] = str(limit) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "allOrders", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + fromId: Optional[str] = None + startTime: Optional[str] = None + endTime: Optional[str] = None + limit: Optional[int] = None + recvWindow: Optional[str] = None - async def new_oco_order( - self, - symbol: str, - side: str, - quantity: str, - price: str, - stop_price: str, - list_client_order_id: Optional[str] = None, - limit_client_order_id: Optional[str] = None, - limit_iceberg_qty: Optional[str] = None, - stop_client_order_id: Optional[str] = None, - stop_limit_price: Optional[str] = None, - stop_iceberg_qty: Optional[str] = None, - stop_limit_time_in_force: Optional[str] = None, - new_order_resp_type: NewOrderRespType = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Submit a new OCO order. + async def _get(self, parameters: GetParameters) -> list[BinanceSpotOrderOco]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Submit New OCO (TRADE). - `POST /api/v3/order/oco`. - Parameters - ---------- - symbol : str - The symbol for the request. - side : str - The order side for the request. - quantity : str - The order quantity for the request. - price : str - The order price for the request. - stop_price : str - The order stop price for the request. - list_client_order_id : str, optional - The list client order ID for the request. - limit_client_order_id : str, optional - The LIMIT client order ID for the request. - limit_iceberg_qty : str, optional - The LIMIT order display quantity for the request. - stop_client_order_id : str, optional - The STOP order client order ID for the request. - stop_limit_price : str, optional - The STOP_LIMIT price for the request. - stop_iceberg_qty : str, optional - The STOP order display quantity for the request. - stop_limit_time_in_force : str, optional - The STOP_LIMIT time_in_force for the request. - new_order_resp_type : NewOrderRespType, optional - The response type for the order request. - MARKET and LIMIT order types default to FULL, all other orders default to ACK. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceSpotOpenOrderListHttp(BinanceHttpEndpoint): + """ + Endpoint for querying all SPOT/MARGIN OPEN OCO orders.
- Returns - ------- - dict[str, Any] + `GET /api/v3/openOrderList` - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#new-oco-trade + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#query-open-oco-user_data + """ - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "side": side, - "quantity": quantity, - "price": price, - "stopPrice": stop_price, + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, } - if list_client_order_id is not None: - payload["listClientOrderId"] = list_client_order_id - if limit_client_order_id is not None: - payload["limitClientOrderId"] = limit_client_order_id - if limit_iceberg_qty is not None: - payload["limitIcebergQty"] = limit_iceberg_qty - if stop_client_order_id is not None: - payload["stopClientOrderId"] = stop_client_order_id - if stop_limit_price is not None: - payload["stopLimitPrice"] = stop_limit_price - if stop_iceberg_qty is not None: - payload["stopIcebergQty"] = stop_iceberg_qty - if stop_limit_time_in_force is not None: - payload["stopLimitTimeInForce"] = stop_limit_time_in_force - if new_order_resp_type is not None: - payload["new_order_resp_type"] = new_order_resp_type.value - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "order/oco", - payload=payload, + url_path = base_endpoint + "openOrderList" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(list[BinanceSpotOrderOco]) - return msgspec.json.decode(raw) - - async def cancel_oco_order( - self, - symbol: str, - order_list_id: Optional[str] = None, - list_client_order_id: Optional[str] = None, - new_client_order_id: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Cancel an entire Order List. - - Either `order_list_id` or `list_client_order_id` must be provided. - - Cancel OCO (TRADE). - `DELETE /api/v3/orderList`. + Parameters of openOrderList GET request. Parameters ---------- - symbol : str - The symbol for the request. - order_list_id : str, optional - The order list ID for the request. - list_client_order_id : str, optional - The list client order ID for the request. - new_client_order_id : str, optional - The new client order ID to uniquely identify this request. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000).
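At this point the file has defined several endpoints with an identical shape: a map from HTTP verb to Binance security type, a URL path, and a typed msgspec decoder per response. A simplified skeleton of that shared pattern (the real base class lives in `nautilus_trader.adapters.binance.http.endpoint`; the client's `send_request` signature here is assumed for illustration):

```python
from enum import Enum
from typing import Any

class MethodType(Enum):
    GET = "GET"
    POST = "POST"
    DELETE = "DELETE"

class SecurityType(Enum):
    NONE = "NONE"
    USER_DATA = "USER_DATA"
    TRADE = "TRADE"

class Endpoint:
    """Base: subclasses declare verb -> security level and decode their own responses."""

    def __init__(self, client: Any, methods: dict[MethodType, SecurityType], url_path: str):
        self._client = client
        self._methods = methods
        self._url_path = url_path

    async def _method(self, method_type: MethodType, params: Any) -> bytes:
        security = self._methods[method_type]  # KeyError => verb not supported here
        # `send_request` is a hypothetical client API standing in for the real
        # signing/limiting logic selected by `security`.
        return await self._client.send_request(
            method_type.value,
            self._url_path,
            params,
            security,
        )
```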
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#cancel-oco-trade - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if order_list_id is not None: - payload["orderListId"] = order_list_id - if list_client_order_id is not None: - payload["listClientOrderId"] = list_client_order_id - if new_client_order_id is not None: - payload["newClientOrderId"] = new_client_order_id - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "orderList", - payload=payload, - ) - return msgspec.json.decode(raw) - - async def get_oco_order( - self, - order_list_id: Optional[str], - orig_client_order_id: Optional[str], - recv_window: Optional[int] = None, - ) -> dict[str, Any]: - """ - Retrieve a specific OCO based on provided optional parameters. + timestamp: str + recvWindow: Optional[str] = None - Either `order_list_id` or `orig_client_order_id` must be provided. + async def _get(self, parameters: GetParameters) -> list[BinanceSpotOrderOco]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Query OCO (USER_DATA). - `GET /api/v3/orderList`. - Parameters - ---------- - order_list_id : str, optional - The order list ID for the request. - orig_client_order_id : str, optional - The original client order ID for the request. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceSpotAccountHttp(BinanceHttpEndpoint): + """ + Endpoint of current SPOT/MARGIN account information. - Returns - ------- - dict[str, Any] + `GET /api/v3/account` - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-oco-user_data + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#account-information-user_data + """ - """ - payload: dict[str, str] = {} - if order_list_id is not None: - payload["orderListId"] = order_list_id - if orig_client_order_id is not None: - payload["origClientOrderId"] = orig_client_order_id - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "orderList", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + url_path = base_endpoint + "account" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(BinanceSpotAccountInfo) - return msgspec.json.decode(raw) - - async def get_oco_orders( - self, - from_id: Optional[str] = None, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = None, - recv_window: Optional[int] = None, - ) -> dict[str, Any]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Retrieve all OCO based on provided optional parameters. - - If `from_id` is provided then neither `start_time` nor `end_time` can be - provided. - - Query all OCO (USER_DATA). - `GET /api/v3/allOrderList`. + Parameters of account GET request. Parameters ---------- - from_id : int, optional - The order ID filter for the request. - start_time : int, optional - The start time (UNIX milliseconds) filter for the request. 
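Every signed parameter struct carries a millisecond `timestamp` field, which the API object fills via its `_timestamp()` helper from the injected `LiveClock`. A stand-in illustration of the convention (using `time.time()` in place of the clock):

```python
import time


def timestamp_ms() -> str:
    # The adapter presumably derives this from its injected LiveClock;
    # time.time() stands in for illustration only.
    return str(int(time.time() * 1000))


print(timestamp_ms())  # e.g. '1676700000000'
```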
- end_time : int, optional - The end time (UNIX milliseconds) filter for the request. - limit : int, optional - The limit for the response. - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-all-oco-user_data - """ - payload: dict[str, str] = {} - if from_id is not None: - payload["fromId"] = from_id - if start_time is not None: - payload["startTime"] = str(start_time) - if end_time is not None: - payload["endTime"] = str(end_time) - if limit is not None: - payload["limit"] = str(limit) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "allOrderList", - payload=payload, - ) - return msgspec.json.decode(raw) + timestamp: str + recvWindow: Optional[str] = None - async def get_oco_open_orders(self, recv_window: Optional[int] = None) -> dict[str, Any]: - """ - Get all open OCO orders. + async def _get(self, parameters: GetParameters) -> BinanceSpotAccountInfo: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Query Open OCO (USER_DATA). - GET /api/v3/openOrderList. - Parameters - ---------- - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceSpotOrderRateLimitHttp(BinanceHttpEndpoint): + """ + Endpoint of current SPOT/MARGIN order count usage for all intervals. - Returns - ------- - dict[str, Any] + `GET /api/v3/rateLimit/order` - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-open-oco-user_data + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#query-current-order-count-usage-trade + """ - """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "openOrderList", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.TRADE, + } + url_path = base_endpoint + "rateLimit/order" + super().__init__( + client, + methods, + url_path, ) + self._resp_decoder = msgspec.json.Decoder(list[BinanceRateLimit]) - return msgspec.json.decode(raw) - - async def account(self, recv_window: Optional[int] = None) -> BinanceSpotAccountInfo: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get current account information. - - Account Information (USER_DATA). - `GET /api/v3/account`. + Parameters of rateLimit/order GET request. Parameters ---------- - recv_window : int, optional + timestamp : str + The millisecond timestamp of the request. + recvWindow : str, optional The response receive window for the request (cannot be greater than 60000). 
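The rateLimit/order endpoint returns one `BinanceRateLimit` entry per interval. A hypothetical helper that converts those into remaining order headroom; the field names `rateLimitType`, `limit`, and `count` follow the Binance docs and may well be modeled as enums/ints in the adapter's schema:

```python
async def remaining_order_headroom(account_api) -> int:
    """Smallest remaining order count across all ORDERS rate-limit intervals."""
    limits = await account_api.query_spot_order_rate_limit()
    # "ORDERS" entries only; the adapter may use an enum for rateLimitType.
    order_limits = [lim for lim in limits if str(lim.rateLimitType).endswith("ORDERS")]
    if not order_limits:
        return 0
    return min(lim.limit - lim.count for lim in order_limits)
```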
- - Returns - ------- - BinanceSpotAccountInfo - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#account-information-user_data - """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "account", - payload=payload, - ) - return self._decoder_account_info.decode(raw) + timestamp: str + recvWindow: Optional[str] = None - async def get_account_trades( - self, - symbol: str, - from_id: Optional[str] = None, - order_id: Optional[str] = None, - start_time: Optional[int] = None, - end_time: Optional[int] = None, - limit: Optional[int] = None, - recv_window: Optional[int] = None, - ) -> list[dict[str, Any]]: - """ - Get trades for a specific account and symbol. + async def _get(self, parameters: GetParameters) -> list[BinanceRateLimit]: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._resp_decoder.decode(raw) - Account Trade List (USER_DATA) - Parameters - ---------- - symbol : str - The symbol for the request. - from_id : str, optional - The trade match ID to query from. - order_id : str, optional - The order ID for the trades. This can only be used in combination with symbol. - start_time : int, optional - The start time (UNIX milliseconds) filter for the request. - end_time : int, optional - The end time (UNIX milliseconds) filter for the request. - limit : int, optional - The limit for the response. - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). +class BinanceSpotAccountHttpAPI(BinanceAccountHttpAPI): + """ + Provides access to the `Binance Spot/Margin` Account/Trade HTTP REST API. - Returns - ------- - list[dict[str, Any]] + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. + clock : LiveClock, + The clock for the API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint prefix. 
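The refactored account API now takes a clock and an account type, and (per the guard in the constructor just below) rejects anything that is not spot or margin at construction time. A hedged construction sketch, assuming `client` and `clock` are already configured:

```python
from nautilus_trader.adapters.binance.common.enums import BinanceAccountType

# `client` is a configured BinanceHttpClient, `clock` a LiveClock (setup elided).
account_api = BinanceSpotAccountHttpAPI(
    client=client,
    clock=clock,
    account_type=BinanceAccountType.SPOT,
)
# Passing a futures account type here would raise RuntimeError immediately,
# via the is_spot_or_margin check below.
```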
+ """ - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#account-trade-list-user_data + def __init__( + self, + client: BinanceHttpClient, + clock: LiveClock, + account_type: BinanceAccountType = BinanceAccountType.SPOT, + ): + super().__init__( + client=client, + clock=clock, + account_type=account_type, + ) - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if from_id is not None: - payload["fromId"] = from_id - if order_id is not None: - payload["orderId"] = order_id - if start_time is not None: - payload["startTime"] = str(start_time) - if end_time is not None: - payload["endTime"] = str(end_time) - if limit is not None: - payload["limit"] = str(limit) - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "myTrades", - payload=payload, + if not account_type.is_spot_or_margin: + raise RuntimeError( # pragma: no cover (design-time error) + f"`BinanceAccountType` not SPOT, MARGIN_CROSS or MARGIN_ISOLATED, was {account_type}", # pragma: no cover + ) + + # Create endpoints + self._endpoint_spot_open_orders = BinanceSpotOpenOrdersHttp(client, self.base_endpoint) + self._endpoint_spot_order_oco = BinanceSpotOrderOcoHttp(client, self.base_endpoint) + self._endpoint_spot_order_list = BinanceSpotOrderListHttp(client, self.base_endpoint) + self._endpoint_spot_all_order_list = BinanceSpotAllOrderListHttp(client, self.base_endpoint) + self._endpoint_spot_open_order_list = BinanceSpotOpenOrderListHttp( + client, + self.base_endpoint, + ) + self._endpoint_spot_account = BinanceSpotAccountHttp(client, self.base_endpoint) + self._endpoint_spot_order_rate_limit = BinanceSpotOrderRateLimitHttp( + client, + self.base_endpoint, ) - return msgspec.json.decode(raw) + async def new_spot_oco( + self, + symbol: str, + side: BinanceOrderSide, + quantity: str, + price: str, + stop_price: str, + list_client_order_id: Optional[str] = None, + limit_client_order_id: Optional[str] = None, + limit_strategy_id: Optional[int] = None, + limit_strategy_type: Optional[int] = None, + limit_iceberg_qty: Optional[str] = None, + trailing_delta: Optional[str] = None, + stop_client_order_id: Optional[str] = None, + stop_strategy_id: Optional[int] = None, + stop_strategy_type: Optional[int] = None, + stop_limit_price: Optional[str] = None, + stop_iceberg_qty: Optional[str] = None, + stop_limit_time_in_force: Optional[BinanceTimeInForce] = None, + new_order_resp_type: Optional[BinanceNewOrderRespType] = None, + recv_window: Optional[str] = None, + ) -> BinanceSpotOrderOco: + """Send in a new spot OCO order to Binance.""" + if stop_limit_price is not None and stop_limit_time_in_force is None: + raise RuntimeError( + "stopLimitPrice cannot be provided without stopLimitTimeInForce.", + ) + if stop_limit_time_in_force == BinanceTimeInForce.GTX: + raise RuntimeError( + "stopLimitTimeInForce, Good Till Crossing (GTX) not supported.", + ) + return await self._endpoint_spot_order_oco._post( + parameters=self._endpoint_spot_order_oco.PostParameters( + symbol=BinanceSymbol(symbol), + timestamp=self._timestamp(), + side=side, + quantity=quantity, + price=price, + stopPrice=stop_price, + listClientOrderId=list_client_order_id, + limitClientOrderId=limit_client_order_id, + limitStrategyId=limit_strategy_id, + limitStrategyType=limit_strategy_type, + limitIcebergQty=limit_iceberg_qty, + trailingDelta=trailing_delta, + stopClientOrderId=stop_client_order_id, + 
stopStrategyId=stop_strategy_id, + stopStrategyType=stop_strategy_type, + stopLimitPrice=stop_limit_price, + stopIcebergQty=stop_iceberg_qty, + stopLimitTimeInForce=stop_limit_time_in_force, + newOrderRespType=new_order_resp_type, + recvWindow=recv_window, + ), + ) - async def get_order_rate_limit(self, recv_window: Optional[int] = None) -> dict[str, Any]: - """ - Get the user's current order count usage for all intervals. + async def query_spot_oco( + self, + order_list_id: Optional[str] = None, + orig_client_order_id: Optional[str] = None, + recv_window: Optional[str] = None, + ) -> BinanceSpotOrderOco: + """Check single spot OCO order information.""" + if order_list_id is None and orig_client_order_id is None: + raise RuntimeError( + "Either orderListId or origClientOrderId must be provided.", + ) + return await self._endpoint_spot_order_list._get( + parameters=self._endpoint_spot_order_list.GetParameters( + timestamp=self._timestamp(), + orderListId=order_list_id, + origClientOrderId=orig_client_order_id, + recvWindow=recv_window, + ), + ) - Query Current Order Count Usage (TRADE). - `GET /api/v3/rateLimit/order`. + async def cancel_all_open_orders( + self, + symbol: str, + recv_window: Optional[str] = None, + ) -> bool: + """Cancel all active orders on a symbol, including OCO. Returns whether successful.""" + await self._endpoint_spot_open_orders._delete( + parameters=self._endpoint_spot_open_orders.DeleteParameters( + timestamp=self._timestamp(), + symbol=BinanceSymbol(symbol), + recvWindow=recv_window, + ), + ) + return True - Parameters - ---------- - recv_window : int, optional - The response receive window for the request (cannot be greater than 60000). + async def cancel_spot_oco( + self, + symbol: str, + order_list_id: Optional[str] = None, + list_client_order_id: Optional[str] = None, + new_client_order_id: Optional[str] = None, + recv_window: Optional[str] = None, + ) -> BinanceSpotOrderOco: + """Delete spot OCO order from Binance.""" + if order_list_id is None and list_client_order_id is None: + raise RuntimeError( + "Either orderListId or listClientOrderId must be provided.", + ) + return await self._endpoint_spot_order_list._delete( + parameters=self._endpoint_spot_order_list.DeleteParameters( + timestamp=self._timestamp(), + symbol=BinanceSymbol(symbol), + orderListId=order_list_id, + listClientOrderId=list_client_order_id, + newClientOrderId=new_client_order_id, + recvWindow=recv_window, + ), + ) - Returns - ------- - dict[str, Any] + async def query_spot_all_oco( + self, + from_id: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + limit: Optional[int] = None, + recv_window: Optional[str] = None, + ) -> list[BinanceSpotOrderOco]: + """Check all spot OCO orders' information, matching provided filter parameters.""" + if from_id is not None and (start_time or end_time) is not None: + raise RuntimeError( + "Cannot specify both fromId and a startTime/endTime.", + ) + return await self._endpoint_spot_all_order_list._get( + parameters=self._endpoint_spot_all_order_list.GetParameters( + timestamp=self._timestamp(), + fromId=from_id, + startTime=start_time, + endTime=end_time, + limit=limit, + recvWindow=recv_window, + ), + ) - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#query-current-order-count-usage-trade + async def query_spot_all_open_oco( + self, + recv_window: Optional[str] = None, + ) -> list[BinanceSpotOrderOco]: + """Check all OPEN spot OCO orders' information.""" + return await 
self._endpoint_spot_open_order_list._get( + parameters=self._endpoint_spot_open_order_list.GetParameters( + timestamp=self._timestamp(), + recvWindow=recv_window, + ), + ) - """ - payload: dict[str, str] = {} - if recv_window is not None: - payload["recvWindow"] = str(recv_window) - - raw: bytes = await self.client.sign_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "rateLimit/order", - payload=payload, + async def query_spot_account_info( + self, + recv_window: Optional[str] = None, + ) -> BinanceSpotAccountInfo: + """Check SPOT/MARGIN Binance account information.""" + return await self._endpoint_spot_account._get( + parameters=self._endpoint_spot_account.GetParameters( + timestamp=self._timestamp(), + recvWindow=recv_window, + ), ) - return msgspec.json.decode(raw) + async def query_spot_order_rate_limit( + self, + recv_window: Optional[str] = None, + ) -> list[BinanceRateLimit]: + """Check SPOT/MARGIN order count/rateLimit.""" + return await self._endpoint_spot_order_rate_limit._get( + parameters=self._endpoint_spot_order_rate_limit.GetParameters( + timestamp=self._timestamp(), + recvWindow=recv_window, + ), + ) diff --git a/nautilus_trader/adapters/binance/spot/http/market.py b/nautilus_trader/adapters/binance/spot/http/market.py index ef4b51fa65fb..d20366ad4943 100644 --- a/nautilus_trader/adapters/binance/spot/http/market.py +++ b/nautilus_trader/adapters/binance/spot/http/market.py @@ -13,452 +13,170 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from typing import Any, Optional +from typing import Optional import msgspec -from nautilus_trader.adapters.binance.common.functions import convert_symbols_list_to_json_array -from nautilus_trader.adapters.binance.common.functions import format_symbol -from nautilus_trader.adapters.binance.common.schemas import BinanceTrade +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbols from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.adapters.binance.http.market import BinanceMarketHttpAPI +from nautilus_trader.adapters.binance.spot.enums import BinanceSpotPermissions +from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotAvgPrice from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotExchangeInfo -class BinanceSpotMarketHttpAPI: +class BinanceSpotExchangeInfoHttp(BinanceHttpEndpoint): """ - Provides access to the `Binance Futures` Market HTTP REST API. + Endpoint of SPOT/MARGIN exchange trading rules and symbol information. - Parameters + `GET /api/v3/exchangeInfo` + + References ---------- - client : BinanceHttpClient - The Binance REST API client. 
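Tying the account-side additions together, a hypothetical OCO round trip using the methods from this diff (values illustrative; `oco.listClientOrderId` is assumed from the Binance response schema rather than confirmed by this patch):

```python
from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide
from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce


async def place_and_cancel_oco(account_api) -> None:
    oco = await account_api.new_spot_oco(
        symbol="ETHUSDT",
        side=BinanceOrderSide.SELL,
        quantity="1.0",
        price="2000.0",       # limit leg
        stop_price="1800.0",  # stop trigger
        stop_limit_price="1790.0",
        stop_limit_time_in_force=BinanceTimeInForce.GTC,  # required with stopLimitPrice
    )
    await account_api.cancel_spot_oco(
        symbol="ETHUSDT",
        list_client_order_id=oco.listClientOrderId,  # field name assumed from Binance docs
    )
```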
+ https://binance-docs.github.io/apidocs/spot/en/#exchange-information """ - BASE_ENDPOINT = "/api/v3/" - - def __init__(self, client: BinanceHttpClient): - self.client = client - - self._decoder_exchange_info = msgspec.json.Decoder(BinanceSpotExchangeInfo) - self._decoder_trades = msgspec.json.Decoder(list[BinanceTrade]) - - async def ping(self) -> dict[str, Any]: - """ - Test the connectivity to the REST API. - - `GET /api/v3/ping` - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#test-connectivity - - """ - raw: bytes = await self.client.query(url_path=self.BASE_ENDPOINT + "ping") - return msgspec.json.decode(raw) - - async def time(self) -> dict[str, Any]: - """ - Test connectivity to the Rest API and get the current server time. - - Check Server Time. - `GET /api/v3/time` - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#check-server-time - - """ - raw: bytes = await self.client.query(url_path=self.BASE_ENDPOINT + "time") - return msgspec.json.decode(raw) - - async def exchange_info( + def __init__( self, - symbol: Optional[str] = None, - symbols: Optional[list[str]] = None, - ) -> BinanceSpotExchangeInfo: - """ - Get current exchange trading rules and symbol information. - Only either `symbol` or `symbols` should be passed. - - Exchange Information. - `GET /api/v3/exchangeinfo` - - Parameters - ---------- - symbol : str, optional - The trading pair. - symbols : list[str], optional - The list of trading pairs. - - Returns - ------- - BinanceSpotExchangeInfo - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#exchange-information - - """ - if symbol and symbols: - raise ValueError("`symbol` and `symbols` cannot be sent together") - - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) - if symbols is not None: - payload["symbols"] = convert_symbols_list_to_json_array(symbols) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "exchangeInfo", - payload=payload, - ) - - return self._decoder_exchange_info.decode(raw) - - async def depth(self, symbol: str, limit: Optional[int] = None) -> dict[str, Any]: - """ - Get orderbook. - - `GET /api/v3/depth` - - Parameters - ---------- - symbol : str - The trading pair. - limit : int, optional, default 100 - The limit for the response. Default 100; max 5000. - Valid limits:[5, 10, 20, 50, 100, 500, 1000, 5000]. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#order-book - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "depth", - payload=payload, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, + } + url_path = base_endpoint + "exchangeInfo" + super().__init__( + client, + methods, + url_path, ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceSpotExchangeInfo) - return msgspec.json.decode(raw) - - async def trades(self, symbol: str, limit: Optional[int] = None) -> list[BinanceTrade]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get recent market trades. - - Recent Trades List. - `GET /api/v3/trades` + GET exchangeInfo parameters. Parameters ---------- - symbol : str - The trading pair. 
- limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - list[BinanceTrade] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#recent-trades-list - + symbol : BinanceSymbol, optional + The trading pair to get exchange info for. + symbols : BinanceSymbols, optional + The list of trading pairs to get exchange info for. + permissions : BinanceSpotPermissions, optional + Filter the symbols list by supported permissions. """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "trades", - payload=payload, - ) - return self._decoder_trades.decode(raw) + symbol: Optional[BinanceSymbol] = None + symbols: Optional[BinanceSymbols] = None + permissions: Optional[BinanceSpotPermissions] = None - async def historical_trades( - self, - symbol: str, - from_id: Optional[int] = None, - limit: Optional[int] = None, - ) -> dict[str, Any]: - """ - Get older market trades. - - Old Trade Lookup. - `GET /api/v3/historicalTrades` + async def _get(self, parameters: Optional[GetParameters] = None) -> BinanceSpotExchangeInfo: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) - Parameters - ---------- - symbol : str - The trading pair. - from_id : int, optional - The trade ID to fetch from. Default gets most recent trades. - limit : int, optional - The limit for the response. Default 500; max 1000. - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#old-trade-lookup - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if limit is not None: - payload["limit"] = str(limit) - if from_id is not None: - payload["fromId"] = str(from_id) - - raw: bytes = await self.client.limit_request( - http_method="GET", - url_path=self.BASE_ENDPOINT + "historicalTrades", - payload=payload, - ) - - return msgspec.json.decode(raw) - - async def agg_trades( - self, - symbol: str, - from_id: Optional[int] = None, - start_time_ms: Optional[int] = None, - end_time_ms: Optional[int] = None, - limit: Optional[int] = None, - ) -> dict[str, Any]: - """ - Get recent aggregated market trades. - - Compressed/Aggregate Trades List. - `GET /api/v3/aggTrades` - - Parameters - ---------- - symbol : str - The trading pair. - from_id : int, optional - The trade ID to fetch from. Default gets most recent trades. - start_time_ms : int, optional - The UNIX timestamp (milliseconds) to get aggregate trades from INCLUSIVE. - end_time_ms: int, optional - The UNIX timestamp (milliseconds) to get aggregate trades until INCLUSIVE. - limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#compressed-aggregate-trades-list - - """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - if from_id is not None: - payload["fromId"] = str(from_id) - if start_time_ms is not None: - payload["startTime"] = str(start_time_ms) - if end_time_ms is not None: - payload["endTime"] = str(end_time_ms) - if limit is not None: - payload["limit"] = str(limit) +class BinanceSpotAvgPriceHttp(BinanceHttpEndpoint): + """ + Endpoint of current average price of a symbol.
- raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "aggTrades", - payload=payload, - ) + `GET /api/v3/avgPrice` - return msgspec.json.decode(raw) + References + ---------- + https://binance-docs.github.io/apidocs/spot/en/#current-average-price + """ - async def klines( + def __init__( self, - symbol: str, - interval: str, - start_time_ms: Optional[int] = None, - end_time_ms: Optional[int] = None, - limit: Optional[int] = None, - ) -> list[list[Any]]: - """ - Kline/Candlestick Data. - - `GET /api/v3/klines` - - Parameters - ---------- - symbol : str - The trading pair. - interval : str - The interval of kline, e.g 1m, 5m, 1h, 1d, etc. - start_time_ms : int, optional - The UNIX timestamp (milliseconds) to get aggregate trades from INCLUSIVE. - end_time_ms: int, optional - The UNIX timestamp (milliseconds) to get aggregate trades until INCLUSIVE. - limit : int, optional - The limit for the response. Default 500; max 1000. - - Returns - ------- - list[list[Any]] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data - - """ - payload: dict[str, str] = { - "symbol": format_symbol(symbol), - "interval": interval, + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.NONE, } - if start_time_ms is not None: - payload["startTime"] = str(start_time_ms) - if end_time_ms is not None: - payload["endTime"] = str(end_time_ms) - if limit is not None: - payload["limit"] = str(limit) - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "klines", - payload=payload, + url_path = base_endpoint + "avgPrice" + super().__init__( + client, + methods, + url_path, ) + self._get_resp_decoder = msgspec.json.Decoder(BinanceSpotAvgPrice) - return msgspec.json.decode(raw) - - async def avg_price(self, symbol: str) -> dict[str, Any]: + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): """ - Get the current average price for the given symbol. - - `GET /api/v3/avgPrice` + GET avgPrice parameters. Parameters ---------- - symbol : str - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#current-average-price - + symbol : BinanceSymbol + Specify trading pair to get average price for. """ - payload: dict[str, str] = {"symbol": format_symbol(symbol)} - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "avgPrice", - payload=payload, - ) + symbol: BinanceSymbol = None - return msgspec.json.decode(raw) - - async def ticker_24hr(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - 24hr Ticker Price Change Statistics. - - `GET /api/v3/ticker/24hr` - - Parameters - ---------- - symbol : str, optional - The trading pair. + async def _get(self, parameters: GetParameters) -> BinanceSpotAvgPrice: + method_type = BinanceMethodType.GET + raw = await self._method(method_type, parameters) + return self._get_resp_decoder.decode(raw) - Returns - ------- - dict[str, Any] - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#24hr-ticker-price-change-statistics +class BinanceSpotMarketHttpAPI(BinanceMarketHttpAPI): + """ + Provides access to the `Binance Spot` Market HTTP REST API. - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) + Parameters + ---------- + client : BinanceHttpClient + The Binance REST API client. 
+ account_type : BinanceAccountType + The Binance account type, used to select the endpoint. + """ - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/24hr", - payload=payload, + def __init__( + self, + client: BinanceHttpClient, + account_type: BinanceAccountType = BinanceAccountType.SPOT, + ): + super().__init__( + client=client, + account_type=account_type, ) - return msgspec.json.decode(raw) + if not account_type.is_spot_or_margin: + raise RuntimeError( # pragma: no cover (design-time error) + f"`BinanceAccountType` not SPOT, MARGIN_CROSS or MARGIN_ISOLATED, was {account_type}", # pragma: no cover + ) - async def ticker_price(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - Symbol Price Ticker. - - `GET /api/v3/ticker/price` - - Parameters - ---------- - symbol : str, optional - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#symbol-price-ticker - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol) + self._endpoint_spot_exchange_info = BinanceSpotExchangeInfoHttp(client, self.base_endpoint) + self._endpoint_spot_average_price = BinanceSpotAvgPriceHttp(client, self.base_endpoint) - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/price", - payload=payload, + async def query_spot_exchange_info( + self, + symbol: Optional[str] = None, + symbols: Optional[list[str]] = None, + permissions: Optional[BinanceSpotPermissions] = None, + ) -> BinanceSpotExchangeInfo: + """Check Binance Spot exchange information.""" + if symbol and symbols: + raise ValueError("`symbol` and `symbols` cannot be sent together") + return await self._endpoint_spot_exchange_info._get( + parameters=self._endpoint_spot_exchange_info.GetParameters( + symbol=BinanceSymbol(symbol), + symbols=BinanceSymbols(symbols), + permissions=permissions, + ), ) - return msgspec.json.decode(raw) - - async def book_ticker(self, symbol: Optional[str] = None) -> dict[str, Any]: - """ - Symbol Order Book Ticker. - - `GET /api/v3/ticker/bookTicker` - - Parameters - ---------- - symbol : str, optional - The trading pair. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - - """ - payload: dict[str, str] = {} - if symbol is not None: - payload["symbol"] = format_symbol(symbol).upper() - - raw: bytes = await self.client.query( - url_path=self.BASE_ENDPOINT + "ticker/bookTicker", - payload=payload, + async def query_spot_average_price(self, symbol: str) -> BinanceSpotAvgPrice: + """Check average price for a provided symbol on the Spot exchange.""" + return await self._endpoint_spot_average_price._get( + parameters=self._endpoint_spot_average_price.GetParameters( + symbol=BinanceSymbol(symbol), + ), ) - - return msgspec.json.decode(raw) diff --git a/nautilus_trader/adapters/binance/spot/http/user.py b/nautilus_trader/adapters/binance/spot/http/user.py index 4ebe8c75274d..4f37c6abae1a 100644 --- a/nautilus_trader/adapters/binance/spot/http/user.py +++ b/nautilus_trader/adapters/binance/spot/http/user.py @@ -13,16 +13,13 @@ # limitations under the License. 
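# A minimal usage sketch for the reworked Spot market API above (illustrative
# sketch only; assumes an already-connected `BinanceHttpClient` named `client`):
#
#     market_api = BinanceSpotMarketHttpAPI(client=client)
#     info = await market_api.query_spot_exchange_info(symbol="BTCUSDT")
#     avg_price = await market_api.query_spot_average_price("BTCUSDT")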
# ------------------------------------------------------------------------------------------------- -from typing import Any - -import msgspec from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import format_symbol from nautilus_trader.adapters.binance.http.client import BinanceHttpClient +from nautilus_trader.adapters.binance.http.user import BinanceUserDataHttpAPI -class BinanceSpotUserDataHttpAPI: +class BinanceSpotUserDataHttpAPI(BinanceUserDataHttpAPI): """ Provides access to the `Binance Spot/Margin` User Data HTTP REST API. @@ -30,6 +27,8 @@ class BinanceSpotUserDataHttpAPI: ---------- client : BinanceHttpClient The Binance REST API client. + account_type : BinanceAccountType + The Binance account type, used to select the endpoint. """ def __init__( @@ -37,201 +36,12 @@ def __init__( client: BinanceHttpClient, account_type: BinanceAccountType = BinanceAccountType.SPOT, ): - self.client = client - self.account_type = account_type + super().__init__( + client=client, + account_type=account_type, + ) - if account_type == BinanceAccountType.SPOT: - self.BASE_ENDPOINT = "/api/v3/" - elif account_type == BinanceAccountType.MARGIN: - self.BASE_ENDPOINT = "sapi/v1/" - else: + if not account_type.is_spot_or_margin: raise RuntimeError( # pragma: no cover (design-time error) - f"invalid `BinanceAccountType`, was {account_type}", # pragma: no cover (design-time error) # noqa + f"`BinanceAccountType` not SPOT, MARGIN_CROSS or MARGIN_ISOLATED, was {account_type}", # pragma: no cover (design-time error) # noqa ) - - async def create_listen_key(self) -> dict[str, Any]: - """ - Create a new listen key for the Binance Spot/Margin. - - Start a new user data stream. The stream will close after 60 minutes - unless a keepalive is sent. If the account has an active listenKey, - that listenKey will be returned and its validity will be extended for 60 - minutes. - - Create a ListenKey (USER_STREAM). - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot - - """ - raw: bytes = await self.client.send_request( - http_method="POST", - url_path=self.BASE_ENDPOINT + "userDataStream", - ) - - return msgspec.json.decode(raw) - - async def ping_listen_key(self, key: str) -> dict[str, Any]: - """ - Ping/Keep-alive a listen key for the Binance Spot/Margin API. - - Keep-alive a user data stream to prevent a time-out. User data streams - will close after 60 minutes. It's recommended to send a ping about every - 30 minutes. - - Ping/Keep-alive a ListenKey (USER_STREAM). - - Parameters - ---------- - key : str - The listen key for the request. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot - - """ - raw: bytes = await self.client.send_request( - http_method="PUT", - url_path=self.BASE_ENDPOINT + "userDataStream", - payload={"listenKey": key}, - ) - - return msgspec.json.decode(raw) - - async def close_listen_key(self, key: str) -> dict[str, Any]: - """ - Close a listen key for the Binance Spot/Margin API. - - Close a ListenKey (USER_STREAM). - - Parameters - ---------- - key : str - The listen key for the request. 
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-spot - - """ - raw: bytes = await self.client.send_request( - http_method="DELETE", - url_path=self.BASE_ENDPOINT + "userDataStream", - payload={"listenKey": key}, - ) - - return msgspec.json.decode(raw) - - async def create_listen_key_isolated_margin(self, symbol: str) -> dict[str, Any]: - """ - Create a new listen key for the ISOLATED MARGIN API. - - Start a new user data stream. The stream will close after 60 minutes - unless a keepalive is sent. If the account has an active listenKey, - that listenKey will be returned and its validity will be extended for 60 - minutes. - - Create a ListenKey (USER_STREAM). - `POST /api/v3/userDataStream `. - - Parameters - ---------- - symbol : str - The symbol for the listen key request. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-isolated-margin - - """ - raw: bytes = await self.client.send_request( - http_method="POST", - url_path="/sapi/v1/userDataStream/isolated", - payload={"symbol": format_symbol(symbol)}, - ) - - return msgspec.json.decode(raw) - - async def ping_listen_key_isolated_margin(self, symbol: str, key: str) -> dict[str, Any]: - """ - Ping/Keep-alive a listen key for the ISOLATED MARGIN API. - - Keep-alive a user data stream to prevent a time-out. User data streams - will close after 60 minutes. It's recommended to send a ping about every - 30 minutes. - - Ping/Keep-alive a ListenKey (USER_STREAM). - `PUT /api/v3/userDataStream`. - - Parameters - ---------- - symbol : str - The symbol for the listen key request. - key : str - The listen key for the request. - - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-isolated-margin - - """ - raw: bytes = await self.client.send_request( - http_method="PUT", - url_path="/sapi/v1/userDataStream/isolated", - payload={"listenKey": key, "symbol": format_symbol(symbol)}, - ) - - return msgspec.json.decode(raw) - - async def close_listen_key_isolated_margin(self, symbol: str, key: str) -> dict[str, Any]: - """ - Close a listen key for the ISOLATED MARGIN API. - - Close a ListenKey (USER_STREAM). - `DELETE /sapi/v1/userDataStream`. - - Parameters - ---------- - symbol : str - The symbol for the listen key request. - key : str - The listen key for the request. 
- - Returns - ------- - dict[str, Any] - - References - ---------- - https://binance-docs.github.io/apidocs/spot/en/#listen-key-isolated-margin - - """ - raw: bytes = await self.client.send_request( - http_method="DELETE", - url_path="/sapi/v1/userDataStream/isolated", - payload={"listenKey": key, "symbol": format_symbol(symbol)}, - ) - - return msgspec.json.decode(raw) diff --git a/nautilus_trader/adapters/binance/spot/http/wallet.py b/nautilus_trader/adapters/binance/spot/http/wallet.py index a81672613fa5..1ce3abe3d921 100644 --- a/nautilus_trader/adapters/binance/spot/http/wallet.py +++ b/nautilus_trader/adapters/binance/spot/http/wallet.py @@ -17,94 +17,111 @@ import msgspec +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceMethodType +from nautilus_trader.adapters.binance.common.enums import BinanceSecurityType +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol from nautilus_trader.adapters.binance.http.client import BinanceHttpClient -from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFees +from nautilus_trader.adapters.binance.http.endpoint import BinanceHttpEndpoint +from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFee +from nautilus_trader.common.clock import LiveClock -class BinanceSpotWalletHttpAPI: +class BinanceSpotTradeFeeHttp(BinanceHttpEndpoint): """ - Provides access to the `Binance Spot/Margin` Wallet HTTP REST API. + Endpoint of maker/taker trade fee information. - Parameters + `GET /sapi/v1/asset/tradeFee` + + References ---------- - client : BinanceHttpClient - The Binance REST API client. + https://binance-docs.github.io/apidocs/spot/en/#trade-fee-user_data """ - def __init__(self, client: BinanceHttpClient): - self.client = client - - self._decoder_trade_fees = msgspec.json.Decoder(BinanceSpotTradeFees) - self._decoder_trade_fees_array = msgspec.json.Decoder(list[BinanceSpotTradeFees]) - - async def trade_fee( + def __init__( self, - symbol: Optional[str] = None, - recv_window: Optional[int] = None, - ) -> BinanceSpotTradeFees: - """ - Fetch trade fee. + client: BinanceHttpClient, + base_endpoint: str, + ): + methods = { + BinanceMethodType.GET: BinanceSecurityType.USER_DATA, + } + super().__init__( + client, + methods, + base_endpoint + "tradeFee", + ) + self._get_obj_resp_decoder = msgspec.json.Decoder(BinanceSpotTradeFee) + self._get_arr_resp_decoder = msgspec.json.Decoder(list[BinanceSpotTradeFee]) - `GET /sapi/v1/asset/tradeFee` + class GetParameters(msgspec.Struct, omit_defaults=True, frozen=True): + """ + GET parameters for fetching trade fees. Parameters ---------- - symbol : str, optional - The trading pair. If None then queries for all symbols. - recv_window : int, optional - The acceptable receive window for the response. 
+        symbol : BinanceSymbol, optional
+            The trading pair to query the trade fee for. If None, fees for all
+            symbols are returned.
+        recvWindow : str, optional
+            The number of milliseconds after `timestamp` for which the request
+            remains valid.
+        timestamp : str
+            The millisecond timestamp of the request.
+        """

-        Returns
-        -------
-        BinanceSpotTradeFees
+        timestamp: str
+        symbol: Optional[BinanceSymbol] = None
+        recvWindow: Optional[str] = None

-        References
-        ----------
-        https://binance-docs.github.io/apidocs/spot/en/#trade-fee-user_data
+    async def _get(self, parameters: GetParameters) -> list[BinanceSpotTradeFee]:
+        method_type = BinanceMethodType.GET
+        raw = await self._method(method_type, parameters)
+        if parameters.symbol is not None:
+            return [self._get_obj_resp_decoder.decode(raw)]
+        else:
+            return self._get_arr_resp_decoder.decode(raw)

-        """
-        payload: dict[str, str] = {}
-        if symbol is not None:
-            payload["symbol"] = symbol
-        if recv_window is not None:
-            payload["recv_window"] = str(recv_window)
-
-        raw: bytes = await self.client.sign_request(
-            http_method="GET",
-            url_path="/sapi/v1/asset/tradeFee",
-            payload=payload,
-        )
-        return self._decoder_trade_fees.decode(raw)

+class BinanceSpotWalletHttpAPI:
+    """
+    Provides access to the `Binance Spot/Margin` Wallet HTTP REST API.

-    async def trade_fees(self, recv_window: Optional[int] = None) -> list[BinanceSpotTradeFees]:
-        """
-        Fetch trade fee.
+    Parameters
+    ----------
+    client : BinanceHttpClient
+        The Binance REST API client.
+    """

-        `GET /sapi/v1/asset/tradeFee`
+    def __init__(
+        self,
+        client: BinanceHttpClient,
+        clock: LiveClock,
+        account_type: BinanceAccountType = BinanceAccountType.SPOT,
+    ):
+        self.client = client
+        self._clock = clock
+        self.base_endpoint = "/sapi/v1/asset/"

-        Parameters
-        ----------
-        recv_window : int, optional
-            The acceptable receive window for the response.
+        if not account_type.is_spot_or_margin:
+            raise RuntimeError(  # pragma: no cover (design-time error)
+                f"`BinanceAccountType` not SPOT, MARGIN_CROSS or MARGIN_ISOLATED, was {account_type}",  # pragma: no cover
+            )

-        Returns
-        -------
-        list[BinanceSpotTradeFees]
+        self._endpoint_spot_trade_fee = BinanceSpotTradeFeeHttp(client, self.base_endpoint)

-        References
-        ----------
-        https://binance-docs.github.io/apidocs/spot/en/#trade-fee-user_data
+    def _timestamp(self) -> str:
+        """Create Binance timestamp from internal clock."""
+        return str(self._clock.timestamp_ms())

-        """
-        payload: dict[str, str] = {}
-        if recv_window is not None:
-            payload["recv_window"] = str(recv_window)
-
-        raw: bytes = await self.client.sign_request(
-            http_method="GET",
-            url_path="/sapi/v1/asset/tradeFee",
-            payload=payload,
+    async def query_spot_trade_fees(
+        self,
+        symbol: Optional[str] = None,
+        recv_window: Optional[str] = None,
+    ) -> list[BinanceSpotTradeFee]:
+        fees = await self._endpoint_spot_trade_fee._get(
+            parameters=self._endpoint_spot_trade_fee.GetParameters(
+                timestamp=self._timestamp(),
+                symbol=BinanceSymbol(symbol) if symbol is not None else None,
+                recvWindow=recv_window,
+            ),
        )
-
-        return self._decoder_trade_fees_array.decode(raw)
+        return fees
diff --git a/nautilus_trader/adapters/binance/spot/parsing/account.py b/nautilus_trader/adapters/binance/spot/parsing/account.py
deleted file mode 100644
index 0bed5e1f27ad..000000000000
--- a/nautilus_trader/adapters/binance/spot/parsing/account.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -------------------------------------------------------------------------------------------------
-# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved.
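# A minimal usage sketch for the wallet API above (illustrative sketch only;
# assumes an already-connected `BinanceHttpClient` named `client` and a
# `LiveClock` instance):
#
#     wallet_api = BinanceSpotWalletHttpAPI(client=client, clock=LiveClock())
#     fees = await wallet_api.query_spot_trade_fees(symbol="BTCUSDT")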
-# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal - -from nautilus_trader.adapters.binance.spot.schemas.account import BinanceSpotBalanceInfo -from nautilus_trader.adapters.binance.spot.schemas.user import BinanceSpotBalance -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.objects import AccountBalance -from nautilus_trader.model.objects import Money - - -def parse_account_balances_ws(raw_balances: list[BinanceSpotBalance]) -> list[AccountBalance]: - balances: list[AccountBalance] = [] - for b in raw_balances: - currency = Currency.from_str(b.a) - free = Decimal(b.f) - locked = Decimal(b.l) - total: Decimal = free + locked - - balance = AccountBalance( - total=Money(total, currency), - locked=Money(locked, currency), - free=Money(free, currency), - ) - balances.append(balance) - - return balances - - -def parse_account_balances_http(raw_balances: list[BinanceSpotBalanceInfo]) -> list[AccountBalance]: - balances: list[AccountBalance] = [] - for b in raw_balances: - currency = Currency.from_str(b.asset) - free = Decimal(b.free) - locked = Decimal(b.locked) - total: Decimal = free + locked - - balance = AccountBalance( - total=Money(total, currency), - locked=Money(locked, currency), - free=Money(free, currency), - ) - balances.append(balance) - - return balances diff --git a/nautilus_trader/adapters/binance/spot/parsing/data.py b/nautilus_trader/adapters/binance/spot/parsing/data.py deleted file mode 100644 index 3d078a6b82ad..000000000000 --- a/nautilus_trader/adapters/binance/spot/parsing/data.py +++ /dev/null @@ -1,170 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal -from typing import Optional - -import msgspec - -from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE -from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotOrderBookDepthData -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotSymbolInfo -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotTradeData -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSymbolFilter -from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFees -from nautilus_trader.core.correctness import PyCondition -from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.data.tick import TradeTick -from nautilus_trader.model.enums import AggressorSide -from nautilus_trader.model.enums import BookType -from nautilus_trader.model.enums import CurrencyType -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import Symbol -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.instruments.base import Instrument -from nautilus_trader.model.instruments.currency_pair import CurrencyPair -from nautilus_trader.model.objects import PRICE_MAX -from nautilus_trader.model.objects import PRICE_MIN -from nautilus_trader.model.objects import QUANTITY_MAX -from nautilus_trader.model.objects import QUANTITY_MIN -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity -from nautilus_trader.model.orderbook.data import OrderBookSnapshot - - -def parse_spot_instrument_http( - symbol_info: BinanceSpotSymbolInfo, - fees: Optional[BinanceSpotTradeFees], - ts_event: int, - ts_init: int, -) -> Instrument: - # Create base asset - base_currency = Currency( - code=symbol_info.baseAsset, - precision=symbol_info.baseAssetPrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.baseAsset, - currency_type=CurrencyType.CRYPTO, - ) - - # Create quote asset - quote_currency = Currency( - code=symbol_info.quoteAsset, - precision=symbol_info.quoteAssetPrecision, - iso4217=0, # Currently undetermined for crypto assets - name=symbol_info.quoteAsset, - currency_type=CurrencyType.CRYPTO, - ) - - native_symbol = Symbol(symbol_info.symbol) - instrument_id = InstrumentId(symbol=native_symbol, venue=BINANCE_VENUE) - - # Parse instrument filters - filters: dict[BinanceSymbolFilterType, BinanceSymbolFilter] = { - f.filterType: f for f in symbol_info.filters - } - price_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.PRICE_FILTER) - lot_size_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.LOT_SIZE) - min_notional_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.MIN_NOTIONAL) - # market_lot_size_filter = symbol_filters.get("MARKET_LOT_SIZE") - - tick_size = price_filter.tickSize.rstrip("0") - step_size = lot_size_filter.stepSize.rstrip("0") - PyCondition.in_range(float(tick_size), PRICE_MIN, PRICE_MAX, "tick_size") - PyCondition.in_range(float(step_size), QUANTITY_MIN, QUANTITY_MAX, "step_size") - - price_precision = abs(Decimal(tick_size).as_tuple().exponent) - size_precision = 
abs(Decimal(step_size).as_tuple().exponent) - price_increment = Price.from_str(tick_size) - size_increment = Quantity.from_str(step_size) - lot_size = Quantity.from_str(step_size) - - PyCondition.in_range(float(lot_size_filter.maxQty), QUANTITY_MIN, QUANTITY_MAX, "maxQty") - PyCondition.in_range(float(lot_size_filter.minQty), QUANTITY_MIN, QUANTITY_MAX, "minQty") - max_quantity = Quantity(float(lot_size_filter.maxQty), precision=size_precision) - min_quantity = Quantity(float(lot_size_filter.minQty), precision=size_precision) - min_notional = None - if filters.get(BinanceSymbolFilterType.MIN_NOTIONAL): - min_notional = Money(min_notional_filter.minNotional, currency=quote_currency) - max_price = Price(min(float(price_filter.maxPrice), 4294967296.0), precision=price_precision) - min_price = Price(max(float(price_filter.minPrice), 0.0), precision=price_precision) - - # Parse fees - maker_fee: Decimal = Decimal(0) - taker_fee: Decimal = Decimal(0) - if fees: - maker_fee = Decimal(fees.makerCommission) - taker_fee = Decimal(fees.takerCommission) - - # Create instrument - return CurrencyPair( - instrument_id=instrument_id, - native_symbol=native_symbol, - base_currency=base_currency, - quote_currency=quote_currency, - price_precision=price_precision, - size_precision=size_precision, - price_increment=price_increment, - size_increment=size_increment, - lot_size=lot_size, - max_quantity=max_quantity, - min_quantity=min_quantity, - max_notional=None, - min_notional=min_notional, - max_price=max_price, - min_price=min_price, - margin_init=Decimal(0), - margin_maint=Decimal(0), - maker_fee=maker_fee, - taker_fee=taker_fee, - ts_event=ts_event, - ts_init=ts_init, - info=msgspec.json.decode(msgspec.json.encode(symbol_info)), - ) - - -def parse_spot_book_snapshot( - instrument_id: InstrumentId, - data: BinanceSpotOrderBookDepthData, - ts_init: int, -) -> OrderBookSnapshot: - return OrderBookSnapshot( - instrument_id=instrument_id, - book_type=BookType.L2_MBP, - bids=[[float(o[0]), float(o[1])] for o in data.bids], - asks=[[float(o[0]), float(o[1])] for o in data.asks], - ts_event=ts_init, - ts_init=ts_init, - sequence=data.lastUpdateId, - ) - - -def parse_spot_trade_tick_ws( - instrument_id: InstrumentId, - data: BinanceSpotTradeData, - ts_init: int, -) -> TradeTick: - return TradeTick( - instrument_id=instrument_id, - price=Price.from_str(data.p), - size=Quantity.from_str(data.q), - aggressor_side=AggressorSide.SELLER if data.m else AggressorSide.BUYER, - trade_id=TradeId(str(data.t)), - ts_event=millis_to_nanos(data.T), - ts_init=ts_init, - ) diff --git a/nautilus_trader/adapters/binance/spot/parsing/execution.py b/nautilus_trader/adapters/binance/spot/parsing/execution.py deleted file mode 100644 index aa2b275222d3..000000000000 --- a/nautilus_trader/adapters/binance/spot/parsing/execution.py +++ /dev/null @@ -1,185 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -from decimal import Decimal -from typing import Any - -from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus -from nautilus_trader.adapters.binance.spot.enums import BinanceSpotOrderType -from nautilus_trader.core.datetime import millis_to_nanos -from nautilus_trader.core.uuid import UUID4 -from nautilus_trader.execution.reports import OrderStatusReport -from nautilus_trader.execution.reports import TradeReport -from nautilus_trader.model.currency import Currency -from nautilus_trader.model.enums import LiquiditySide -from nautilus_trader.model.enums import OrderSide -from nautilus_trader.model.enums import OrderStatus -from nautilus_trader.model.enums import OrderType -from nautilus_trader.model.enums import TimeInForce -from nautilus_trader.model.enums import TriggerType -from nautilus_trader.model.identifiers import AccountId -from nautilus_trader.model.identifiers import ClientOrderId -from nautilus_trader.model.identifiers import InstrumentId -from nautilus_trader.model.identifiers import TradeId -from nautilus_trader.model.identifiers import VenueOrderId -from nautilus_trader.model.objects import AccountBalance -from nautilus_trader.model.objects import Money -from nautilus_trader.model.objects import Price -from nautilus_trader.model.objects import Quantity -from nautilus_trader.model.orders.base import Order - - -def parse_balances( - raw_balances: list[dict[str, str]], - asset_key: str, - free_key: str, - locked_key: str, -) -> list[AccountBalance]: - parsed_balances: dict[Currency, tuple[Decimal, Decimal, Decimal]] = {} - for b in raw_balances: - currency = Currency.from_str(b[asset_key]) - free = Decimal(b[free_key]) - locked = Decimal(b[locked_key]) - total: Decimal = free + locked - parsed_balances[currency] = (total, locked, free) - - balances: list[AccountBalance] = [ - AccountBalance( - total=Money(values[0], currency), - locked=Money(values[1], currency), - free=Money(values[2], currency), - ) - for currency, values in parsed_balances.items() - ] - - return balances - - -def parse_time_in_force(time_in_force: str) -> TimeInForce: - if time_in_force == "GTX": - return TimeInForce.GTC - else: - return TimeInForce[time_in_force] - - -def parse_order_status(status: BinanceOrderStatus) -> OrderStatus: - if status == BinanceOrderStatus.NEW: - return OrderStatus.ACCEPTED - elif status == BinanceOrderStatus.CANCELED: - return OrderStatus.CANCELED - elif status == BinanceOrderStatus.PARTIALLY_FILLED: - return OrderStatus.PARTIALLY_FILLED - elif status == BinanceOrderStatus.FILLED: - return OrderStatus.FILLED - elif status == BinanceOrderStatus.EXPIRED: - return OrderStatus.EXPIRED - else: - raise RuntimeError( # pragma: no cover (design-time error) - f"unrecognized order status, was {status}", # pragma: no cover - ) - - -def parse_order_type(order_type: BinanceSpotOrderType) -> OrderType: - if order_type == BinanceSpotOrderType.STOP: - return OrderType.STOP_MARKET - elif order_type == BinanceSpotOrderType.STOP_LOSS: - return OrderType.STOP_MARKET - elif order_type == BinanceSpotOrderType.STOP_LOSS_LIMIT: - return OrderType.STOP_LIMIT - elif order_type == BinanceSpotOrderType.TAKE_PROFIT: - return OrderType.LIMIT - elif order_type == BinanceSpotOrderType.TAKE_PROFIT_LIMIT: - return OrderType.STOP_LIMIT - elif order_type == 
BinanceSpotOrderType.LIMIT_MAKER: - return OrderType.LIMIT - elif order_type == BinanceSpotOrderType.LIMIT: - return OrderType.LIMIT - else: - return OrderType.MARKET - - -def binance_order_type(order: Order) -> BinanceSpotOrderType: - if order.order_type == OrderType.MARKET: - return BinanceSpotOrderType.MARKET - elif order.order_type == OrderType.LIMIT: - if order.is_post_only: - return BinanceSpotOrderType.LIMIT_MAKER - else: - return BinanceSpotOrderType.LIMIT - elif order.order_type == OrderType.STOP_LIMIT: - return BinanceSpotOrderType.STOP_LOSS_LIMIT - elif order.order_type == OrderType.LIMIT_IF_TOUCHED: - return BinanceSpotOrderType.TAKE_PROFIT_LIMIT - else: - raise RuntimeError("invalid `OrderType`") # pragma: no cover (design-time error) # noqa - - -def parse_order_report_http( - account_id: AccountId, - instrument_id: InstrumentId, - data: dict[str, Any], - report_id: UUID4, - ts_init: int, -) -> OrderStatusReport: - client_id_str = data.get("clientOrderId") - order_type = data["type"].upper() - price = data.get("price") - trigger_price = Decimal(data["stopPrice"]) - avg_px = Decimal(data["price"]) - return OrderStatusReport( - account_id=account_id, - instrument_id=instrument_id, - client_order_id=ClientOrderId(client_id_str) if client_id_str is not None else None, - venue_order_id=VenueOrderId(str(data["orderId"])), - order_side=OrderSide[data["side"].upper()], - order_type=parse_order_type(order_type), - time_in_force=parse_time_in_force(data["timeInForce"].upper()), - order_status=parse_order_status(BinanceOrderStatus(data["status"].upper())), - price=Price.from_str(price) if price is not None else None, - quantity=Quantity.from_str(data["origQty"]), - filled_qty=Quantity.from_str(data["executedQty"]), - avg_px=avg_px if avg_px > 0 else None, - post_only=order_type == "LIMIT_MAKER", - reduce_only=False, - report_id=report_id, - ts_accepted=millis_to_nanos(data["time"]), - ts_last=millis_to_nanos(data["updateTime"]), - ts_init=ts_init, - trigger_price=Price.from_str(str(trigger_price)) if trigger_price > 0 else None, - trigger_type=TriggerType.LAST_TRADE if trigger_price > 0 else TriggerType.NO_TRIGGER, - ) - - -def parse_trade_report_http( - account_id: AccountId, - instrument_id: InstrumentId, - data: dict[str, Any], - report_id: UUID4, - ts_init: int, -) -> TradeReport: - return TradeReport( - account_id=account_id, - instrument_id=instrument_id, - venue_order_id=VenueOrderId(str(data["orderId"])), - trade_id=TradeId(str(data["id"])), - order_side=OrderSide.BUY if data["isBuyer"] else OrderSide.SELL, - last_qty=Quantity.from_str(data["qty"]), - last_px=Price.from_str(data["price"]), - commission=Money(data["commission"], Currency.from_str(data["commissionAsset"])), - liquidity_side=LiquiditySide.MAKER if data["isMaker"] else LiquiditySide.TAKER, - report_id=report_id, - ts_event=millis_to_nanos(data["time"]), - ts_init=ts_init, - ) diff --git a/nautilus_trader/adapters/binance/spot/providers.py b/nautilus_trader/adapters/binance/spot/providers.py index 3326dd9bf1a6..a7e50d34f5c0 100644 --- a/nautilus_trader/adapters/binance/spot/providers.py +++ b/nautilus_trader/adapters/binance/spot/providers.py @@ -13,25 +13,38 @@ # limitations under the License. 
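# Worked example of the precision derivation used by `_parse_instrument` below
# (values illustrative): a tick-size string has its trailing zeros stripped,
# and the absolute Decimal exponent gives the precision:
#
#     tick_size = "0.00100000".rstrip("0")                          # "0.001"
#     precision = abs(int(Decimal(tick_size).as_tuple().exponent))  # 3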
# ------------------------------------------------------------------------------------------------- -import time +from decimal import Decimal from typing import Optional +import msgspec + from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType +from nautilus_trader.adapters.binance.common.schemas.market import BinanceSymbolFilter +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol from nautilus_trader.adapters.binance.http.client import BinanceHttpClient from nautilus_trader.adapters.binance.http.error import BinanceClientError from nautilus_trader.adapters.binance.spot.http.market import BinanceSpotMarketHttpAPI from nautilus_trader.adapters.binance.spot.http.wallet import BinanceSpotWalletHttpAPI -from nautilus_trader.adapters.binance.spot.parsing.data import parse_spot_instrument_http -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotExchangeInfo from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotSymbolInfo -from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFees +from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFee +from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.logging import Logger from nautilus_trader.common.providers import InstrumentProvider from nautilus_trader.config import InstrumentProviderConfig from nautilus_trader.core.correctness import PyCondition from nautilus_trader.core.datetime import millis_to_nanos from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import Symbol +from nautilus_trader.model.instruments.currency_pair import CurrencyPair +from nautilus_trader.model.objects import PRICE_MAX +from nautilus_trader.model.objects import PRICE_MIN +from nautilus_trader.model.objects import QUANTITY_MAX +from nautilus_trader.model.objects import QUANTITY_MIN +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity class BinanceSpotInstrumentProvider(InstrumentProvider): @@ -52,6 +65,7 @@ def __init__( self, client: BinanceHttpClient, logger: Logger, + clock: LiveClock, account_type: BinanceAccountType = BinanceAccountType.SPOT, config: Optional[InstrumentProviderConfig] = None, ): @@ -63,36 +77,47 @@ def __init__( self._client = client self._account_type = account_type + self._clock = clock - self._http_wallet = BinanceSpotWalletHttpAPI(self._client) - self._http_market = BinanceSpotMarketHttpAPI(self._client) + self._http_wallet = BinanceSpotWalletHttpAPI( + self._client, + clock=self._clock, + account_type=account_type, + ) + self._http_market = BinanceSpotMarketHttpAPI(self._client, account_type=account_type) self._log_warnings = config.log_warnings if config else True + self._decoder = msgspec.json.Decoder() + self._encoder = msgspec.json.Encoder() + async def load_all_async(self, filters: Optional[dict] = None) -> None: filters_str = "..." if not filters else f" with filters {filters}..." 
        self._log.info(f"Loading all instruments{filters_str}")

        # Get current commission rates
-        if self._client.base_url.__contains__("testnet.binance.vision"):
-            fees: dict[str, BinanceSpotTradeFees] = {}
-        else:
-            try:
-                fee_res: list[BinanceSpotTradeFees] = await self._http_wallet.trade_fees()
-                fees = {s.symbol: s for s in fee_res}
-            except BinanceClientError as e:
-                self._log.error(
-                    "Cannot load instruments: API key authentication failed "
-                    f"(this is needed to fetch the applicable account fee tier). {e.message}",
-                )
-                return
+        try:
+            # response = await self._http_wallet.query_spot_trade_fees()
+            # fees_dict: dict[str, BinanceSpotTradeFee] = {fee.symbol: fee for fee in response}
+            # TODO: Requests for testnet seem to fail auth
+            self._log.warning(
+                "Currently not requesting actual trade fees. "
+                "All instruments will have zero fees.",
+            )
+            fees_dict: dict[str, BinanceSpotTradeFee] = {}
+        except BinanceClientError as e:
+            self._log.error(
+                "Cannot load instruments: API key authentication failed "
+                f"(this is needed to fetch the applicable account fee tier). {e.message}",
+            )
+            return

        # Get exchange info for all assets
-        exchange_info: BinanceSpotExchangeInfo = await self._http_market.exchange_info()
+        exchange_info = await self._http_market.query_spot_exchange_info()
        for symbol_info in exchange_info.symbols:
            self._parse_instrument(
                symbol_info=symbol_info,
-                fees=fees.get(symbol_info.symbol),
+                fee=fees_dict.get(symbol_info.symbol),
                ts_event=millis_to_nanos(exchange_info.serverTime),
            )

@@ -114,8 +139,14 @@ async def load_ids_async(

        # Get current commission rates
        try:
-            fee_res: list[BinanceSpotTradeFees] = await self._http_wallet.trade_fees()
-            fees: dict[str, BinanceSpotTradeFees] = {s.symbol: s for s in fee_res}
+            # response = await self._http_wallet.query_spot_trade_fees()
+            # fees_dict: dict[str, BinanceSpotTradeFee] = {fee.symbol: fee for fee in response}
+            # TODO: Requests for testnet seem to fail auth
+            self._log.warning(
+                "Currently not requesting actual trade fees. "
+                "All instruments will have zero fees.",
+            )
+            fees_dict: dict[str, BinanceSpotTradeFee] = {}
        except BinanceClientError as e:
            self._log.error(
                "Cannot load instruments: API key authentication failed "
@@ -124,16 +155,19 @@ async def load_ids_async(
            return

        # Extract all symbol strings
-        symbols: list[str] = [instrument_id.symbol.value for instrument_id in instrument_ids]
-
+        symbols = [
+            str(BinanceSymbol(instrument_id.symbol.value)) for instrument_id in instrument_ids
+        ]
        # Get exchange info for all assets
-        exchange_info: BinanceSpotExchangeInfo = await self._http_market.exchange_info(
-            symbols=symbols,
-        )
-        for symbol_info in exchange_info.symbols:
+        exchange_info = await self._http_market.query_spot_exchange_info(symbols=symbols)
+        symbol_info_dict: dict[str, BinanceSpotSymbolInfo] = {
+            info.symbol: info for info in exchange_info.symbols
+        }
+
+        for symbol in symbols:
            self._parse_instrument(
-                symbol_info=symbol_info,
-                fees=fees[symbol_info.symbol],
+                symbol_info=symbol_info_dict[symbol],
+                fee=fees_dict.get(symbol),
                ts_event=millis_to_nanos(exchange_info.serverTime),
            )

@@ -144,13 +178,18 @@ async def load_async(self, instrument_id: InstrumentId, filters: Optional[dict]
        filters_str = "..." if not filters else f" with filters {filters}..."
        self._log.debug(f"Loading instrument {instrument_id}{filters_str}.")

-        symbol = instrument_id.symbol.value
+        symbol = str(BinanceSymbol(instrument_id.symbol.value))

        # Get current commission rates
        try:
-            fees: BinanceSpotTradeFees = await self._http_wallet.trade_fee(
-                symbol=instrument_id.symbol.value,
+            # trade_fees = await self._http_wallet.query_spot_trade_fees(symbol=symbol)
+            # fees_dict: dict[str, BinanceSpotTradeFee] = {fee.symbol: fee for fee in trade_fees}
+            # TODO: Requests for testnet seem to fail auth
+            self._log.warning(
+                "Currently not requesting actual trade fees. "
+                "All instruments will have zero fees.",
            )
+            fees_dict: dict[str, BinanceSpotTradeFee] = {}
        except BinanceClientError as e:
            self._log.error(
                "Cannot load instruments: API key authentication failed "
@@ -159,29 +198,108 @@ async def load_async(self, instrument_id: InstrumentId, filters: Optional[dict]
            return

        # Get exchange info for asset
-        exchange_info: BinanceSpotExchangeInfo = await self._http_market.exchange_info(
-            symbol=symbol,
+        exchange_info = await self._http_market.query_spot_exchange_info(symbol=symbol)
+        symbol_info_dict: dict[str, BinanceSpotSymbolInfo] = {
+            info.symbol: info for info in exchange_info.symbols
+        }
+
+        self._parse_instrument(
+            symbol_info=symbol_info_dict[symbol],
+            fee=fees_dict.get(symbol),
+            ts_event=millis_to_nanos(exchange_info.serverTime),
        )
-        for symbol_info in exchange_info.symbols:
-            self._parse_instrument(
-                symbol_info=symbol_info,
-                fees=fees,
-                ts_event=millis_to_nanos(exchange_info.serverTime),
-            )

    def _parse_instrument(
        self,
        symbol_info: BinanceSpotSymbolInfo,
-        fees: Optional[BinanceSpotTradeFees],
+        fee: Optional[BinanceSpotTradeFee],
        ts_event: int,
    ) -> None:
-        ts_init = time.time_ns()
+        ts_init = self._clock.timestamp_ns()

        try:
-            instrument = parse_spot_instrument_http(
-                symbol_info=symbol_info,
-                fees=fees,
+            base_currency = symbol_info.parse_to_base_asset()
+            quote_currency = symbol_info.parse_to_quote_asset()
+
+            native_symbol = Symbol(symbol_info.symbol)
+            instrument_id = InstrumentId(symbol=native_symbol, venue=BINANCE_VENUE)
+
+            # Parse instrument filters
+            filters: dict[BinanceSymbolFilterType, BinanceSymbolFilter] = {
+                f.filterType: f for f in symbol_info.filters
+            }
+            price_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.PRICE_FILTER)
+            lot_size_filter: BinanceSymbolFilter = filters.get(BinanceSymbolFilterType.LOT_SIZE)
+            min_notional_filter: BinanceSymbolFilter = filters.get(
+                BinanceSymbolFilterType.MIN_NOTIONAL,
+            )
+            # market_lot_size_filter = symbol_filters.get("MARKET_LOT_SIZE")
+
+            tick_size = price_filter.tickSize.rstrip("0")
+            step_size = lot_size_filter.stepSize.rstrip("0")
+            PyCondition.in_range(float(tick_size), PRICE_MIN, PRICE_MAX, "tick_size")
+            PyCondition.in_range(float(step_size), QUANTITY_MIN, QUANTITY_MAX, "step_size")
+
+            price_precision = abs(int(Decimal(tick_size).as_tuple().exponent))
+            size_precision = abs(int(Decimal(step_size).as_tuple().exponent))
+            price_increment = Price.from_str(tick_size)
+            size_increment = Quantity.from_str(step_size)
+            lot_size = Quantity.from_str(step_size)
+
+            PyCondition.in_range(
+                float(lot_size_filter.maxQty),
+                QUANTITY_MIN,
+                QUANTITY_MAX,
+                "maxQty",
+            )
+            PyCondition.in_range(
+                float(lot_size_filter.minQty),
+                QUANTITY_MIN,
+                QUANTITY_MAX,
+                "minQty",
+            )
+            max_quantity = Quantity(float(lot_size_filter.maxQty), precision=size_precision)
+            min_quantity = Quantity(float(lot_size_filter.minQty), precision=size_precision)
+            min_notional = None
+            if
filters.get(BinanceSymbolFilterType.MIN_NOTIONAL): + min_notional = Money(min_notional_filter.minNotional, currency=quote_currency) + max_price = Price( + min(float(price_filter.maxPrice), 4294967296.0), + precision=price_precision, + ) + min_price = Price(max(float(price_filter.minPrice), 0.0), precision=price_precision) + + # Parse fees + maker_fee: Decimal = Decimal(0) + taker_fee: Decimal = Decimal(0) + if fee: + assert fee.symbol == symbol_info.symbol + maker_fee = Decimal(fee.makerCommission) + taker_fee = Decimal(fee.takerCommission) + + # Create instrument + instrument = CurrencyPair( + instrument_id=instrument_id, + native_symbol=native_symbol, + base_currency=base_currency, + quote_currency=quote_currency, + price_precision=price_precision, + size_precision=size_precision, + price_increment=price_increment, + size_increment=size_increment, + lot_size=lot_size, + max_quantity=max_quantity, + min_quantity=min_quantity, + max_notional=None, + min_notional=min_notional, + max_price=max_price, + min_price=min_price, + margin_init=Decimal(0), + margin_maint=Decimal(0), + maker_fee=maker_fee, + taker_fee=taker_fee, ts_event=min(ts_event, ts_init), ts_init=ts_init, + info=self._decoder.decode(self._encoder.encode(symbol_info)), ) self.add_currency(currency=instrument.base_currency) self.add_currency(currency=instrument.quote_currency) diff --git a/nautilus_trader/adapters/binance/spot/schemas/account.py b/nautilus_trader/adapters/binance/spot/schemas/account.py index 7118ff191fbb..59c3c040653f 100644 --- a/nautilus_trader/adapters/binance/spot/schemas/account.py +++ b/nautilus_trader/adapters/binance/spot/schemas/account.py @@ -13,9 +13,16 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- +from decimal import Decimal +from typing import Optional + import msgspec from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.common.schemas.account import BinanceOrder +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.objects import AccountBalance +from nautilus_trader.model.objects import Money ################################################################################ @@ -23,7 +30,7 @@ ################################################################################ -class BinanceSpotBalanceInfo(msgspec.Struct): +class BinanceSpotBalanceInfo(msgspec.Struct, frozen=True): """ HTTP response 'inner struct' from `Binance Spot/Margin` GET /api/v3/account (HMAC SHA256). """ @@ -32,8 +39,19 @@ class BinanceSpotBalanceInfo(msgspec.Struct): free: str locked: str + def parse_to_account_balance(self) -> AccountBalance: + currency = Currency.from_str(self.asset) + free = Decimal(self.free) + locked = Decimal(self.locked) + total: Decimal = free + locked + return AccountBalance( + total=Money(total, currency), + locked=Money(locked, currency), + free=Money(free, currency), + ) + -class BinanceSpotAccountInfo(msgspec.Struct): +class BinanceSpotAccountInfo(msgspec.Struct, frozen=True): """ HTTP response from `Binance Spot/Margin` GET /api/v3/account (HMAC SHA256). 
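
    A minimal decode-and-parse sketch (illustrative; assumes `raw` holds the
    JSON response bytes):

        info = msgspec.json.Decoder(BinanceSpotAccountInfo).decode(raw)
        balances = info.parse_to_account_balances()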
""" @@ -49,3 +67,24 @@ class BinanceSpotAccountInfo(msgspec.Struct): accountType: BinanceAccountType balances: list[BinanceSpotBalanceInfo] permissions: list[str] + + def parse_to_account_balances(self) -> list[AccountBalance]: + return [balance.parse_to_account_balance() for balance in self.balances] + + +class BinanceSpotOrderOco(msgspec.Struct, frozen=True): + """ + HTTP response from `Binance Spot/Margin` GET /api/v3/orderList (HMAC SHA256). + HTTP response from `Binance Spot/Margin` POST /api/v3/order/oco (HMAC SHA256). + HTTP response from `Binance Spot/Margin` DELETE /api/v3/orderList (HMAC SHA256). + """ + + orderListId: int + contingencyType: str + listStatusType: str + listOrderStatus: str + listClientOrderId: str + transactionTime: int + symbol: str + orders: Optional[list[BinanceOrder]] = None # Included for ACK response type + orderReports: Optional[list[BinanceOrder]] = None # Included for FULL & RESPONSE types diff --git a/nautilus_trader/adapters/binance/spot/schemas/market.py b/nautilus_trader/adapters/binance/spot/schemas/market.py index b35fa3e60f72..17e2f96b8cd9 100644 --- a/nautilus_trader/adapters/binance/spot/schemas/market.py +++ b/nautilus_trader/adapters/binance/spot/schemas/market.py @@ -13,16 +13,26 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from typing import Optional import msgspec -from nautilus_trader.adapters.binance.common.enums import BinanceExchangeFilterType -from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitInterval -from nautilus_trader.adapters.binance.common.enums import BinanceRateLimitType -from nautilus_trader.adapters.binance.common.enums import BinanceSymbolFilterType -from nautilus_trader.adapters.binance.spot.enums import BinanceSpotOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.schemas.market import BinanceExchangeFilter +from nautilus_trader.adapters.binance.common.schemas.market import BinanceOrderBookDelta +from nautilus_trader.adapters.binance.common.schemas.market import BinanceRateLimit +from nautilus_trader.adapters.binance.common.schemas.market import BinanceSymbolFilter from nautilus_trader.adapters.binance.spot.enums import BinanceSpotPermissions +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.enums import AggressorSide +from nautilus_trader.model.enums import BookType +from nautilus_trader.model.enums import CurrencyType +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity +from nautilus_trader.model.orderbook.data import OrderBookSnapshot ################################################################################ @@ -30,51 +40,8 @@ ################################################################################ -class BinanceExchangeFilter(msgspec.Struct): - """HTTP response 'inner struct' from `Binance Spot/Margin` GET /fapi/v1/exchangeInfo.""" - - filterType: BinanceExchangeFilterType - maxNumOrders: Optional[int] = None - maxNumAlgoOrders: Optional[int] = None - - -class BinanceSymbolFilter(msgspec.Struct): - """HTTP response 'inner struct' from `Binance Spot/Margin` GET /fapi/v1/exchangeInfo.""" - - filterType: 
BinanceSymbolFilterType
-    minPrice: Optional[str] = None
-    maxPrice: Optional[str] = None
-    tickSize: Optional[str] = None
-    multiplierUp: Optional[str] = None
-    multiplierDown: Optional[str] = None
-    avgPriceMins: Optional[int] = None
-    bidMultiplierUp: Optional[str] = None
-    bidMultiplierDown: Optional[str] = None
-    askMultiplierUp: Optional[str] = None
-    askMultiplierDown: Optional[str] = None
-    minQty: Optional[str] = None
-    maxQty: Optional[str] = None
-    stepSize: Optional[str] = None
-    minNotional: Optional[str] = None
-    applyToMarket: Optional[bool] = None
-    limit: Optional[int] = None
-    maxNumOrders: Optional[int] = None
-    maxNumAlgoOrders: Optional[int] = None
-    maxNumIcebergOrders: Optional[int] = None
-    maxPosition: Optional[str] = None
-
-
-class BinanceRateLimit(msgspec.Struct):
-    """HTTP response 'inner struct' from `Binance Spot/Margin` GET /fapi/v1/exchangeInfo."""
-
-    rateLimitType: BinanceRateLimitType
-    interval: BinanceRateLimitInterval
-    intervalNum: int
-    limit: int
-
-
-class BinanceSpotSymbolInfo(msgspec.Struct):
-    """HTTP response 'inner struct' from `Binance Spot/Margin` GET /fapi/v1/exchangeInfo."""
+class BinanceSpotSymbolInfo(msgspec.Struct, frozen=True):
+    """HTTP response 'inner struct' from `Binance Spot/Margin` GET /api/v3/exchangeInfo."""

    symbol: str
    status: str
@@ -83,7 +50,7 @@ class BinanceSpotSymbolInfo(msgspec.Struct):
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int
-    orderTypes: list[BinanceSpotOrderType]
+    orderTypes: list[BinanceOrderType]
    icebergAllowed: bool
    ocoAllowed: bool
    quoteOrderQtyMarketAllowed: bool
@@ -93,9 +60,27 @@ class BinanceSpotSymbolInfo(msgspec.Struct):
    filters: list[BinanceSymbolFilter]
    permissions: list[BinanceSpotPermissions]

+    def parse_to_base_asset(self) -> Currency:
+        return Currency(
+            code=self.baseAsset,
+            precision=self.baseAssetPrecision,
+            iso4217=0,  # Currently undetermined for crypto assets
+            name=self.baseAsset,
+            currency_type=CurrencyType.CRYPTO,
+        )
+
+    def parse_to_quote_asset(self) -> Currency:
+        return Currency(
+            code=self.quoteAsset,
+            precision=self.quoteAssetPrecision,
+            iso4217=0,  # Currently undetermined for crypto assets
+            name=self.quoteAsset,
+            currency_type=CurrencyType.CRYPTO,
+        )

-class BinanceSpotExchangeInfo(msgspec.Struct):
-    """HTTP response from `Binance Spot/Margin` GET /fapi/v1/exchangeInfo."""
+
+class BinanceSpotExchangeInfo(msgspec.Struct, frozen=True):
+    """HTTP response from `Binance Spot/Margin` GET /api/v3/exchangeInfo."""

    timezone: str
    serverTime: int
@@ -104,12 +89,11 @@ class BinanceSpotExchangeInfo(msgspec.Struct):
    symbols: list[BinanceSpotSymbolInfo]


-class BinanceSpotOrderBookDepthData(msgspec.Struct):
-    """HTTP response from `Binance` GET /fapi/v1/depth."""
+class BinanceSpotAvgPrice(msgspec.Struct, frozen=True):
+    """HTTP response from `Binance Spot/Margin` GET /api/v3/avgPrice."""

-    lastUpdateId: int
-    bids: list[tuple[str, str]]
-    asks: list[tuple[str, str]]
+    mins: int
+    price: str


################################################################################
@@ -117,11 +101,34 @@
################################################################################


-class BinanceSpotOrderBookMsg(msgspec.Struct):
-    """WebSocket message."""
+class BinanceSpotOrderBookPartialDepthData(msgspec.Struct):
+    """WebSocket message 'inner struct' for `Binance Spot/Margin` Partial Book Depth Streams."""
+
+    lastUpdateId: int
+    bids: list[BinanceOrderBookDelta]
+    asks: list[BinanceOrderBookDelta]
+
+    def parse_to_order_book_snapshot(
+        self,
instrument_id: InstrumentId, + ts_init: int, + ) -> OrderBookSnapshot: + return OrderBookSnapshot( + instrument_id=instrument_id, + book_type=BookType.L2_MBP, + bids=[[float(o.price), float(o.size)] for o in self.bids], + asks=[[float(o.price), float(o.size)] for o in self.asks], + ts_event=ts_init, + ts_init=ts_init, + sequence=self.lastUpdateId, + ) + + +class BinanceSpotOrderBookPartialDepthMsg(msgspec.Struct): + """WebSocket message for 'Binance Spot/Margin' Partial Book Depth Streams.""" stream: str - data: BinanceSpotOrderBookDepthData + data: BinanceSpotOrderBookPartialDepthData class BinanceSpotTradeData(msgspec.Struct): @@ -153,6 +160,21 @@ class BinanceSpotTradeData(msgspec.Struct): T: int # Trade time m: bool # Is the buyer the market maker? + def parse_to_trade_tick( + self, + instrument_id: InstrumentId, + ts_init: int, + ) -> TradeTick: + return TradeTick( + instrument_id=instrument_id, + price=Price.from_str(self.p), + size=Quantity.from_str(self.q), + aggressor_side=AggressorSide.SELLER if self.m else AggressorSide.BUYER, + trade_id=TradeId(str(self.t)), + ts_event=millis_to_nanos(self.T), + ts_init=ts_init, + ) + class BinanceSpotTradeMsg(msgspec.Struct): """WebSocket message from `Binance` Trade Streams.""" diff --git a/nautilus_trader/adapters/binance/spot/schemas/user.py b/nautilus_trader/adapters/binance/spot/schemas/user.py index c9ae88b0b3b0..e84656fd95f0 100644 --- a/nautilus_trader/adapters/binance/spot/schemas/user.py +++ b/nautilus_trader/adapters/binance/spot/schemas/user.py @@ -13,16 +13,37 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- +from decimal import Decimal from typing import Optional import msgspec +from nautilus_trader.adapters.binance.common.enums import BinanceEnumParser from nautilus_trader.adapters.binance.common.enums import BinanceExecutionType from nautilus_trader.adapters.binance.common.enums import BinanceOrderSide from nautilus_trader.adapters.binance.common.enums import BinanceOrderStatus +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.enums import BinanceTimeInForce +from nautilus_trader.adapters.binance.common.execution import BinanceCommonExecutionClient from nautilus_trader.adapters.binance.spot.enums import BinanceSpotEventType -from nautilus_trader.adapters.binance.spot.enums import BinanceSpotOrderType -from nautilus_trader.adapters.binance.spot.enums import BinanceSpotTimeInForce +from nautilus_trader.core.datetime import millis_to_nanos +from nautilus_trader.core.uuid import UUID4 +from nautilus_trader.execution.reports import OrderStatusReport +from nautilus_trader.model.currency import Currency +from nautilus_trader.model.enums import LiquiditySide +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.enums import OrderStatus +from nautilus_trader.model.enums import TrailingOffsetType +from nautilus_trader.model.enums import TriggerType +from nautilus_trader.model.identifiers import AccountId +from nautilus_trader.model.identifiers import ClientOrderId +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import TradeId +from nautilus_trader.model.identifiers import VenueOrderId +from nautilus_trader.model.objects import AccountBalance +from nautilus_trader.model.objects import Money +from nautilus_trader.model.objects import Price +from nautilus_trader.model.objects import Quantity 
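# A minimal sketch of the intended decode flow for the structs below
# (illustrative; assumes `raw` holds one WebSocket frame's bytes, and that the
# wrapper's `data` field carries the account update message):
#
#     wrapper = msgspec.json.Decoder(BinanceSpotAccountUpdateWrapper).decode(raw)
#     balances = wrapper.data.parse_to_account_balances()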
################################################################################ @@ -30,7 +51,7 @@ ################################################################################ -class BinanceSpotUserMsgData(msgspec.Struct): +class BinanceSpotUserMsgData(msgspec.Struct, frozen=True): """ Inner struct for execution WebSocket messages from `Binance` """ @@ -38,7 +59,7 @@ class BinanceSpotUserMsgData(msgspec.Struct): e: BinanceSpotEventType -class BinanceSpotUserMsgWrapper(msgspec.Struct): +class BinanceSpotUserMsgWrapper(msgspec.Struct, frozen=True): """ Provides a wrapper for execution WebSocket messages from `Binance`. """ @@ -47,15 +68,26 @@ class BinanceSpotUserMsgWrapper(msgspec.Struct): data: BinanceSpotUserMsgData -class BinanceSpotBalance(msgspec.Struct): +class BinanceSpotBalance(msgspec.Struct, frozen=True): """Inner struct for `Binance Spot/Margin` balances.""" a: str # Asset f: str # Free l: str # Locked + def parse_to_account_balance(self) -> AccountBalance: + currency = Currency.from_str(self.a) + free = Decimal(self.f) + locked = Decimal(self.l) + total: Decimal = free + locked + return AccountBalance( + total=Money(total, currency), + locked=Money(locked, currency), + free=Money(free, currency), + ) -class BinanceSpotAccountUpdateMsg(msgspec.Struct): + +class BinanceSpotAccountUpdateMsg(msgspec.Struct, frozen=True): """WebSocket message for `Binance Spot/Margin` Account Update events.""" e: str # Event Type @@ -63,8 +95,20 @@ class BinanceSpotAccountUpdateMsg(msgspec.Struct): u: int # Transaction Time B: list[BinanceSpotBalance] + def parse_to_account_balances(self) -> list[AccountBalance]: + return [balance.parse_to_account_balance() for balance in self.B] + + def handle_account_update(self, exec_client: BinanceCommonExecutionClient): + """Handle BinanceSpotAccountUpdateMsg as payload of outboundAccountPosition.""" + exec_client.generate_account_state( + balances=self.parse_to_account_balances(), + margins=[], + reported=True, + ts_event=millis_to_nanos(self.u), + ) -class BinanceSpotAccountUpdateWrapper(msgspec.Struct): + +class BinanceSpotAccountUpdateWrapper(msgspec.Struct, frozen=True): """WebSocket message wrapper for `Binance Spot/Margin` Account Update events.""" stream: str @@ -82,8 +126,8 @@ class BinanceSpotOrderUpdateData(msgspec.Struct, kw_only=True): s: str # Symbol c: str # Client order ID S: BinanceOrderSide - o: BinanceSpotOrderType - f: BinanceSpotTimeInForce + o: BinanceOrderType + f: BinanceTimeInForce q: str # Original Quantity p: str # Original Price P: str # Stop price @@ -110,8 +154,136 @@ class BinanceSpotOrderUpdateData(msgspec.Struct, kw_only=True): Y: str # Last quote asset transacted quantity (i.e. 
lastPrice * lastQty) Q: str # Quote Order Qty + def parse_to_order_status_report( + self, + account_id: AccountId, + instrument_id: InstrumentId, + client_order_id: ClientOrderId, + venue_order_id: VenueOrderId, + ts_event: int, + ts_init: int, + enum_parser: BinanceEnumParser, + ) -> OrderStatusReport: + price = Price.from_str(self.p) if self.p is not None else None + trigger_price = Price.from_str(self.P) if self.P is not None else None + order_side = OrderSide.BUY if self.S == BinanceOrderSide.BUY else OrderSide.SELL + post_only = self.f == BinanceTimeInForce.GTX + display_qty = ( + Quantity.from_str( + str(Decimal(self.q) - Decimal(self.F)), + ) + if self.F is not None + else None + ) + + return OrderStatusReport( + account_id=account_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + order_side=order_side, + order_type=enum_parser.parse_binance_order_type(self.o), + time_in_force=enum_parser.parse_binance_time_in_force(self.f), + order_status=OrderStatus.ACCEPTED, + price=price, + trigger_price=trigger_price, + trigger_type=TriggerType.LAST_TRADE, + trailing_offset=None, + trailing_offset_type=TrailingOffsetType.NO_TRAILING_OFFSET, + quantity=Quantity.from_str(self.q), + filled_qty=Quantity.from_str(self.z), + display_qty=display_qty, + avg_px=None, + post_only=post_only, + reduce_only=False, + report_id=UUID4(), + ts_accepted=ts_event, + ts_last=ts_event, + ts_init=ts_init, + ) + + def handle_execution_report( + self, + exec_client: BinanceCommonExecutionClient, + ): + """Handle BinanceSpotOrderUpdateData as payload of executionReport event.""" + client_order_id_str: str = self.c + if not client_order_id_str or not client_order_id_str.startswith("O"): + client_order_id_str = self.C + client_order_id = ClientOrderId(client_order_id_str) + ts_event = millis_to_nanos(self.T) + venue_order_id = VenueOrderId(str(self.i)) + instrument_id = exec_client._get_cached_instrument_id(self.s) + strategy_id = exec_client._cache.strategy_id_for_order(client_order_id) + if strategy_id is None: + report = self.parse_to_order_status_report( + account_id=exec_client.account_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ts_init=exec_client._clock.timestamp_ns(), + enum_parser=exec_client._enum_parser, + ) + exec_client._send_order_status_report(report) + elif self.x == BinanceExecutionType.NEW: + exec_client.generate_order_accepted( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.TRADE: + instrument = exec_client._instrument_provider.find(instrument_id=instrument_id) + + # Determine commission + commission_asset: str = self.N + commission_amount: str = self.n + if commission_asset is not None: + commission = Money.from_str(f"{commission_amount} {commission_asset}") + else: + # Binance typically charges commission as base asset or BNB + commission = Money(0, instrument.base_currency) + + exec_client.generate_order_filled( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + venue_position_id=None, # NETTING accounts + trade_id=TradeId(str(self.t)), # Trade ID + order_side=exec_client._enum_parser.parse_binance_order_side(self.S), + order_type=exec_client._enum_parser.parse_binance_order_type(self.o), + last_qty=Quantity.from_str(self.l), + 
last_px=Price.from_str(self.L), + quote_currency=instrument.quote_currency, + commission=commission, + liquidity_side=LiquiditySide.MAKER if self.m else LiquiditySide.TAKER, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.CANCELED: + exec_client.generate_order_canceled( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + elif self.x == BinanceExecutionType.EXPIRED: + exec_client.generate_order_expired( + strategy_id=strategy_id, + instrument_id=instrument_id, + client_order_id=client_order_id, + venue_order_id=venue_order_id, + ts_event=ts_event, + ) + else: + # Event not handled + exec_client._log.warning(f"Received unhandled {self}") + -class BinanceSpotOrderUpdateWrapper(msgspec.Struct): +class BinanceSpotOrderUpdateWrapper(msgspec.Struct, frozen=True): """WebSocket message wrapper for `Binance Spot/Margin` Order Update events.""" stream: str diff --git a/nautilus_trader/adapters/binance/spot/schemas/wallet.py b/nautilus_trader/adapters/binance/spot/schemas/wallet.py index 8b37aa765090..08efafd73481 100644 --- a/nautilus_trader/adapters/binance/spot/schemas/wallet.py +++ b/nautilus_trader/adapters/binance/spot/schemas/wallet.py @@ -21,8 +21,8 @@ ################################################################################ -class BinanceSpotTradeFees(msgspec.Struct): - """HTTP response from `Binance Spot/Margin` GET /sapi/v1/asset/tradeFee (HMAC SHA256).""" +class BinanceSpotTradeFee(msgspec.Struct, frozen=True): + """Schema of a single `Binance Spot/Margin` tradeFee.""" symbol: str makerCommission: str diff --git a/nautilus_trader/adapters/binance/websocket/client.py b/nautilus_trader/adapters/binance/websocket/client.py index 58f7ca23d85a..c780223fb58e 100644 --- a/nautilus_trader/adapters/binance/websocket/client.py +++ b/nautilus_trader/adapters/binance/websocket/client.py @@ -16,7 +16,7 @@ import asyncio from typing import Callable, Optional -from nautilus_trader.adapters.binance.common.functions import format_symbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.logging import Logger from nautilus_trader.network.websocket import WebSocketClient @@ -104,7 +104,7 @@ def subscribe_agg_trades(self, symbol: str): Update Speed: Real-time """ - self._add_stream(f"{format_symbol(symbol).lower()}@aggTrade") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@aggTrade") def subscribe_trades(self, symbol: str): """ @@ -115,7 +115,7 @@ def subscribe_trades(self, symbol: str): Update Speed: Real-time """ - self._add_stream(f"{format_symbol(symbol).lower()}@trade") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@trade") def subscribe_bars(self, symbol: str, interval: str): """ @@ -143,7 +143,7 @@ def subscribe_bars(self, symbol: str, interval: str): Update Speed: 2000ms """ - self._add_stream(f"{format_symbol(symbol).lower()}@kline_{interval}") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@kline_{interval}") def subscribe_mini_ticker(self, symbol: str = None): """ @@ -159,7 +159,7 @@ def subscribe_mini_ticker(self, symbol: str = None): if symbol is None: self._add_stream("!miniTicker@arr") else: - self._add_stream(f"{format_symbol(symbol).lower()}@miniTicker") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@miniTicker") def subscribe_ticker(self, symbol: str = None): """ @@ -175,7 +175,7 @@ def subscribe_ticker(self, symbol: str = None): if symbol 
is None: self._add_stream("!ticker@arr") else: - self._add_stream(f"{format_symbol(symbol).lower()}@ticker") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@ticker") def subscribe_book_ticker(self, symbol: str = None): """ @@ -190,7 +190,7 @@ def subscribe_book_ticker(self, symbol: str = None): if symbol is None: self._add_stream("!bookTicker") else: - self._add_stream(f"{format_symbol(symbol).lower()}@bookTicker") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@bookTicker") def subscribe_partial_book_depth(self, symbol: str, depth: int, speed: int): """ @@ -201,7 +201,7 @@ def subscribe_partial_book_depth(self, symbol: str, depth: int, speed: int): Update Speed: 1000ms or 100ms """ - self._add_stream(f"{format_symbol(symbol).lower()}@depth{depth}@{speed}ms") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@depth{depth}@{speed}ms") def subscribe_diff_book_depth(self, symbol: str, speed: int): """ @@ -212,7 +212,7 @@ def subscribe_diff_book_depth(self, symbol: str, speed: int): Order book price and quantity depth updates used to locally manage an order book. """ - self._add_stream(f"{format_symbol(symbol).lower()}@depth@{speed}ms") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@depth@{speed}ms") def subscribe_mark_price(self, symbol: str = None, speed: int = None): """ @@ -227,4 +227,4 @@ def subscribe_mark_price(self, symbol: str = None, speed: int = None): if symbol is None: self._add_stream("!markPrice@arr") else: - self._add_stream(f"{format_symbol(symbol).lower()}@markPrice@{int(speed / 1000)}s") + self._add_stream(f"{BinanceSymbol(symbol).lower()}@markPrice@{int(speed / 1000)}s") diff --git a/nautilus_trader/adapters/interactive_brokers/config.py b/nautilus_trader/adapters/interactive_brokers/config.py index 057220d13a4f..bdb55a668860 100644 --- a/nautilus_trader/adapters/interactive_brokers/config.py +++ b/nautilus_trader/adapters/interactive_brokers/config.py @@ -35,7 +35,7 @@ class InteractiveBrokersDataClientConfig(LiveDataClientConfig): The Interactive Brokers account id. If ``None`` then will source the `TWS_ACCOUNT`. trading_mode: str - paper or live + paper or live. account_id : str, optional The account_id to use for Nautilus. gateway_host : str, optional diff --git a/nautilus_trader/adapters/interactive_brokers/data.py b/nautilus_trader/adapters/interactive_brokers/data.py index 6d952d5d151f..d38a712ca7e8 100644 --- a/nautilus_trader/adapters/interactive_brokers/data.py +++ b/nautilus_trader/adapters/interactive_brokers/data.py @@ -14,6 +14,7 @@ # ------------------------------------------------------------------------------------------------- import asyncio +from collections import defaultdict from functools import partial from typing import Callable, Optional @@ -39,7 +40,6 @@ from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.logging import Logger -from nautilus_trader.common.logging import defaultdict from nautilus_trader.core.datetime import dt_to_unix_nanos from nautilus_trader.live.data_client import LiveMarketDataClient from nautilus_trader.model.data.bar import Bar @@ -60,6 +60,25 @@ class InteractiveBrokersDataClient(LiveMarketDataClient): """ Provides a data client for the InteractiveBrokers exchange. + + Parameters + ---------- + loop : asyncio.AbstractEventLoop + The event loop for the client. + client : IB + The ib_insync IB client. + msgbus : MessageBus + The message bus for the client. + cache : Cache + The cache for the client. 
+    clock : LiveClock
+        The clock for the client.
+    logger : Logger
+        The logger for the client.
+    instrument_provider : InteractiveBrokersInstrumentProvider
+        The instrument provider.
+    handle_revised_bars : bool
+        If the DataClient will emit bar updates as soon as a new bar opens.
     """

     def __init__(
@@ -73,29 +92,6 @@ def __init__(
         instrument_provider: InteractiveBrokersInstrumentProvider,
         handle_revised_bars: bool,
     ):
-        """
-        Initialize a new instance of the ``InteractiveBrokersDataClient`` class.
-
-        Parameters
-        ----------
-        loop : asyncio.AbstractEventLoop
-            The event loop for the client.
-        client : IB
-            The ib_insync IB client.
-        msgbus : MessageBus
-            The message bus for the client.
-        cache : Cache
-            The cache for the client.
-        clock : LiveClock
-            The clock for the client.
-        logger : Logger
-            The logger for the client.
-        instrument_provider : InteractiveBrokersInstrumentProvider
-            The instrument provider.
-        handle_revised_bars : bool
-            If DataClient will emit bar updates as soon new bar opens.
-
-        """
         super().__init__(
             loop=loop,
             client_id=ClientId(IB_VENUE.value),
@@ -429,7 +425,6 @@ def _on_bar_update(
         has_new_bar: bool,
         bar_type: BarType,
     ):
-
         if not has_new_bar:
             return

@@ -458,7 +453,6 @@ def _on_historical_bar_update(
         has_new_bar: bool,
         process_all: bool = False,
     ) -> None:
-
         if not process_all:
             if self._handle_revised_bars:
                 bars = [bar_data_list[-1]]
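
In the factories.py changes below, the ib_insync client cache key gains `client_id`, so two configurations differing only by client ID no longer collide on a single cached connection. A hedged sketch of the keying logic, with a placeholder in place of a real connection:

```python
from typing import Any

# Hypothetical module-level cache mirroring IB_INSYNC_CLIENTS
_CLIENTS: dict[tuple[str, int, int], Any] = {}


def get_cached_client(host: str, port: int, client_id: int) -> Any:
    key = (host, port, client_id)  # previously just (host, port)
    if key not in _CLIENTS:
        _CLIENTS[key] = object()  # placeholder for ib_insync.IB().connect(...)
    return _CLIENTS[key]


a = get_cached_client("127.0.0.1", 7497, client_id=1)
b = get_cached_client("127.0.0.1", 7497, client_id=2)
assert a is not b  # distinct client IDs now get distinct clients
```
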
diff --git a/nautilus_trader/adapters/interactive_brokers/factories.py b/nautilus_trader/adapters/interactive_brokers/factories.py
index bcceeb0977d3..11d2a8f0bab4 100644
--- a/nautilus_trader/adapters/interactive_brokers/factories.py
+++ b/nautilus_trader/adapters/interactive_brokers/factories.py
@@ -31,7 +31,6 @@
 )
 from nautilus_trader.cache.cache import Cache
 from nautilus_trader.common.clock import LiveClock
-from nautilus_trader.common.logging import LiveLogger
 from nautilus_trader.common.logging import Logger
 from nautilus_trader.config import InstrumentProviderConfig
 from nautilus_trader.live.factories import LiveDataClientFactory
@@ -104,7 +103,7 @@ def get_cached_ib_client(
     port = port or GATEWAY.port
     port = port or InteractiveBrokersGateway.PORTS[trading_mode]

-    client_key: tuple = (host, port)
+    client_key: tuple = (host, port, client_id)

     if client_key not in IB_INSYNC_CLIENTS:
         client = ib_insync.IB()
@@ -113,7 +112,7 @@ def get_cached_ib_client(
             try:
                 client.connect(host=host, port=port, timeout=6, clientId=client_id)
                 break
-            except (TimeoutError, AttributeError, asyncio.TimeoutError):
+            except (TimeoutError, AttributeError, asyncio.TimeoutError, ConnectionRefusedError):
                 continue
         else:
             raise TimeoutError(f"Failed to connect to gateway in {timeout}s")
@@ -163,7 +162,7 @@ def create(  # type: ignore
         msgbus: MessageBus,
         cache: Cache,
         clock: LiveClock,
-        logger: LiveLogger,
+        logger: Logger,
     ) -> InteractiveBrokersDataClient:
         """
         Create a new InteractiveBrokers data client.
@@ -182,7 +181,7 @@ def create(  # type: ignore
             The cache for the client.
         clock : LiveClock
             The clock for the client.
-        logger : LiveLogger
+        logger : Logger
             The logger for the client.

         Returns
@@ -191,8 +190,8 @@ def create(  # type: ignore

         """
         client = get_cached_ib_client(
-            username=config.username,
-            password=config.password,
+            username=config.username or os.environ["TWS_USERNAME"],
+            password=config.password or os.environ["TWS_PASSWORD"],
             host=config.gateway_host,
             port=config.gateway_port,
             trading_mode=config.trading_mode,
@@ -235,7 +234,7 @@ def create(  # type: ignore
         msgbus: MessageBus,
         cache: Cache,
         clock: LiveClock,
-        logger: LiveLogger,
+        logger: Logger,
     ) -> InteractiveBrokersExecutionClient:
         """
         Create a new InteractiveBrokers execution client.
@@ -254,7 +253,7 @@ def create(  # type: ignore
             The cache for the client.
         clock : LiveClock
             The clock for the client.
-        logger : LiveLogger
+        logger : Logger
             The logger for the client.

         Returns
@@ -263,8 +262,8 @@ def create(  # type: ignore

         """
         client = get_cached_ib_client(
-            username=config.username,
-            password=config.password,
+            username=config.username or os.environ["TWS_USERNAME"],
+            password=config.password or os.environ["TWS_PASSWORD"],
             host=config.gateway_host,
             port=config.gateway_port,
             client_id=config.client_id,
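
The credential fallbacks just added mean explicit config values win, with the `TWS_USERNAME` / `TWS_PASSWORD` environment variables as the backstop (a `KeyError` surfaces immediately if neither source provides a value). A small sketch of the resolution order, using a hypothetical config stand-in:

```python
import os
from dataclasses import dataclass
from typing import Optional


@dataclass
class IBConfigLike:
    """Hypothetical stand-in for the client config."""

    username: Optional[str] = None
    password: Optional[str] = None


def resolve_credentials(config: IBConfigLike) -> tuple[str, str]:
    # Explicit config values take precedence; os.environ[...] raises KeyError
    # when the variable is unset, so missing credentials fail fast.
    username = config.username or os.environ["TWS_USERNAME"]
    password = config.password or os.environ["TWS_PASSWORD"]
    return username, password
```
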
diff --git a/nautilus_trader/adapters/interactive_brokers/providers.py b/nautilus_trader/adapters/interactive_brokers/providers.py
index ece0c0c17ca4..bde82f39bf0e 100644
--- a/nautilus_trader/adapters/interactive_brokers/providers.py
+++ b/nautilus_trader/adapters/interactive_brokers/providers.py
@@ -26,9 +26,9 @@
 from ib_insync import ContractDetails
 from ib_insync import Future

-from nautilus_trader.adapters.betfair.util import one
 from nautilus_trader.adapters.interactive_brokers.common import IB_VENUE
 from nautilus_trader.adapters.interactive_brokers.parsing.instruments import parse_instrument
+from nautilus_trader.common.functions import one
 from nautilus_trader.common.logging import Logger
 from nautilus_trader.common.providers import InstrumentProvider
 from nautilus_trader.config import InstrumentProviderConfig
@@ -40,6 +40,21 @@
 class InteractiveBrokersInstrumentProvider(InstrumentProvider):
     """
     Provides a means of loading `Instrument` objects through Interactive Brokers.
+
+    Parameters
+    ----------
+    client : ib_insync.IB
+        The Interactive Brokers client.
+    config : InstrumentProviderConfig
+        The instrument provider config.
+    logger : Logger
+        The logger for the instrument provider.
+    host : str
+        The client host name or IP address.
+    port : int
+        The client port number.
+    client_id : int
+        The unique client ID number for the connection.
     """

     def __init__(
@@ -50,26 +65,7 @@ def __init__(
         host: str = "127.0.0.1",
         port: int = 7497,
         client_id: int = 1,
-    ):
-        """
-        Initialize a new instance of the ``InteractiveBrokersInstrumentProvider`` class.
-
-        Parameters
-        ----------
-        client : ib_insync.IB
-            The Interactive Brokers client.
-        config : InstrumentProviderConfig
-            The instrument provider config
-        logger : Logger
-            The logger for the instrument provider.
-        host : str
-            The client host name or IP address.
-        port : str
-            The client port number.
-        client_id : int
-            The unique client ID number for the connection.
-
-        """
+    ) -> None:
         super().__init__(
             venue=IB_VENUE,
             logger=logger,
@@ -212,15 +208,15 @@ async def load(

         Parameters
         ----------
-        build_options_chain: bool (default: False)
-            Search for full option chain
-        option_kwargs: str (default: False)
-            JSON string for options filtering, available fields: min_expiry, max_expiry, min_strike, max_strike, kind
-        kwargs: **kwargs
+        build_options_chain : bool, default False
+            Search for full option chain.
+        option_kwargs : str, optional
+            JSON string for options filtering, available fields: min_expiry, max_expiry, min_strike, max_strike, kind.
+        kwargs : **kwargs
             Optional extra kwargs to search for, examples: secType, conId, symbol,
             lastTradeDateOrContractMonth, strike, right, multiplier, exchange,
             primaryExchange, currency, localSymbol, tradingClass, includeExpired,
             secIdType, secId,
-            comboLegsDescrip, comboLegs, deltaNeutralContract
+            comboLegsDescrip, comboLegs, deltaNeutralContract.

         """
         self._log.debug(f"Attempting to find instrument for {kwargs=}")
         contract = self._parse_contract(**kwargs)
diff --git a/nautilus_trader/adapters/sandbox/factory.py b/nautilus_trader/adapters/sandbox/factory.py
index 0b8cfd01a245..dacafee7f287 100644
--- a/nautilus_trader/adapters/sandbox/factory.py
+++ b/nautilus_trader/adapters/sandbox/factory.py
@@ -19,7 +19,7 @@
 from nautilus_trader.adapters.sandbox.execution import SandboxExecutionClient
 from nautilus_trader.cache.cache import Cache
 from nautilus_trader.common.clock import LiveClock
-from nautilus_trader.common.logging import LiveLogger
+from nautilus_trader.common.logging import Logger
 from nautilus_trader.live.factories import LiveExecClientFactory
 from nautilus_trader.msgbus.bus import MessageBus
@@ -37,7 +37,7 @@ def create(  # type: ignore
         msgbus: MessageBus,
         cache: Cache,
         clock: LiveClock,
-        logger: LiveLogger,
+        logger: Logger,
     ) -> SandboxExecutionClient:
         """
         Create a new Sandbox execution client.
@@ -56,7 +56,7 @@ def create(  # type: ignore
             The cache for the client.
         clock : LiveClock
             The clock for the client.
-        logger : LiveLogger
+        logger : Logger
             The logger for the client.

         Returns
diff --git a/nautilus_trader/backtest/__main__.py b/nautilus_trader/backtest/__main__.py
index fc649eaf9de9..5a9ae3d37815 100644
--- a/nautilus_trader/backtest/__main__.py
+++ b/nautilus_trader/backtest/__main__.py
@@ -1,3 +1,18 @@
+# -------------------------------------------------------------------------------------------------
+#  Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved.
+#  https://nautechsystems.io
+#
+#  Licensed under the GNU Lesser General Public License Version 3.0 (the "License");
+#  You may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+# -------------------------------------------------------------------------------------------------
+
 from typing import Optional

 import click
diff --git a/nautilus_trader/backtest/engine.pxd b/nautilus_trader/backtest/engine.pxd
index 7a86434c184d..60e364996b07 100644
--- a/nautilus_trader/backtest/engine.pxd
+++ b/nautilus_trader/backtest/engine.pxd
@@ -46,8 +46,5 @@ cdef class BacktestEngine:
     cdef uint64_t _index
     cdef uint64_t _iteration

-    cpdef list list_actors(self)
-    cpdef list list_strategies(self)
-
     cdef Data _next(self)
     cdef list _advance_time(self, uint64_t now_ns, list clocks)
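
The engine.pyx changes below fold the previous `run_streaming()` / `end_streaming()` pair into `run(streaming=...)` plus a public `end()`. A sketch of the new batched flow, assuming an already configured engine and a hypothetical `batches` iterable of data lists:

```python
from nautilus_trader.backtest.engine import BacktestEngine


def run_in_batches(engine: BacktestEngine, batches) -> None:
    """Run more data than fits in memory through one engine (sketch)."""
    for i, batch in enumerate(batches):
        if i > 0:
            engine.clear_data()      # drop the previous batch
        engine.add_data(batch)       # add the next batch of the data stream
        engine.run(streaming=True)   # keep engine components running
    engine.end()                     # stop components and run post-run analysis
```

Collapsing the two entry points means the normal one-shot case stays `engine.run()`, while streaming callers opt in with a single flag.
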
diff --git a/nautilus_trader/backtest/engine.pyx b/nautilus_trader/backtest/engine.pyx
index c81f23be33c9..2cbbbe9f52da 100644
--- a/nautilus_trader/backtest/engine.pyx
+++ b/nautilus_trader/backtest/engine.pyx
@@ -93,7 +93,7 @@ cdef class BacktestEngine:
         If `config` is not of type `BacktestEngineConfig`.

     """
-    def __init__(self, config: Optional[BacktestEngineConfig] = None):
+    def __init__(self, config: Optional[BacktestEngineConfig] = None) -> None:
         if config is None:
             config = BacktestEngineConfig()
         Condition.type(config, BacktestEngineConfig, "config")
@@ -137,6 +137,7 @@ cdef class BacktestEngine:
             load_state=config.load_state,
             save_state=config.save_state,
             log_level=log_level_from_str(config.log_level.upper()),
+            log_rate_limit=config.log_rate_limit,
             bypass_logging=config.bypass_logging,
         )

@@ -339,7 +340,7 @@ cdef class BacktestEngine:
         """
         return self._kernel.portfolio

-    def list_venues(self):
+    def list_venues(self) -> list[Venue]:
         """
         Return the venues contained within the engine.

@@ -686,28 +687,6 @@ cdef class BacktestEngine:
         # Checked inside trader
         self.kernel.trader.add_strategies(strategies)

-    cpdef list list_actors(self):
-        """
-        Return the actors for the backtest.
-
-        Returns
-        ----------
-        list[Actors]
-
-        """
-        return self.trader.actors()
-
-    cpdef list list_strategies(self):
-        """
-        Return the strategies for the backtest.
-
-        Returns
-        ----------
-        list[Strategy]
-
-        """
-        return self.trader.strategies()
-
     def reset(self) -> None:
         """
         Reset the backtest engine.
@@ -718,7 +697,7 @@ cdef class BacktestEngine:

         if self.kernel.trader.is_running:
             # End current backtest run
-            self._end()
+            self.end()

         # Change logger clock back to live clock for consistent time stamping
         self.kernel.logger.change_clock(self._clock)
@@ -759,7 +738,7 @@ cdef class BacktestEngine:

         self._log.info("Reset.")

-    def clear_data(self):
+    def clear_data(self) -> None:
         """
         Clear the engines internal data stream.

@@ -786,6 +765,7 @@ cdef class BacktestEngine:
         start: Optional[Union[datetime, str, int]] = None,
         end: Optional[Union[datetime, str, int]] = None,
         run_config_id: Optional[str] = None,
+        streaming: bool = False,
    ) -> None:
         """
         Run a backtest.
@@ -793,6 +773,15 @@ cdef class BacktestEngine:
         At the end of the run the trader and strategies will be stopped, then
         post-run analysis performed.

+        If more data than can fit in memory is to be run through the backtest
+        engine, then `streaming` mode can be utilized. The expected sequence is as
+        follows:
+        - Add initial data batch and strategies.
+        - Call `run(streaming=True)`.
+        - Call `clear_data()`.
+        - Add next batch of data stream.
+        - Call either `run(streaming=False)` or `end()` when there is no more data to run on.
+
         Parameters
         ----------
         start : Union[datetime, str, int], optional
@@ -803,6 +792,9 @@ cdef class BacktestEngine:
             to the end of the data.
         run_config_id : str, optional
             The tokenized `BacktestRunConfig` ID.
+        streaming : bool, default False
+            If running in streaming mode. If ``False`` the backtest will end
+            following the run iterations.

         Raises
         ------
         ValueError
             If no data has been added to the engine.
         ValueError
             If the `start` is >= the `end` datetime.

         """
         self._run(start, end, run_config_id)
-        self._end()
+        if not streaming:
+            self.end()

-    def run_streaming(
-        self,
-        start: Optional[Union[datetime, str, int]] = None,
-        end: Optional[Union[datetime, str, int]] = None,
-        run_config_id: Optional[str] = None,
-    ):
+    def end(self) -> None:
         """
-        Run a backtest in streaming mode.
+        Manually end the backtest.

-        If more data than can fit in memory is to be run through the backtest
-        engine, then streaming mode can be utilized. The expected sequence is as
-        follows:
-        - Add initial data batch and strategies.
-        - Call `run_streaming()`.
-        - Call `clear_data()`.
-        - Add next batch of data stream.
-        - Call `run_streaming()`.
-        - Call `end_streaming()` when there is no more data to run on.
-
-        Parameters
-        ----------
-        start : Union[datetime, str, int], optional
-            The start datetime (UTC) for the current batch of data. If ``None``
-            engine runs from the start of the data.
-        end : Union[datetime, str, int], optional
-            The end datetime (UTC) for the current batch of data. If ``None`` engine runs
-            to the end of the data.
-        run_config_id : str, optional
-            The tokenized backtest run configuration ID.
-
-        Raises
-        ------
-        ValueError
-            If no data has been added to the engine.
-        ValueError
-            If the `start` is >= the `end` datetime.
+        Notes
+        -----
+        Only required if you have previously been running with streaming.

         """
-        self._run(start, end, run_config_id)
+        if self.kernel.trader.is_running:
+            self.kernel.trader.stop()
+        if self.kernel.data_engine.is_running:
+            self.kernel.data_engine.stop()
+        if self.kernel.risk_engine.is_running:
+            self.kernel.risk_engine.stop()
+        if self.kernel.exec_engine.is_running:
+            self.kernel.exec_engine.stop()
+        if self.kernel.emulator.is_running:
+            self.kernel.emulator.stop()

-    def end_streaming(self):
-        """
-        End the backtest streaming run.
+        # Process remaining messages
+        for exchange in self._venues.values():
+            exchange.process(self.kernel.clock.timestamp_ns())

-        The following sequence of events will occur:
-        - The trader will be stopped which in turn stops the strategies.
-        - The exchanges will process all pending messages.
-        - Post-run analysis is performed.
+ self._run_finished = self._clock.utc_now() + self._backtest_end = self.kernel.clock.utc_now() - """ - self._end() + self._log_post_run() def get_result(self): """ @@ -1020,27 +990,6 @@ cdef class BacktestEngine: for event_handler in now_events: event_handler.handle() - def _end(self): - if self.kernel.trader.is_running: - self.kernel.trader.stop() - if self.kernel.data_engine.is_running: - self.kernel.data_engine.stop() - if self.kernel.risk_engine.is_running: - self.kernel.risk_engine.stop() - if self.kernel.exec_engine.is_running: - self.kernel.exec_engine.stop() - if self.kernel.emulator.is_running: - self.kernel.emulator.stop() - - # Process remaining messages - for exchange in self._venues.values(): - exchange.process(self.kernel.clock.timestamp_ns()) - - self._run_finished = self._clock.utc_now() - self._backtest_end = self.kernel.clock.utc_now() - - self._log_post_run() - cdef Data _next(self): cdef uint64_t cursor = self._index self._index += 1 diff --git a/nautilus_trader/backtest/exchange.pxd b/nautilus_trader/backtest/exchange.pxd index e93416a1986a..40488b9708c3 100644 --- a/nautilus_trader/backtest/exchange.pxd +++ b/nautilus_trader/backtest/exchange.pxd @@ -17,6 +17,7 @@ from libc.stdint cimport uint64_t from nautilus_trader.accounting.accounts.base cimport Account from nautilus_trader.backtest.execution_client cimport BacktestExecClient +from nautilus_trader.backtest.matching_engine cimport OrderMatchingEngine from nautilus_trader.backtest.models cimport FillModel from nautilus_trader.backtest.models cimport LatencyModel from nautilus_trader.cache.cache cimport Cache @@ -103,6 +104,7 @@ cdef class SimulatedExchange: cpdef Price best_bid_price(self, InstrumentId instrument_id) cpdef Price best_ask_price(self, InstrumentId instrument_id) cpdef OrderBook get_book(self, InstrumentId instrument_id) + cpdef OrderMatchingEngine get_matching_engine(self, InstrumentId instrument_id) cpdef dict get_matching_engines(self) cpdef dict get_books(self) cpdef list get_open_orders(self, InstrumentId instrument_id=*) diff --git a/nautilus_trader/backtest/exchange.pyx b/nautilus_trader/backtest/exchange.pyx index 2a6c876c5c05..22d9cfc3e217 100644 --- a/nautilus_trader/backtest/exchange.pyx +++ b/nautilus_trader/backtest/exchange.pyx @@ -181,6 +181,12 @@ cdef class SimulatedExchange: for module in modules: Condition.not_in(module, self.modules, "module", "modules") module.register_venue(self) + module.register_base( + msgbus=msgbus, + cache=cache, + clock=clock, + logger=logger, + ) self.modules.append(module) self._log.info(f"Loaded {module}.") @@ -387,7 +393,31 @@ cdef class SimulatedExchange: return matching_engine.get_book() + cpdef OrderMatchingEngine get_matching_engine(self, InstrumentId instrument_id): + """ + Return the matching engine for the given instrument ID (if found). + + Parameters + ---------- + instrument_id : InstrumentId + The instrument ID for the matching engine. + + Returns + ------- + OrderMatchingEngine or ``None`` + + """ + return self._matching_engines.get(instrument_id) + cpdef dict get_matching_engines(self): + """ + Return all matching engines for the exchange (for every instrument). 
+ + Returns + ------- + dict[InstrumentId, OrderMatchingEngine] + + """ return self._matching_engines.copy() cpdef dict get_books(self): diff --git a/nautilus_trader/backtest/matching_engine.pxd b/nautilus_trader/backtest/matching_engine.pxd index 80c7b07d900d..4523a3cdf80a 100644 --- a/nautilus_trader/backtest/matching_engine.pxd +++ b/nautilus_trader/backtest/matching_engine.pxd @@ -162,20 +162,22 @@ cdef class OrderMatchingEngine: cpdef void fill_market_order(self, Order order) except * cpdef void fill_limit_order(self, Order order) except * - cpdef void _apply_fills( + cpdef void apply_fills( self, Order order, list fills, - PositionId venue_position_id, - Position position, + LiquiditySide liquidity_side, + PositionId venue_position_id=*, + Position position=*, ) except * - cpdef void _fill_order( + cpdef void fill_order( self, Order order, - PositionId venue_position_id, - Position position, - Quantity last_qty, Price last_px, + Quantity last_qty, + LiquiditySide liquidity_side, + PositionId venue_position_id=*, + Position position=*, ) except * # -- IDENTIFIER GENERATORS ------------------------------------------------------------------------ diff --git a/nautilus_trader/backtest/matching_engine.pyx b/nautilus_trader/backtest/matching_engine.pyx index 2318b753e86b..a6942c260d73 100644 --- a/nautilus_trader/backtest/matching_engine.pyx +++ b/nautilus_trader/backtest/matching_engine.pyx @@ -1077,6 +1077,16 @@ cdef class OrderMatchingEngine: # -- ORDER PROCESSING ----------------------------------------------------------------------------- cpdef void iterate(self, uint64_t timestamp_ns) except *: + """ + Iterate the matching engine by processing the bid and ask order sides + and advancing time up to the given UNIX `timestamp_ns`. + + Parameters + ---------- + timestamp_ns : uint64_t + The UNIX timestamp to advance the matching engine time to. + + """ self._clock.set_time(timestamp_ns) # TODO: Convert order book to use ints rather than doubles @@ -1120,6 +1130,29 @@ cdef class OrderMatchingEngine: self._has_targets = False cpdef list determine_limit_price_and_volume(self, Order order): + """ + Return the projected fills for the given *limit* order filling passively + from its limit price. + + The list may be empty if no fills. + + Parameters + ---------- + order : Order + The order to determine fills for. + + Returns + ------- + list[tuple[Price, Quantity]] + + Raises + ------ + ValueError + If the `order` does not have a LIMIT `price`. + + """ + Condition.true(order.has_price_c(), "order has no limit `price`") + cdef list fills cdef BookOrder submit_order = BookOrder(price=order.price, size=order.leaves_qty, side=order.side) if order.side == OrderSide.BUY: @@ -1203,6 +1236,22 @@ cdef class OrderMatchingEngine: return fills cpdef list determine_market_price_and_volume(self, Order order): + """ + Return the projected fills for the given *marketable* order filling + aggressively into its order side. + + The list may be empty if no fills. + + Parameters + ---------- + order : Order + The order to determine fills for. + + Returns + ------- + list[tuple[Price, Quantity]] + + """ cdef list fills cdef Price price = Price.from_int_c(INT_MAX if order.side == OrderSide.BUY else INT_MIN) cdef BookOrder submit_order = BookOrder(price=price, size=order.leaves_qty, side=order.side) @@ -1261,6 +1310,15 @@ cdef class OrderMatchingEngine: return fills cpdef void fill_market_order(self, Order order) except *: + """ + Fill the given *marketable* order. 
+ + Parameters + ---------- + order : Order + The order to fill. + + """ cdef PositionId venue_position_id = self._get_position_id(order) cdef Position position = None if venue_position_id is not None: @@ -1275,15 +1333,31 @@ cdef class OrderMatchingEngine: order.liquidity_side = LiquiditySide.TAKER - self._apply_fills( + self.apply_fills( order=order, fills=self.determine_market_price_and_volume(order), + liquidity_side=order.liquidity_side, venue_position_id=venue_position_id, position=position, ) cpdef void fill_limit_order(self, Order order) except *: - assert order.has_price_c(), f"{order.type_string_c()} has no LIMIT price" + """ + Fill the given limit order. + + Parameters + ---------- + order : Order + The order to fill. + + Raises + ------ + ValueError + If the `order` does not have a LIMIT `price`. + + """ + Condition.true(order.has_price_c(), "order has no limit `price`") + cdef Price price = order.price if order.liquidity_side == LiquiditySide.MAKER and self._fill_model: if order.side == OrderSide.BUY and self._core.bid_raw == price._mem.raw and not self._fill_model.is_limit_filled(): @@ -1303,20 +1377,55 @@ cdef class OrderMatchingEngine: self.cancel_order(order) return # Order canceled - self._apply_fills( + self.apply_fills( order=order, fills=self.determine_limit_price_and_volume(order), + liquidity_side=order.liquidity_side, venue_position_id=venue_position_id, position=position, ) - cpdef void _apply_fills( + cpdef void apply_fills( self, Order order, list fills, - PositionId venue_position_id, # Can be None - Position position, # Can be None + LiquiditySide liquidity_side, + PositionId venue_position_id: Optional[PositionId] = None, + Position position: Optional[Position] = None, ) except *: + """ + Apply the given list of fills to the given order. Optionally provide + existing position details. + + Parameters + ---------- + order : Order + The order to fill. + fills : list[tuple[Price, Quantity]] + The fills to apply to the order. + liquidity_side : LiquiditySide + The liquidity side for the fill(s). + venue_position_id : PositionId, optional + The current venue position ID related to the order (if assigned). + position : Position, optional + The current position related to the order (if any). + + Raises + ------ + ValueError + If `liquidity_side` is ``NO_LIQUIDITY_SIDE``. + + Warnings + -------- + The `liquidity_side` will override anything previously set on the order. 
+
+        """
+        Condition.not_none(order, "order")
+        Condition.not_none(fills, "fills")
+        Condition.not_equal(liquidity_side, LiquiditySide.NO_LIQUIDITY_SIDE, "liquidity_side", "NO_LIQUIDITY_SIDE")
+
+        order.liquidity_side = liquidity_side
+
         if not fills:
             return  # No fills
@@ -1386,12 +1495,13 @@ cdef class OrderMatchingEngine:
             )
             if not fill_qty._mem.raw > 0:
                 return  # Done
-            self._fill_order(
+            self.fill_order(
                 order=order,
+                last_px=fill_px,
+                last_qty=fill_qty,
+                liquidity_side=order.liquidity_side,
                 venue_position_id=venue_position_id,
                 position=position,
-                last_qty=fill_qty,
-                last_px=fill_px,
             )
             if order.order_type == OrderType.MARKET_TO_LIMIT and initial_market_to_limit_fill:
                 return  # Filled initial level
@@ -1421,22 +1531,60 @@ cdef class OrderMatchingEngine:
                 f"invalid `OrderSide`, was {order.side}",  # pragma: no cover (design-time error)
             )

-        self._fill_order(
+        self.fill_order(
             order=order,
+            last_px=fill_px,
+            last_qty=order.leaves_qty,
+            liquidity_side=order.liquidity_side,
             venue_position_id=venue_position_id,
             position=position,
-            last_qty=order.leaves_qty,
-            last_px=fill_px,
         )

-    cpdef void _fill_order(
+    cpdef void fill_order(
         self,
         Order order,
-        PositionId venue_position_id,  # Can be None
-        Position position: Optional[Position],
-        Quantity last_qty,
         Price last_px,
+        Quantity last_qty,
+        LiquiditySide liquidity_side,
+        PositionId venue_position_id: Optional[PositionId] = None,
+        Position position: Optional[Position] = None,
     ) except *:
+        """
+        Apply the given fill to the given order. Optionally provide
+        existing position details.
+
+        Parameters
+        ----------
+        order : Order
+            The order to fill.
+        last_px : Price
+            The fill price for the order.
+        last_qty : Quantity
+            The fill quantity for the order.
+        liquidity_side : LiquiditySide
+            The liquidity side for the fill.
+        venue_position_id : PositionId, optional
+            The current venue position ID related to the order (if assigned).
+        position : Position, optional
+            The current position related to the order (if any).
+
+        Raises
+        ------
+        ValueError
+            If `liquidity_side` is ``NO_LIQUIDITY_SIDE``.
+
+        Warnings
+        --------
+        The `liquidity_side` will override anything previously set on the order.
+
+        """
+        Condition.not_none(order, "order")
+        Condition.not_none(last_px, "last_px")
+        Condition.not_none(last_qty, "last_qty")
+        Condition.not_equal(liquidity_side, LiquiditySide.NO_LIQUIDITY_SIDE, "liquidity_side", "NO_LIQUIDITY_SIDE")
+
+        order.liquidity_side = liquidity_side
+
         # Calculate commission
         cdef double notional = self.instrument.notional_value(
             quantity=last_qty,
@@ -1740,7 +1888,7 @@ cdef class OrderMatchingEngine:
             strategy_id=order.strategy_id,
             instrument_id=order.instrument_id,
             client_order_id=order.client_order_id,
-            venue_order_id=self._generate_venue_order_id(),
+            venue_order_id=order.venue_order_id or self._generate_venue_order_id(),
             account_id=order.account_id or self._account_ids[order.trader_id],
             event_id=UUID4(),
             ts_event=timestamp,
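
With `apply_fills` and `fill_order` now public (and validating `liquidity_side`), custom components can inject fills into a matching engine directly. A hedged sketch, assuming a `SimulatedExchange` and an open order from a running backtest:

```python
from nautilus_trader.model.enums import LiquiditySide
from nautilus_trader.model.objects import Price, Quantity


def inject_maker_fill(exchange, instrument_id, order) -> None:
    """Fill an open order at a chosen price via the new public API (sketch)."""
    matching_engine = exchange.get_matching_engine(instrument_id)  # new accessor
    matching_engine.fill_order(
        order=order,                         # an open order known to the engine
        last_px=Price.from_str("1.2345"),
        last_qty=Quantity.from_str("100"),
        liquidity_side=LiquiditySide.MAKER,  # overrides the order's liquidity side
    )
```
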
diff --git a/nautilus_trader/backtest/modules.pyx b/nautilus_trader/backtest/modules.pyx
index b63071ff10fc..5ccbff01ef7a 100644
--- a/nautilus_trader/backtest/modules.pyx
+++ b/nautilus_trader/backtest/modules.pyx
@@ -13,12 +13,14 @@
 #  limitations under the License.
 # -------------------------------------------------------------------------------------------------

-from cpython.datetime cimport datetime
-from libc.stdint cimport uint64_t
-
 import pandas as pd
 import pytz

+from nautilus_trader.config import ActorConfig
+
+from cpython.datetime cimport datetime
+from libc.stdint cimport uint64_t
+
 from nautilus_trader.accounting.calculators cimport RolloverInterestCalculator
 from nautilus_trader.backtest.exchange cimport SimulatedExchange
 from nautilus_trader.core.correctness cimport Condition
@@ -32,8 +34,6 @@ from nautilus_trader.model.objects cimport Price
 from nautilus_trader.model.orderbook.book cimport OrderBook
 from nautilus_trader.model.position cimport Position

-from nautilus_trader.config import ActorConfig
-

 class SimulationModuleConfig(ActorConfig):
     pass
diff --git a/nautilus_trader/backtest/node.py b/nautilus_trader/backtest/node.py
index 8dd529b27934..e3974e3fcf7a 100644
--- a/nautilus_trader/backtest/node.py
+++ b/nautilus_trader/backtest/node.py
@@ -37,10 +37,9 @@
 from nautilus_trader.model.identifiers import InstrumentId
 from nautilus_trader.model.identifiers import Venue
 from nautilus_trader.model.objects import Money
-from nautilus_trader.persistence.batching import batch_files
-from nautilus_trader.persistence.batching import extract_generic_data_client_ids
-from nautilus_trader.persistence.batching import groupby_datatype
-from nautilus_trader.persistence.catalog.parquet import ParquetDataCatalog
+from nautilus_trader.persistence.streaming.engine import StreamingEngine
+from nautilus_trader.persistence.streaming.engine import extract_generic_data_client_ids
+from nautilus_trader.persistence.streaming.engine import groupby_datatype


 class BacktestNode:
@@ -167,6 +166,12 @@ def _create_engine(
         # Add venues (must be added prior to instruments)
         for config in venue_configs:
             base_currency: Optional[str] = config.base_currency
+            if config.leverages:
+                leverages = {
+                    InstrumentId.from_str(i): Decimal(v) for i, v in config.leverages.items()
+                }
+            else:
+                leverages = {}
             engine.add_venue(
                 venue=Venue(config.name),
                 oms_type=OmsType[config.oms_type],
@@ -174,11 +179,7 @@ def _create_engine(
                 base_currency=Currency.from_str(base_currency) if base_currency else None,
                 starting_balances=[Money.from_str(m) for m in config.starting_balances],
                 default_leverage=Decimal(config.default_leverage),
-                leverages={
-                    InstrumentId.from_str(i): Decimal(v) for i, v in config.leverages.items()
-                }
-                if config.leverages
-                else {},
+                leverages=leverages,
                 book_type=book_type_from_str(config.book_type),
                 routing=config.routing,
                 modules=[ActorFactory.create(module) for module in (config.modules or [])],
@@ -251,16 +252,14 @@ def _run_streaming(
         data_configs: list[BacktestDataConfig],
         batch_size_bytes: int,
     ) -> None:
-        config = data_configs[0]
-        catalog: ParquetDataCatalog = config.catalog()
-
         data_client_ids = extract_generic_data_client_ids(data_configs=data_configs)

-        for batch in batch_files(
-            catalog=catalog,
+        streaming_engine = StreamingEngine(
             data_configs=data_configs,
             target_batch_size_bytes=batch_size_bytes,
-        ):
+        )
+
+        for batch in streaming_engine:
             engine.clear_data()
             grouped = groupby_datatype(batch)
             for data in grouped:
@@ -271,9 +270,9 @@ def _run_streaming(
                     GenericData(data_type=DataType(data["type"]), data=d)
                     for d in data["data"]
                 ]
                 self._load_engine_data(engine=engine, data=data)
-            engine.run_streaming(run_config_id=run_config_id)
+            engine.run(run_config_id=run_config_id, streaming=True)

-        engine.end_streaming()
+        engine.end()
         engine.dispose()

     def
_run_oneshot( diff --git a/nautilus_trader/common/actor.pxd b/nautilus_trader/common/actor.pxd index 94eff0a7ba92..018aa3c42d09 100644 --- a/nautilus_trader/common/actor.pxd +++ b/nautilus_trader/common/actor.pxd @@ -94,7 +94,6 @@ cdef class Actor(Component): cpdef void register_base( self, - TraderId trader_id, MessageBus msgbus, CacheFacade cache, Clock clock, diff --git a/nautilus_trader/common/actor.pyx b/nautilus_trader/common/actor.pyx index 3c611fdb6e60..cb5fa8f0c998 100644 --- a/nautilus_trader/common/actor.pyx +++ b/nautilus_trader/common/actor.pyx @@ -30,7 +30,7 @@ import cython from nautilus_trader.config import ActorConfig from nautilus_trader.config import ImportableActorConfig -from nautilus_trader.persistence.streaming import generate_signal_class +from nautilus_trader.persistence.streaming.writer import generate_signal_class from cpython.datetime cimport datetime from libc.stdint cimport uint64_t @@ -502,7 +502,6 @@ cdef class Actor(Component): cpdef void register_base( self, - TraderId trader_id, MessageBus msgbus, CacheFacade cache, Clock clock, @@ -513,8 +512,6 @@ cdef class Actor(Component): Parameters ---------- - trader_id : TraderId - The trader ID for the actor. msgbus : MessageBus The message bus for the actor. cache : CacheFacade @@ -529,7 +526,6 @@ cdef class Actor(Component): System method (not intended to be called by user code). """ - Condition.not_none(trader_id, "trader_id") Condition.not_none(msgbus, "msgbus") Condition.not_none(cache, "cache") Condition.not_none(clock, "clock") @@ -538,14 +534,15 @@ cdef class Actor(Component): clock.register_default_handler(self.handle_event) self._change_clock(clock) self._change_logger(logger) - self._change_msgbus(msgbus) # The trader ID is also assigned here + self._change_msgbus(msgbus) # The trader ID is assigned here - self.trader_id = trader_id self.msgbus = msgbus self.cache = cache self.clock = self._clock self.log = self._log + self.log.info(f"Registered {self.id}.") + cpdef void register_warning_event(self, type event) except *: """ Register the given event type for warning log levels. 
diff --git a/nautilus_trader/common/factories.pxd b/nautilus_trader/common/factories.pxd index bc43a0b6390c..36db1654649f 100644 --- a/nautilus_trader/common/factories.pxd +++ b/nautilus_trader/common/factories.pxd @@ -26,7 +26,9 @@ from nautilus_trader.model.enums_c cimport OrderType from nautilus_trader.model.enums_c cimport TimeInForce from nautilus_trader.model.enums_c cimport TrailingOffsetType from nautilus_trader.model.enums_c cimport TriggerType +from nautilus_trader.model.identifiers cimport ClientOrderId from nautilus_trader.model.identifiers cimport InstrumentId +from nautilus_trader.model.identifiers cimport OrderListId from nautilus_trader.model.identifiers cimport StrategyId from nautilus_trader.model.identifiers cimport TraderId from nautilus_trader.model.objects cimport Price @@ -53,10 +55,14 @@ cdef class OrderFactory: cdef readonly StrategyId strategy_id """The order factories trading strategy ID.\n\n:returns: `StrategyId`""" - cpdef void set_order_id_count(self, int count) except * + cpdef void set_client_order_id_count(self, int count) except * cpdef void set_order_list_id_count(self, int count) except * + cpdef ClientOrderId generate_client_order_id(self) except * + cpdef OrderListId generate_order_list_id(self) except * cpdef void reset(self) except * + cpdef OrderList create_list(self, list orders) + cpdef MarketOrder market( self, InstrumentId instrument_id, @@ -206,8 +212,8 @@ cdef class OrderFactory: OrderType tp_order_type=*, TimeInForce time_in_force=*, datetime expire_time=*, - bint post_only_entry=*, - bint post_only_tp=*, + bint entry_post_only=*, + bint tp_post_only=*, TriggerType emulation_trigger=*, ContingencyType contingency_type=*, ) diff --git a/nautilus_trader/common/factories.pyx b/nautilus_trader/common/factories.pyx index b1f9ddbc9ae2..ca318f6b4c1c 100644 --- a/nautilus_trader/common/factories.pyx +++ b/nautilus_trader/common/factories.pyx @@ -97,7 +97,7 @@ cdef class OrderFactory: initial_count=initial_order_list_id_count, ) - cpdef void set_order_id_count(self, int count) except *: + cpdef void set_client_order_id_count(self, int count) except *: """ Set the internal order ID generator count to the given count. @@ -129,6 +129,32 @@ cdef class OrderFactory: """ self._order_list_id_generator.set_count(count) + cpdef ClientOrderId generate_client_order_id(self) except *: + """ + Generate and return a new client order ID. + + The identifier will be the next in the logical sequence. + + Returns + ------- + ClientOrderId + + """ + return self._order_id_generator.generate() + + cpdef OrderListId generate_order_list_id(self) except *: + """ + Generate and return a new order list ID. + + The identifier will be the next in the logical sequence. + + Returns + ------- + OrderListId + + """ + return self._order_list_id_generator.generate() + cpdef void reset(self) except *: """ Reset the order factory. @@ -138,6 +164,36 @@ cdef class OrderFactory: self._order_id_generator.reset() self._order_list_id_generator.reset() + cpdef OrderList create_list(self, list orders): + """ + Return a new order list containing the given `orders`. + + Parameters + ---------- + orders : list[Order] + The orders for the list. + + Returns + ------- + OrderList + + Raises + ------ + ValueError + If `orders` is empty. + + Notes + ----- + The order at index 0 in the list will be considered the 'first' order. 
+ + """ + Condition.not_empty(orders, "orders") + + return OrderList( + order_list_id=self._order_list_id_generator.generate(), + orders=orders, + ) + cpdef MarketOrder market( self, InstrumentId instrument_id, @@ -893,8 +949,8 @@ cdef class OrderFactory: OrderType tp_order_type = OrderType.LIMIT, TimeInForce time_in_force = TimeInForce.GTC, datetime expire_time = None, - bint post_only_entry = False, - bint post_only_tp = True, + bint entry_post_only = False, + bint tp_post_only = True, TriggerType emulation_trigger = TriggerType.NO_TRIGGER, ContingencyType contingency_type = ContingencyType.OUO, ): @@ -931,9 +987,9 @@ cdef class OrderFactory: The entry orders time in force. expire_time : datetime, optional The order expiration (for ``GTD`` orders). - post_only_entry : bool, default False + entry_post_only : bool, default False If the entry order will only provide liquidity (make a market). - post_only_tp : bool, default False + tp_post_only : bool, default False If the take-profit order will only provide liquidity (make a market). emulation_trigger : TriggerType, default ``NO_TRIGGER`` The emulation trigger type for the entry, as well as the TP and SL bracket orders. @@ -983,7 +1039,7 @@ cdef class OrderFactory: ts_init=self._clock.timestamp_ns(), time_in_force=time_in_force, expire_time_ns=0 if expire_time is None else dt_to_unix_nanos(expire_time), - post_only=post_only_entry, + post_only=entry_post_only, emulation_trigger=emulation_trigger, contingency_type=ContingencyType.OTO, order_list_id=order_list_id, @@ -1027,7 +1083,7 @@ cdef class OrderFactory: ts_init=self._clock.timestamp_ns(), time_in_force=time_in_force, expire_time_ns=0 if expire_time is None else dt_to_unix_nanos(expire_time), - post_only=post_only_entry, + post_only=entry_post_only, emulation_trigger=emulation_trigger, contingency_type=ContingencyType.OTO, order_list_id=order_list_id, @@ -1053,7 +1109,7 @@ cdef class OrderFactory: init_id=UUID4(), ts_init=self._clock.timestamp_ns(), time_in_force=TimeInForce.GTC, - post_only=post_only_tp, + post_only=tp_post_only, reduce_only=True, display_qty=None, emulation_trigger=emulation_trigger, @@ -1077,7 +1133,7 @@ cdef class OrderFactory: init_id=UUID4(), ts_init=self._clock.timestamp_ns(), time_in_force=TimeInForce.GTC, - post_only=post_only_tp, + post_only=tp_post_only, reduce_only=True, display_qty=None, emulation_trigger=emulation_trigger, diff --git a/nautilus_trader/adapters/binance/spot/parsing/__init__.py b/nautilus_trader/common/functions.py similarity index 65% rename from nautilus_trader/adapters/binance/spot/parsing/__init__.py rename to nautilus_trader/common/functions.py index ca16b56e4794..8a816cccc080 100644 --- a/nautilus_trader/adapters/binance/spot/parsing/__init__.py +++ b/nautilus_trader/common/functions.py @@ -12,3 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------------------------- + + +def one(iterable): + it = iter(iterable) + + try: + first_value = next(it) + except StopIteration as e: + raise (ValueError("too few items in iterable (expected 1)")) from e + + try: + second_value = next(it) + except StopIteration: + pass + else: + msg = f"Expected exactly one item in iterable, but got {first_value}, {second_value}, and perhaps more." 
+ raise ValueError(msg) + + return first_value diff --git a/nautilus_trader/common/generators.pyx b/nautilus_trader/common/generators.pyx index d6483775756d..34b43d5d7007 100644 --- a/nautilus_trader/common/generators.pyx +++ b/nautilus_trader/common/generators.pyx @@ -26,20 +26,16 @@ from nautilus_trader.model.identifiers cimport TraderId cdef class IdentifierGenerator: """ Provides a generator for unique ID strings. + + Parameters + ---------- + trader_id : TraderId + The ID tag for the trader. + clock : Clock + The internal clock. """ def __init__(self, TraderId trader_id not None, Clock clock not None): - """ - Initialize a new instance of the ``IdentifierGenerator`` class. - - Parameters - ---------- - trader_id : TraderId - The ID tag for the trader. - clock : Clock - The internal clock. - - """ self._clock = clock self._id_tag_trader = trader_id.get_tag() diff --git a/nautilus_trader/common/logging.pxd b/nautilus_trader/common/logging.pxd index c6c54d4d8273..7c3988d9f663 100644 --- a/nautilus_trader/common/logging.pxd +++ b/nautilus_trader/common/logging.pxd @@ -15,13 +15,10 @@ from typing import Callable -from cpython.datetime cimport datetime -from cpython.datetime cimport timedelta from libc.stdint cimport uint64_t from nautilus_trader.common.clock cimport Clock from nautilus_trader.common.logging cimport Logger -from nautilus_trader.common.queue cimport Queue from nautilus_trader.core.rust.common cimport CLogger from nautilus_trader.core.rust.common cimport LogColor from nautilus_trader.core.rust.common cimport LogLevel @@ -81,16 +78,3 @@ cdef class LoggerAdapter: cpdef void nautilus_header(LoggerAdapter logger) except * cpdef void log_memory(LoggerAdapter logger) except * - - -cdef class LiveLogger(Logger): - cdef object _loop - cdef object _run_task - cdef timedelta _blocked_log_interval - cdef Queue _queue - cdef bint _is_running - cdef datetime _last_blocked - - cpdef void start(self) except * - cpdef void stop(self) except * - cdef void _enqueue_sentinel(self) except * diff --git a/nautilus_trader/common/logging.pyx b/nautilus_trader/common/logging.pyx index 276620e07746..b6f9e187f1b2 100644 --- a/nautilus_trader/common/logging.pyx +++ b/nautilus_trader/common/logging.pyx @@ -13,15 +13,11 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -import asyncio import platform import socket import sys import traceback -from asyncio import Task -from collections import defaultdict from platform import python_version -from typing import Optional import aiohttp import msgspec @@ -33,14 +29,11 @@ import pytz from nautilus_trader import __version__ -from cpython.datetime cimport timedelta from libc.stdint cimport uint64_t from nautilus_trader.common.clock cimport Clock -from nautilus_trader.common.clock cimport LiveClock from nautilus_trader.common.enums_c cimport log_level_to_str from nautilus_trader.common.logging cimport Logger -from nautilus_trader.common.queue cimport Queue from nautilus_trader.core.correctness cimport Condition from nautilus_trader.core.rust.common cimport LogColor from nautilus_trader.core.rust.common cimport LogLevel @@ -83,8 +76,10 @@ cdef class Logger: The instance ID. level_stdout : LogLevel The minimum log level for logging messages to stdout. + rate_limit : int, default 100_000 + The maximum messages per second which can be flushed to stdout or stderr. bypass : bool - If the logger should be bypassed. + If the log output is bypassed. 
""" def __init__( @@ -94,6 +89,7 @@ cdef class Logger: str machine_id = None, UUID4 instance_id = None, LogLevel level_stdout = LogLevel.INFO, + int rate_limit = 100_000, bint bypass = False, ): if trader_id is None: @@ -112,6 +108,7 @@ cdef class Logger: pystr_to_cstr(machine_id), pystr_to_cstr(instance_id_str), level_stdout, + rate_limit, bypass, ) self._sinks = [] @@ -648,221 +645,3 @@ cpdef void log_memory(LoggerAdapter logger) except *: logger.warning(f"RAM-Avail: {ram_avail_mb:,} MB ({ram_avail_pc:.2f}%)") else: logger.info(f"RAM-Avail: {ram_avail_mb:,} MB ({ram_avail_pc:.2f}%)") - - -cdef class LiveLogger(Logger): - """ - Provides a high-performance logger which runs on the event loop. - - Parameters - ---------- - loop : asyncio.AbstractEventLoop - The event loop to run the logger on. - clock : LiveClock - The clock for the logger. - trader_id : TraderId, optional - The trader ID for the logger. - machine_id : str, optional - The machine ID for the logger. - instance_id : UUID4, optional - The systems unique instantiation ID. - level_stdout : LogLevel - The minimum log level for logging messages to stdout. - bypass : bool - If the logger should be bypassed. - maxsize : int, optional - The maximum capacity for the log queue. - """ - _sentinel = None - - def __init__( - self, - loop not None, - LiveClock clock not None, - TraderId trader_id = None, - str machine_id = None, - UUID4 instance_id = None, - LogLevel level_stdout = LogLevel.INFO, - bint bypass = False, - int maxsize = 10000, - ): - super().__init__( - clock=clock, - trader_id=trader_id, - machine_id=machine_id, - instance_id=instance_id, - level_stdout=level_stdout, - bypass=bypass, - ) - - self._loop = loop - self._queue = Queue(maxsize=maxsize) - self._run_task: Optional[Task] = None - self._blocked_log_interval = timedelta(seconds=1) - - self._is_running = False - self._last_blocked: Optional[datetime] = None - - @property - def is_running(self) -> bool: - """ - Return whether the logger is running. - - Returns - ------- - bool - - """ - return self._is_running - - @property - def last_blocked(self) -> Optional[datetime]: - """ - Return the timestamp (UTC) the logger last blocked. - - Returns - ------- - datetime or ``None`` - - """ - return self._last_blocked - - def get_run_task(self) -> asyncio.Task: - """ - Return the internal run queue task for the engine. - - Returns - ------- - asyncio.Task - - """ - return self._run_task - - cdef void log( - self, - uint64_t timestamp_ns, - LogLevel level, - LogColor color, - str component, - str msg, - dict annotations = None, - ) except *: - """ - Log the given message. - - If the internal queue is already full then will log a warning and block - until queue size reduces. - - If the event loop is not running then messages will be passed directly - to the `Logger` base class for logging. 
- - """ - Condition.not_none(component, "component") - Condition.not_none(msg, "msg") - - if self._is_running: - try: - self._queue.put_nowait((timestamp_ns, level, color, component, msg, annotations)) - except asyncio.QueueFull: - now = self._clock.utc_now() - next_msg = self._queue.peek_front()[4] - - # Log blocking message once a second - if ( - self._last_blocked is None - or now >= self._last_blocked + self._blocked_log_interval - ): - self._last_blocked = now - - messages = [r[4] for r in self._queue.to_list()] - message_types = defaultdict(lambda: 0) - for msg in messages: - message_types[msg] += 1 - sorted_types = sorted( - message_types.items(), - key=lambda kv: kv[1], - reverse=True, - ) - - blocked_msg = '\n'.join([f"'{kv[0]}' [x{kv[1]}]" for kv in sorted_types]) - log_msg = (f"Blocking full log queue at " - f"{self._queue.qsize()} items. " - f"\nNext msg = '{next_msg}'.\n{blocked_msg}") - - self._log( - timestamp_ns, - LogLevel.WARNING, - LogColor.YELLOW, - type(self).__name__, - log_msg, - annotations, - ) - - # If not spamming then add record to event loop - if next_msg != msg: - self._loop.create_task(self._queue.put((timestamp_ns, level, color, component, msg, annotations))) # Blocking until qsize reduces - else: - # If event loop is not running then pass message directly to the - # base class to log. - self._log( - timestamp_ns, - level, - color, - component, - msg, - annotations, - ) - - cpdef void start(self) except *: - """ - Start the logger on a running event loop. - """ - if not self._is_running: - self._run_task = self._loop.create_task(self._consume_messages()) - self._is_running = True - - cpdef void stop(self) except *: - """ - Stop the logger by canceling the internal event loop task. - - Future messages sent to the logger will be passed directly to the - `Logger` base class for logging. - - """ - if self._run_task: - self._is_running = False - self._enqueue_sentinel() - - async def _consume_messages(self): - cdef tuple record - try: - while self._is_running: - record = await self._queue.get() - if record is None: # Sentinel message (fast C-level check) - continue # Returns to the top to check `self._is_running` - self._log( - record[0], - record[1], - record[2], - record[3], - record[4], - record[5], - ) - except asyncio.CancelledError: - pass - finally: - # Pass remaining messages directly to the base class - while not self._queue.empty(): - record = self._queue.get_nowait() - if record: - self._log( - record[0], - record[1], - record[2], - record[3], - record[4], - record[5], - ) - - cdef void _enqueue_sentinel(self) except *: - self._queue.put_nowait(self._sentinel) diff --git a/nautilus_trader/config/backtest.py b/nautilus_trader/config/backtest.py index 49aad56d6ad7..05a663291a7f 100644 --- a/nautilus_trader/config/backtest.py +++ b/nautilus_trader/config/backtest.py @@ -30,10 +30,11 @@ from nautilus_trader.config.common import NautilusKernelConfig from nautilus_trader.config.common import RiskEngineConfig from nautilus_trader.core.datetime import maybe_dt_to_unix_nanos +from nautilus_trader.model.data.bar import Bar from nautilus_trader.model.identifiers import ClientId -class BacktestVenueConfig(NautilusConfig): +class BacktestVenueConfig(NautilusConfig, frozen=True): """ Represents a venue configuration for one specific backtest engine. 
""" @@ -53,7 +54,7 @@ class BacktestVenueConfig(NautilusConfig): modules: Optional[list[ImportableConfig]] = None -class BacktestDataConfig(NautilusConfig): +class BacktestDataConfig(NautilusConfig, frozen=True): """ Represents the data configuration for one specific backtest run. """ @@ -68,6 +69,9 @@ class BacktestDataConfig(NautilusConfig): filter_expr: Optional[str] = None client_id: Optional[str] = None metadata: Optional[dict] = None + bar_spec: Optional[str] = None + use_rust: Optional[bool] = False + batch_size: Optional[int] = 10_000 @property def data_type(self): @@ -80,13 +84,21 @@ def data_type(self): @property def query(self): + if self.data_cls is Bar and self.bar_spec: + bar_type = f"{self.instrument_id}-{self.bar_spec}-EXTERNAL" + filter_expr = f'field("bar_type") == "{bar_type}"' + else: + filter_expr = self.filter_expr + return dict( cls=self.data_type, instrument_ids=[self.instrument_id] if self.instrument_id else None, start=self.start_time, end=self.end_time, - filter_expr=self.filter_expr, + filter_expr=parse_filters_expr(filter_expr), as_nautilus=True, + metadata=self.metadata, + use_rust=self.use_rust, ) @property @@ -114,14 +126,14 @@ def load( self, start_time: Optional[pd.Timestamp] = None, end_time: Optional[pd.Timestamp] = None, + as_nautilus: bool = True, ): query = self.query query.update( { "start": start_time or query["start"], "end": end_time or query["end"], - "filter_expr": parse_filters_expr(query.pop("filter_expr", "None")), - "metadata": self.metadata, + "as_nautilus": as_nautilus, }, ) @@ -138,7 +150,7 @@ def load( } -class BacktestEngineConfig(NautilusKernelConfig): +class BacktestEngineConfig(NautilusKernelConfig, frozen=True): """ Configuration for ``BacktestEngine`` instances. @@ -185,7 +197,7 @@ class BacktestEngineConfig(NautilusKernelConfig): run_analysis: bool = True -class BacktestRunConfig(NautilusConfig): +class BacktestRunConfig(NautilusConfig, frozen=True): """ Represents the configuration for one specific backtest run. diff --git a/nautilus_trader/config/common.py b/nautilus_trader/config/common.py index a256fe1f2756..9ccb1a338d13 100644 --- a/nautilus_trader/config/common.py +++ b/nautilus_trader/config/common.py @@ -33,7 +33,7 @@ def resolve_path(path: str): return cls -class NautilusConfig(msgspec.Struct, kw_only=True): +class NautilusConfig(msgspec.Struct, kw_only=True, frozen=True): """ The base class for all Nautilus configuration objects. """ @@ -107,7 +107,7 @@ def validate(self) -> bool: return bool(msgspec.json.decode(self.json(), type=self.__class__)) -class CacheConfig(NautilusConfig): +class CacheConfig(NautilusConfig, frozen=True): """ Configuration for ``Cache`` instances. @@ -123,7 +123,7 @@ class CacheConfig(NautilusConfig): bar_capacity: PositiveInt = 1000 -class CacheDatabaseConfig(NautilusConfig): +class CacheDatabaseConfig(NautilusConfig, frozen=True): """ Configuration for ``CacheDatabase`` instances. @@ -154,7 +154,7 @@ class CacheDatabaseConfig(NautilusConfig): flush: bool = False -class InstrumentProviderConfig(NautilusConfig): +class InstrumentProviderConfig(NautilusConfig, frozen=True): """ Configuration for ``InstrumentProvider`` instances. @@ -190,26 +190,30 @@ def __hash__(self): log_warnings: bool = True -class DataEngineConfig(NautilusConfig): +class DataEngineConfig(NautilusConfig, frozen=True): """ Configuration for ``DataEngine`` instances. 
Parameters ---------- - build_time_bars_with_no_updates : bool, default True + time_bars_build_with_no_updates : bool, default True If time bar aggregators will build and emit bars with no new market updates. + time_bars_timestamp_on_close : bool, default True + If time bar aggregators will timestamp `ts_event` on bar close. + If False then will timestamp on bar open. validate_data_sequence : bool, default False If data objects timestamp sequencing will be validated and handled. debug : bool, default False If debug mode is active (will provide extra debug logging). """ - build_time_bars_with_no_updates: bool = True + time_bars_build_with_no_updates: bool = True + time_bars_timestamp_on_close: bool = True validate_data_sequence: bool = False debug: bool = False -class RiskEngineConfig(NautilusConfig): +class RiskEngineConfig(NautilusConfig, frozen=True): """ Configuration for ``RiskEngine`` instances. @@ -238,7 +242,7 @@ class RiskEngineConfig(NautilusConfig): debug: bool = False -class ExecEngineConfig(NautilusConfig): +class ExecEngineConfig(NautilusConfig, frozen=True): """ Configuration for ``ExecutionEngine`` instances. @@ -257,13 +261,13 @@ class ExecEngineConfig(NautilusConfig): debug: bool = False -class OrderEmulatorConfig(NautilusConfig): +class OrderEmulatorConfig(NautilusConfig, frozen=True): """ Configuration for ``OrderEmulator`` instances. """ -class StreamingConfig(NautilusConfig): +class StreamingConfig(NautilusConfig, frozen=True): """ Configuration for streaming live or backtest runs to the catalog in feather format. @@ -300,7 +304,7 @@ def as_catalog(self) -> ParquetDataCatalog: ) -class ActorConfig(NautilusConfig, kw_only=True): +class ActorConfig(NautilusConfig, kw_only=True, frozen=True): """ The base model for all actor configurations. @@ -315,7 +319,7 @@ class ActorConfig(NautilusConfig, kw_only=True): component_id: Optional[str] = None -class ImportableActorConfig(NautilusConfig): +class ImportableActorConfig(NautilusConfig, frozen=True): """ Represents an actor configuration for one specific backtest run. @@ -365,7 +369,7 @@ def create(config: ImportableActorConfig): return actor_cls(config=config_cls(**config.config)) -class StrategyConfig(NautilusConfig, kw_only=True): +class StrategyConfig(NautilusConfig, kw_only=True, frozen=True): """ The base model for all trading strategy configurations. @@ -386,7 +390,7 @@ class StrategyConfig(NautilusConfig, kw_only=True): oms_type: Optional[str] = None -class ImportableStrategyConfig(NautilusConfig): +class ImportableStrategyConfig(NautilusConfig, frozen=True): """ Represents a trading strategy configuration for one specific backtest run. @@ -436,7 +440,7 @@ def create(config: ImportableStrategyConfig): return strategy_cls(config=config_cls(**config.config)) -class NautilusKernelConfig(NautilusConfig): +class NautilusKernelConfig(NautilusConfig, frozen=True): """ Configuration for core system ``NautilusKernel`` instances. @@ -470,6 +474,8 @@ class NautilusKernelConfig(NautilusConfig): If the asyncio event loop should be in debug mode. log_level : str, default "INFO" The stdout log level for the node. + log_rate_limit : int, default 100_000 + The maximum messages per second which can be flushed to stdout or stderr. bypass_logging : bool, default False If logging to stdout should be bypassed. 
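The renamed time-bar settings in `DataEngineConfig` above can be exercised as follows; a sketch, with values chosen purely for illustration:

    from nautilus_trader.config import DataEngineConfig

    config = DataEngineConfig(
        time_bars_build_with_no_updates=False,  # skip bars with no market updates
        time_bars_timestamp_on_close=True,      # ts_event stamped on bar close
    )

With `time_bars_timestamp_on_close=False`, `ts_event` is stamped with the bar open time instead, as implemented in the `aggregation.pyx` changes below.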
""" @@ -489,10 +495,11 @@ class NautilusKernelConfig(NautilusConfig): save_state: bool = False loop_debug: bool = False log_level: str = "INFO" + log_rate_limit: int = 100_000 bypass_logging: bool = False -class ImportableFactoryConfig(NautilusConfig): +class ImportableFactoryConfig(NautilusConfig, frozen=True): """ Represents an importable (json) Factory config. """ @@ -504,9 +511,9 @@ def create(self): return cls() -class ImportableConfig(NautilusConfig): +class ImportableConfig(NautilusConfig, frozen=True): """ - Represents an importable (typically live data or execution) client configuration. + Represents an importable (typically live data client or live execution client) configuration. """ path: str diff --git a/nautilus_trader/config/live.py b/nautilus_trader/config/live.py index 889c98c97868..a4d731a5909e 100644 --- a/nautilus_trader/config/live.py +++ b/nautilus_trader/config/live.py @@ -28,7 +28,7 @@ from nautilus_trader.config.validation import PositiveInt -class LiveDataEngineConfig(DataEngineConfig): +class LiveDataEngineConfig(DataEngineConfig, frozen=True): """ Configuration for ``LiveDataEngine`` instances. @@ -41,7 +41,7 @@ class LiveDataEngineConfig(DataEngineConfig): qsize: PositiveInt = 10000 -class LiveRiskEngineConfig(RiskEngineConfig): +class LiveRiskEngineConfig(RiskEngineConfig, frozen=True): """ Configuration for ``LiveRiskEngine`` instances. @@ -54,7 +54,7 @@ class LiveRiskEngineConfig(RiskEngineConfig): qsize: PositiveInt = 10000 -class LiveExecEngineConfig(ExecEngineConfig): +class LiveExecEngineConfig(ExecEngineConfig, frozen=True): """ Configuration for ``LiveExecEngine`` instances. @@ -82,7 +82,7 @@ class LiveExecEngineConfig(ExecEngineConfig): qsize: PositiveInt = 10000 -class RoutingConfig(NautilusConfig): +class RoutingConfig(NautilusConfig, frozen=True): """ Configuration for live client message routing. @@ -99,7 +99,7 @@ class RoutingConfig(NautilusConfig): venues: Optional[frozenset[str]] = None -class LiveDataClientConfig(NautilusConfig): +class LiveDataClientConfig(NautilusConfig, frozen=True): """ Configuration for ``LiveDataClient`` instances. @@ -118,7 +118,7 @@ class LiveDataClientConfig(NautilusConfig): routing: RoutingConfig = RoutingConfig() -class LiveExecClientConfig(NautilusConfig): +class LiveExecClientConfig(NautilusConfig, frozen=True): """ Configuration for ``LiveExecutionClient`` instances. @@ -134,7 +134,7 @@ class LiveExecClientConfig(NautilusConfig): routing: RoutingConfig = RoutingConfig() -class TradingNodeConfig(NautilusKernelConfig): +class TradingNodeConfig(NautilusKernelConfig, frozen=True): """ Configuration for ``TradingNode`` instances. @@ -154,9 +154,9 @@ class TradingNodeConfig(NautilusKernelConfig): The live execution engine configuration. streaming : StreamingConfig, optional The configuration for streaming to feather files. - data_clients : dict[str, ImportableConfig], optional + data_clients : dict[str, ImportableConfig | LiveDataClientConfig], optional The data client configurations. - exec_clients : dict[str, ImportableConfig], optional + exec_clients : dict[str, ImportableConfig | LiveExecClientConfig], optional The execution client configurations. strategies : list[ImportableStrategyConfig] The strategy configurations for the node. @@ -178,7 +178,6 @@ class TradingNodeConfig(NautilusKernelConfig): The timeout for all engine clients to disconnect. timeout_post_stop : PositiveFloat (seconds) The timeout after stopping the node to await residual events before final shutdown. 
- """ environment: Environment = Environment.LIVE diff --git a/nautilus_trader/core/asynchronous.py b/nautilus_trader/core/asynchronous.py index d97bc0b7d467..7ebc4ab5c659 100644 --- a/nautilus_trader/core/asynchronous.py +++ b/nautilus_trader/core/asynchronous.py @@ -22,7 +22,7 @@ def sleep0(): Skip one event loop run cycle. This is equivalent to `asyncio.sleep(0)` however avoids the overhead - of the pure Python function call and integer comparison <= 0. + of the Python function call and integer comparison <= 0. Uses a bare 'yield' expression (which Task.__step knows how to handle) instead of creating a Future object. diff --git a/nautilus_trader/core/data.pyx b/nautilus_trader/core/data.pyx index d7783f7da6d3..77ae5e01ecd3 100644 --- a/nautilus_trader/core/data.pyx +++ b/nautilus_trader/core/data.pyx @@ -37,7 +37,7 @@ cdef class Data: This class should not be used directly, but through a concrete subclass. """ - def __init__(self, uint64_t ts_event, uint64_t ts_init): + def __init__(self, uint64_t ts_event, uint64_t ts_init) -> None: # Design-time invariant: correct ordering of timestamps. # This was originally an `assert` to aid initial development of the core # system. It can be used to assist development by uncommenting below. diff --git a/nautilus_trader/core/includes/common.h b/nautilus_trader/core/includes/common.h index 94bbf7a5ca52..65556eeaeb40 100644 --- a/nautilus_trader/core/includes/common.h +++ b/nautilus_trader/core/includes/common.h @@ -210,12 +210,11 @@ struct CLogger logger_new(const char *trader_id_ptr, const char *machine_id_ptr, const char *instance_id_ptr, enum LogLevel level_stdout, + uintptr_t rate_limit, uint8_t is_bypassed); void logger_free(struct CLogger logger); -void flush(struct CLogger *logger); - const char *logger_get_trader_id_cstr(const struct CLogger *logger); const char *logger_get_machine_id_cstr(const struct CLogger *logger); diff --git a/nautilus_trader/core/includes/core.h b/nautilus_trader/core/includes/core.h index 839e5ef6ad7f..1a435fb597ce 100644 --- a/nautilus_trader/core/includes/core.h +++ b/nautilus_trader/core/includes/core.h @@ -8,7 +8,7 @@ typedef struct Rc_String Rc_String; /** - * CVec is a C compatible struct that stores an opaque pointer to a block of + * `CVec` is a C compatible struct that stores an opaque pointer to a block of * memory, it's length and the capacity of the vector it was allocated from. * * NOTE: Changing the values here may lead to undefined behaviour when the @@ -125,6 +125,8 @@ void uuid4_free(struct UUID4_t uuid4); * * # Safety * - Assumes `ptr` is a valid C string pointer. + * # Panics + * - If `ptr` cannot be cast to a valid C string. 
*/ struct UUID4_t uuid4_from_cstr(const char *ptr); diff --git a/nautilus_trader/core/includes/model.h b/nautilus_trader/core/includes/model.h index 845990b78d57..2336bf2caaed 100644 --- a/nautilus_trader/core/includes/model.h +++ b/nautilus_trader/core/includes/model.h @@ -368,8 +368,6 @@ typedef struct Money_t { */ const char *bar_specification_to_cstr(const struct BarSpecification_t *bar_spec); -void bar_specification_free(struct BarSpecification_t bar_spec); - uint64_t bar_specification_hash(const struct BarSpecification_t *bar_spec); struct BarSpecification_t bar_specification_new(uint64_t step, @@ -1120,8 +1118,6 @@ struct Price_t price_new(double value, uint8_t precision); struct Price_t price_from_raw(int64_t raw, uint8_t precision); -void price_free(struct Price_t price); - double price_as_f64(const struct Price_t *price); void price_add_assign(struct Price_t a, struct Price_t b); @@ -1132,8 +1128,6 @@ struct Quantity_t quantity_new(double value, uint8_t precision); struct Quantity_t quantity_from_raw(uint64_t raw, uint8_t precision); -void quantity_free(struct Quantity_t qty); - double quantity_as_f64(const struct Quantity_t *qty); void quantity_add_assign(struct Quantity_t a, struct Quantity_t b); diff --git a/nautilus_trader/core/rust/common.pxd b/nautilus_trader/core/rust/common.pxd index bedb9e7d53bc..040e20feb007 100644 --- a/nautilus_trader/core/rust/common.pxd +++ b/nautilus_trader/core/rust/common.pxd @@ -172,12 +172,11 @@ cdef extern from "../includes/common.h": const char *machine_id_ptr, const char *instance_id_ptr, LogLevel level_stdout, + uintptr_t rate_limit, uint8_t is_bypassed); void logger_free(CLogger logger); - void flush(CLogger *logger); - const char *logger_get_trader_id_cstr(const CLogger *logger); const char *logger_get_machine_id_cstr(const CLogger *logger); diff --git a/nautilus_trader/core/rust/core.pxd b/nautilus_trader/core/rust/core.pxd index ef37e45db925..16bd7b1e9298 100644 --- a/nautilus_trader/core/rust/core.pxd +++ b/nautilus_trader/core/rust/core.pxd @@ -7,7 +7,7 @@ cdef extern from "../includes/core.h": cdef struct Rc_String: pass - # CVec is a C compatible struct that stores an opaque pointer to a block of + # `CVec` is a C compatible struct that stores an opaque pointer to a block of # memory, it's length and the capacity of the vector it was allocated from. # # NOTE: Changing the values here may lead to undefined behaviour when the @@ -88,6 +88,8 @@ cdef extern from "../includes/core.h": # # # Safety # - Assumes `ptr` is a valid C string pointer. + # # Panics + # - If `ptr` cannot be cast to a valid C string. UUID4_t uuid4_from_cstr(const char *ptr); const char *uuid4_to_cstr(const UUID4_t *uuid); diff --git a/nautilus_trader/core/rust/model.pxd b/nautilus_trader/core/rust/model.pxd index 429599072e84..b3d2cb2f1936 100644 --- a/nautilus_trader/core/rust/model.pxd +++ b/nautilus_trader/core/rust/model.pxd @@ -309,8 +309,6 @@ cdef extern from "../includes/model.h": # Returns a [`BarSpecification`] as a C string pointer. 
const char *bar_specification_to_cstr(const BarSpecification_t *bar_spec); - void bar_specification_free(BarSpecification_t bar_spec); - uint64_t bar_specification_hash(const BarSpecification_t *bar_spec); BarSpecification_t bar_specification_new(uint64_t step, @@ -913,8 +911,6 @@ cdef extern from "../includes/model.h": Price_t price_from_raw(int64_t raw, uint8_t precision); - void price_free(Price_t price); - double price_as_f64(const Price_t *price); void price_add_assign(Price_t a, Price_t b); @@ -925,8 +921,6 @@ cdef extern from "../includes/model.h": Quantity_t quantity_from_raw(uint64_t raw, uint8_t precision); - void quantity_free(Quantity_t qty); - double quantity_as_f64(const Quantity_t *qty); void quantity_add_assign(Quantity_t a, Quantity_t b); diff --git a/nautilus_trader/data/aggregation.pxd b/nautilus_trader/data/aggregation.pxd index 9aea5c89d416..e3857924091c 100644 --- a/nautilus_trader/data/aggregation.pxd +++ b/nautilus_trader/data/aggregation.pxd @@ -55,7 +55,7 @@ cdef class BarBuilder: cpdef void update(self, Price price, Quantity size, uint64_t ts_event) except * cpdef void reset(self) except * cpdef Bar build_now(self) - cpdef Bar build(self, uint64_t ts_event) + cpdef Bar build(self, uint64_t ts_event, uint64_t ts_init) cdef class BarAggregator: @@ -70,7 +70,7 @@ cdef class BarAggregator: cpdef void handle_trade_tick(self, TradeTick tick) except * cdef void _apply_update(self, Price price, Quantity size, uint64_t ts_event) except * cdef void _build_now_and_send(self) except * - cdef void _build_and_send(self, uint64_t ts_event) except * + cdef void _build_and_send(self, uint64_t ts_event, uint64_t ts_init) except * cdef class TickBarAggregator(BarAggregator): @@ -90,10 +90,12 @@ cdef class ValueBarAggregator(BarAggregator): cdef class TimeBarAggregator(BarAggregator): cdef Clock _clock cdef bint _build_on_next_tick + cdef uint64_t _stored_open_ns cdef uint64_t _stored_close_ns cdef tuple _cached_update cdef str _timer_name - cdef bint _build_bars_with_no_updates + cdef bint _build_with_no_updates + cdef bint _timestamp_on_close cdef readonly timedelta interval """The aggregators time interval.\n\n:returns: `timedelta`""" diff --git a/nautilus_trader/data/aggregation.pyx b/nautilus_trader/data/aggregation.pyx index 1c16f6df5988..bd22187a8eb8 100644 --- a/nautilus_trader/data/aggregation.pyx +++ b/nautilus_trader/data/aggregation.pyx @@ -25,6 +25,7 @@ from nautilus_trader.common.logging cimport Logger from nautilus_trader.common.logging cimport LoggerAdapter from nautilus_trader.common.timer cimport TimeEvent from nautilus_trader.core.correctness cimport Condition +from nautilus_trader.core.datetime cimport dt_to_unix_nanos from nautilus_trader.core.rust.core cimport millis_to_nanos from nautilus_trader.core.rust.core cimport secs_to_nanos from nautilus_trader.model.data.bar cimport Bar @@ -182,16 +183,18 @@ cdef class BarBuilder: Bar """ - return self.build(self.ts_last) + return self.build(self.ts_last, self.ts_last) - cpdef Bar build(self, uint64_t ts_event): + cpdef Bar build(self, uint64_t ts_event, uint64_t ts_init): """ Return the aggregated bar with the given closing timestamp, and reset. Parameters ---------- ts_event : uint64_t - The UNIX timestamp (nanoseconds) of the bar close. + The UNIX timestamp (nanoseconds) for the bar event. + ts_init : uint64_t + The UNIX timestamp (nanoseconds) for the bar initialization. 
Returns ------- @@ -212,7 +215,7 @@ cdef class BarBuilder: close=self._close, volume=Quantity(self.volume, self.size_precision), ts_event=ts_event, - ts_init=ts_event, + ts_init=ts_init, ) self._last_close = self._close @@ -304,8 +307,8 @@ cdef class BarAggregator: cdef Bar bar = self._builder.build_now() self._handler(bar) - cdef void _build_and_send(self, uint64_t ts_event) except *: - cdef Bar bar = self._builder.build(ts_event) + cdef void _build_and_send(self, uint64_t ts_event, uint64_t ts_init) except *: + cdef Bar bar = self._builder.build(ts_event=ts_event, ts_init=ts_init) self._handler(bar) @@ -526,8 +529,11 @@ cdef class TimeBarAggregator(BarAggregator): The clock for the aggregator. logger : Logger The logger for the aggregator. - build_bars_with_no_updates : bool, default True + build_with_no_updates : bool, default True If build and emit bars with no new market updates. + timestamp_on_close : bool, default True + If timestamp `ts_event` will be bar close. + If False then timestamp will be bar open. Raises ------ @@ -541,7 +547,8 @@ cdef class TimeBarAggregator(BarAggregator): handler not None: Callable[[Bar], None], Clock clock not None, Logger logger not None, - bint build_bars_with_no_updates = True, + bint build_with_no_updates = True, + bint timestamp_on_close = True, ): super().__init__( instrument=instrument, @@ -557,9 +564,11 @@ cdef class TimeBarAggregator(BarAggregator): self._set_build_timer() self.next_close_ns = self._clock.next_time_ns(self._timer_name) self._build_on_next_tick = False + self._stored_open_ns = dt_to_unix_nanos(self.get_start_time()) self._stored_close_ns = 0 self._cached_update = None - self._build_bars_with_no_updates = build_bars_with_no_updates + self._build_with_no_updates = build_with_no_updates + self._timestamp_on_close = timestamp_on_close def __str__(self): return f"{type(self).__name__}(interval_ns={self.interval_ns}, next_close_ns={self.next_close_ns})" @@ -705,7 +714,12 @@ cdef class TimeBarAggregator(BarAggregator): cdef void _apply_update(self, Price price, Quantity size, uint64_t ts_event) except *: self._builder.update(price, size, ts_event) if self._build_on_next_tick: # (fast C-level check) - self._build_and_send(self._stored_close_ns) + ts_init = ts_event + ts_event = self._stored_close_ns + if not self._timestamp_on_close: + # Timestamp on open + ts_event = self._stored_open_ns + self._build_and_send(ts_event=ts_event, ts_init=ts_init) # Reset flag and clear stored close self._build_on_next_tick = False self._stored_close_ns = 0 @@ -717,10 +731,18 @@ cdef class TimeBarAggregator(BarAggregator): self._stored_close_ns = self.next_close_ns return - if not self._build_bars_with_no_updates and self._builder.count == 0: + if not self._build_with_no_updates and self._builder.count == 0: return # Do not build and emit bar - self._build_and_send(ts_event=event.ts_event) + cdef uint64_t ts_init = event.ts_event + cdef uint64_t ts_event = event.ts_event + if not self._timestamp_on_close: + # Timestamp on open + ts_event = self._stored_open_ns + self._build_and_send(ts_event=ts_event, ts_init=ts_init) + + # Close time becomes the next open time + self._stored_open_ns = event.ts_event # On receiving this event, timer should now have a new `next_time_ns` self.next_close_ns = self._clock.next_time_ns(self._timer_name) diff --git a/nautilus_trader/data/client.pyx b/nautilus_trader/data/client.pyx index 9438c513a794..ed3c4d1acd9f 100644 --- a/nautilus_trader/data/client.pyx +++ b/nautilus_trader/data/client.pyx @@ -94,7 +94,7 @@ cdef class 
DataClient(Component): cpdef void _set_connected(self, bint value=True) except *: """ - Setter for pure Python implementations to change the readonly property. + Setter for Python implementations to change the readonly property. Parameters ---------- @@ -1008,7 +1008,7 @@ cdef class MarketDataClient(DataClient): # -- PYTHON WRAPPERS ------------------------------------------------------------------------------ - # Convenient pure Python wrappers for the data handlers. Often Python methods + # Convenient Python wrappers for the data handlers. Often Python methods # involving threads or the event loop don't work with `cpdef` methods. def _handle_data_py(self, Data data): diff --git a/nautilus_trader/data/engine.pxd b/nautilus_trader/data/engine.pxd index 8f9178d3af0b..1a17414ccd2e 100644 --- a/nautilus_trader/data/engine.pxd +++ b/nautilus_trader/data/engine.pxd @@ -48,7 +48,8 @@ cdef class DataEngine(Component): cdef dict _routing_map cdef dict _order_book_intervals cdef dict _bar_aggregators - cdef bint _build_time_bars_with_no_updates + cdef bint _time_bars_build_with_no_updates + cdef bint _time_bars_timestamp_on_close cdef bint _validate_data_sequence cdef readonly bint debug diff --git a/nautilus_trader/data/engine.pyx b/nautilus_trader/data/engine.pyx index c64ed467768d..08d7837b54df 100644 --- a/nautilus_trader/data/engine.pyx +++ b/nautilus_trader/data/engine.pyx @@ -126,7 +126,8 @@ cdef class DataEngine(Component): # Settings self.debug = config.debug - self._build_time_bars_with_no_updates = config.build_time_bars_with_no_updates + self._time_bars_build_with_no_updates = config.time_bars_build_with_no_updates + self._time_bars_timestamp_on_close = config.time_bars_timestamp_on_close self._validate_data_sequence = config.validate_data_sequence # Counters @@ -1330,7 +1331,8 @@ cdef class DataEngine(Component): handler=self.process, clock=self._clock, logger=self._log.get_logger(), - build_bars_with_no_updates=self._build_time_bars_with_no_updates, + build_with_no_updates=self._time_bars_build_with_no_updates, + timestamp_on_close=self._time_bars_timestamp_on_close, ) elif bar_type.spec.aggregation == BarAggregation.TICK: aggregator = TickBarAggregator( diff --git a/nautilus_trader/examples/strategies/orderbook_imbalance.py b/nautilus_trader/examples/strategies/orderbook_imbalance.py index fe9010eb3a2e..178f5b645424 100644 --- a/nautilus_trader/examples/strategies/orderbook_imbalance.py +++ b/nautilus_trader/examples/strategies/orderbook_imbalance.py @@ -65,6 +65,7 @@ class OrderBookImbalanceConfig(StrategyConfig): trigger_imbalance_ratio: float = 0.20 book_type: str = "L2_MBP" use_quote_ticks: bool = False + subscribe_ticker: bool = False class OrderBookImbalance(Strategy): @@ -105,10 +106,12 @@ def on_start(self): if self.config.use_quote_ticks: book_type = BookType.L1_TBBO - self.subscribe_quote_ticks(instrument_id=self.instrument.id) + self.subscribe_quote_ticks(self.instrument.id) else: book_type = book_type_from_str(self.config.book_type) - self.subscribe_order_book_deltas(instrument_id=self.instrument.id, book_type=book_type) + self.subscribe_order_book_deltas(self.instrument.id, book_type) + if self.config.subscribe_ticker: + self.subscribe_ticker(self.instrument.id) self._book = OrderBook.create(instrument=self.instrument, book_type=book_type) def on_order_book_delta(self, data: OrderBookData): diff --git a/nautilus_trader/execution/client.pyx b/nautilus_trader/execution/client.pyx index 877e2a81bd4c..a427d0015e24 100644 --- a/nautilus_trader/execution/client.pyx +++ 
b/nautilus_trader/execution/client.pyx @@ -142,7 +142,7 @@ cdef class ExecutionClient(Component): return f"{type(self).__name__}-{self.id.value}" cpdef void _set_connected(self, bint value=True) except *: - # Setter for pure Python implementations to change the readonly property + # Setter for Python implementations to change the readonly property self.is_connected = value cpdef void _set_account_id(self, AccountId account_id) except *: diff --git a/nautilus_trader/execution/emulator.pyx b/nautilus_trader/execution/emulator.pyx index a92b08fe6fe8..67a95a77b9cd 100644 --- a/nautilus_trader/execution/emulator.pyx +++ b/nautilus_trader/execution/emulator.pyx @@ -90,7 +90,6 @@ cdef class OrderEmulator(Actor): super().__init__() self.register_base( - trader_id=trader_id, msgbus=msgbus, cache=cache, clock=clock, diff --git a/nautilus_trader/indicators/linear_regression.pyx b/nautilus_trader/indicators/linear_regression.pyx index f73f97521e39..d957f898c757 100644 --- a/nautilus_trader/indicators/linear_regression.pyx +++ b/nautilus_trader/indicators/linear_regression.pyx @@ -28,23 +28,19 @@ from nautilus_trader.model.data.bar cimport Bar cdef class LinearRegression(Indicator): """ An indicator that calculates a simple linear regression. - """ - def __init__(self, int period=0): - """ - Initialize a new instance of the ``LinearRegression`` class. + Parameters + ---------- + period : int + The period for the indicator. - Parameters - ---------- - period : int - The period for the indicator. - - Raises - ------ - ValueError - If `period` is not greater than zero. + Raises + ------ + ValueError + If `period` is not greater than zero. + """ - """ + def __init__(self, int period=0): Condition.positive_int(period, "period") super().__init__(params=[period]) diff --git a/nautilus_trader/indicators/macd.pyx b/nautilus_trader/indicators/macd.pyx index 0f873a2be9ae..1ae1e049766b 100644 --- a/nautilus_trader/indicators/macd.pyx +++ b/nautilus_trader/indicators/macd.pyx @@ -29,6 +29,26 @@ cdef class MovingAverageConvergenceDivergence(Indicator): """ An indicator which calculates the difference between two moving averages. Different moving average types can be selected for the inner calculation. + + Parameters + ---------- + fast_period : int + The period for the fast moving average (> 0). + slow_period : int + The period for the slow moving average (> 0 & > fast_sma). + ma_type : MovingAverageType + The moving average type for the calculations. + price_type : PriceType + The specified price type for extracting values from quote ticks. + + Raises + ------ + ValueError + If `fast_period` is not positive (> 0). + ValueError + If `slow_period` is not positive (> 0). + ValueError + If `fast_period` is not < `slow_period`. """ def __init__( @@ -38,30 +58,6 @@ cdef class MovingAverageConvergenceDivergence(Indicator): ma_type not None: MovingAverageType=MovingAverageType.EXPONENTIAL, PriceType price_type=PriceType.LAST, ): - """ - Initialize a new instance of the ``MovingAverageConvergenceDivergence`` class. - - Parameters - ---------- - fast_period : int - The period for the fast moving average (> 0). - slow_period : int - The period for the slow moving average (> 0 & > fast_sma). - ma_type : MovingAverageType - The moving average type for the calculations. - price_type : PriceType - The specified price type for extracting values from quote ticks. - - Raises - ------ - ValueError - If `fast_period` is not positive (> 0). - ValueError - If `slow_period` is not positive (> 0). 
- ValueError - If `fast_period` is not < `slow_period`. - - """ Condition.positive_int(fast_period, "fast_period") Condition.positive_int(slow_period, "slow_period") Condition.true(slow_period > fast_period, "slow_period was <= fast_period") diff --git a/nautilus_trader/indicators/obv.pyx b/nautilus_trader/indicators/obv.pyx index dc5e71a056dc..7b0276dcc9c6 100644 --- a/nautilus_trader/indicators/obv.pyx +++ b/nautilus_trader/indicators/obv.pyx @@ -24,23 +24,19 @@ cdef class OnBalanceVolume(Indicator): """ An indicator which calculates the momentum of relative positive or negative volume. - """ - def __init__(self, int period=0): - """ - Initialize a new instance of the ``OnBalanceVolume`` class. + Parameters + ---------- + period : int + The period for the indicator, zero indicates no window (>= 0). - Parameters - ---------- - period : int - The period for the indicator, zero indicates no window (>= 0). - - Raises - ------ - ValueError - If `period` is negative (< 0). + Raises + ------ + ValueError + If `period` is negative (< 0). + """ - """ + def __init__(self, int period=0): Condition.not_negative(period, "period") super().__init__(params=[period]) diff --git a/nautilus_trader/indicators/pressure.pyx b/nautilus_trader/indicators/pressure.pyx index 90150d735b30..2cb9017bf493 100644 --- a/nautilus_trader/indicators/pressure.pyx +++ b/nautilus_trader/indicators/pressure.pyx @@ -26,6 +26,22 @@ cdef class Pressure(Indicator): """ An indicator which calculates the relative volume (multiple of average volume) to move the market across a relative range (multiple of ATR). + + Parameters + ---------- + period : int + The period for the indicator (> 0). + ma_type : MovingAverageType + The moving average type for the calculations. + atr_floor : double + The ATR floor (minimum) output value for the indicator (>= 0.). + + Raises + ------ + ValueError + If `period` is not positive (> 0). + ValueError + If `atr_floor` is negative (< 0). """ def __init__( @@ -34,26 +50,6 @@ cdef class Pressure(Indicator): ma_type not None: MovingAverageType=MovingAverageType.EXPONENTIAL, double atr_floor=0, ): - """ - Initialize a new instance of the ``Pressure`` class. - - Parameters - ---------- - period : int - The period for the indicator (> 0). - ma_type : MovingAverageType - The moving average type for the calculations. - atr_floor : double - The ATR floor (minimum) output value for the indicator (>= 0.). - - Raises - ------ - ValueError - If `period` is not positive (> 0). - ValueError - If `atr_floor` is negative (< 0). - - """ Condition.positive_int(period, "period") Condition.not_negative(atr_floor, "atr_floor") diff --git a/nautilus_trader/indicators/roc.pyx b/nautilus_trader/indicators/roc.pyx index 3153ea12c9b7..7fed366d2c0a 100644 --- a/nautilus_trader/indicators/roc.pyx +++ b/nautilus_trader/indicators/roc.pyx @@ -25,25 +25,21 @@ cdef class RateOfChange(Indicator): """ An indicator which calculates the rate of change of price over a defined period. The return output can be simple or log. + + Parameters + ---------- + period : int + The period for the indicator. + use_log : bool + Use log returns for value calculation. + + Raises + ------ + ValueError + If `period` is not > 1. """ def __init__(self, int period, bint use_log=False): - """ - Initialize a new instance of the ``RateOfChange`` class. - - Parameters - ---------- - period : int - The period for the indicator. - use_log : bool - Use log returns for value calculation. - - Raises - ------ - ValueError - If `period` is not > 1. 
- - """ Condition.true(period > 1, "period was <= 1") super().__init__(params=[period]) diff --git a/nautilus_trader/indicators/rsi.pyx b/nautilus_trader/indicators/rsi.pyx index 8411b9833831..6e16a180142c 100644 --- a/nautilus_trader/indicators/rsi.pyx +++ b/nautilus_trader/indicators/rsi.pyx @@ -24,6 +24,18 @@ from nautilus_trader.model.data.bar cimport Bar cdef class RelativeStrengthIndex(Indicator): """ An indicator which calculates a relative strength index (RSI) across a rolling window. + + Parameters + ---------- + ma_type : int + The moving average type for average gain/loss. + period : MovingAverageType + The rolling window period for the indicator. + + Raises + ------ + ValueError + If `period` is not positive (> 0). """ def __init__( @@ -31,22 +43,6 @@ cdef class RelativeStrengthIndex(Indicator): int period, ma_type not None: MovingAverageType=MovingAverageType.EXPONENTIAL, ): - """ - Initialize a new instance of the ``RelativeStrengthIndex`` class. - - Parameters - ---------- - ma_type : int - The moving average type for average gain/loss. - period : MovingAverageType - The rolling window period for the indicator. - - Raises - ------ - ValueError - If `period` is not positive (> 0). - - """ Condition.positive_int(period, "period") super().__init__(params=[period, ma_type.name]) diff --git a/nautilus_trader/indicators/spread_analyzer.pyx b/nautilus_trader/indicators/spread_analyzer.pyx index 4afa3928b5c8..f7021038799d 100644 --- a/nautilus_trader/indicators/spread_analyzer.pyx +++ b/nautilus_trader/indicators/spread_analyzer.pyx @@ -28,25 +28,21 @@ from nautilus_trader.model.objects cimport Price cdef class SpreadAnalyzer(Indicator): """ Provides various spread analysis metrics. - """ - - def __init__(self, InstrumentId instrument_id not None, int capacity): - """ - Initialize a new instance of the ``SpreadAnalyzer`` class. - - Parameters - ---------- - instrument_id : InstrumentId - The instrument ID for the tick updates. - capacity : int - The max length for the internal `QuoteTick` deque (determines averages). - Raises - ------ - ValueError - If `capacity` is not positive (> 0). + Parameters + ---------- + instrument_id : InstrumentId + The instrument ID for the tick updates. + capacity : int + The max length for the internal `QuoteTick` deque (determines averages). + + Raises + ------ + ValueError + If `capacity` is not positive (> 0). + """ - """ + def __init__(self, InstrumentId instrument_id not None, int capacity) -> None: Condition.positive_int(capacity, "capacity") super().__init__(params=[instrument_id, capacity]) diff --git a/nautilus_trader/indicators/stochastics.pyx b/nautilus_trader/indicators/stochastics.pyx index 1de7823cc6a5..c06892013d9a 100644 --- a/nautilus_trader/indicators/stochastics.pyx +++ b/nautilus_trader/indicators/stochastics.pyx @@ -25,30 +25,26 @@ cdef class Stochastics(Indicator): An oscillator which can indicate when an asset may be over bought or over sold. + Parameters + ---------- + period_k : int + The period for the K line. + period_d : int + The period for the D line. + + Raises + ------ + ValueError + If `period_k` is not positive (> 0). + ValueError + If `period_d` is not positive (> 0). + References ---------- https://www.forextraders.com/forex-education/forex-indicators/stochastics-indicator-explained/ """ def __init__(self, int period_k, int period_d): - """ - Initialize a new instance of the ``Stochastics`` class. - - Parameters - ---------- - period_k : int - The period for the K line. - period_d : int - The period for the D line. 
- - Raises - ------ - ValueError - If `period_k` is not positive (> 0). - ValueError - If `period_d` is not positive (> 0). - - """ Condition.positive_int(period_k, "period_k") Condition.positive_int(period_d, "period_d") super().__init__(params=[period_k, period_d]) diff --git a/nautilus_trader/indicators/swings.pyx b/nautilus_trader/indicators/swings.pyx index de7e63f2c2e6..91040a122848 100644 --- a/nautilus_trader/indicators/swings.pyx +++ b/nautilus_trader/indicators/swings.pyx @@ -26,18 +26,14 @@ from nautilus_trader.model.data.bar cimport Bar cdef class Swings(Indicator): """ A swing indicator which calculates and stores various swing metrics. + + Parameters + ---------- + period : int + The rolling window period for the indicator (> 0). """ def __init__(self, int period): - """ - Initialize a new instance of the Swings class. - - Parameters - ---------- - period : int - The rolling window period for the indicator (> 0). - - """ Condition.positive_int(period, "period") super().__init__(params=[period]) diff --git a/nautilus_trader/infrastructure/cache.pyx b/nautilus_trader/infrastructure/cache.pyx index c71b13ec8df7..0ccba648fda6 100644 --- a/nautilus_trader/infrastructure/cache.pyx +++ b/nautilus_trader/infrastructure/cache.pyx @@ -13,7 +13,6 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -import pickle import warnings from typing import Optional diff --git a/nautilus_trader/live/__main__.py b/nautilus_trader/live/__main__.py index 4d018852ba0d..c79aa6c9b7ff 100644 --- a/nautilus_trader/live/__main__.py +++ b/nautilus_trader/live/__main__.py @@ -1,3 +1,18 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + from typing import Optional import click @@ -25,7 +40,10 @@ def main( node = TradingNode(config=config) node.build() if start: - node.start() + try: + node.run() + finally: + node.dispose() if __name__ == "__main__": diff --git a/nautilus_trader/live/data_client.py b/nautilus_trader/live/data_client.py index 3d4260de9f09..d657011d6c6b 100644 --- a/nautilus_trader/live/data_client.py +++ b/nautilus_trader/live/data_client.py @@ -445,7 +445,7 @@ def subscribe_order_book_deltas( instrument_id: InstrumentId, book_type: BookType, depth: Optional[int] = None, - kwargs: dict[str, Any] = None, + kwargs: Optional[dict[str, Any]] = None, ) -> None: self.create_task( self._subscribe_order_book_deltas( @@ -463,7 +463,7 @@ def subscribe_order_book_snapshots( instrument_id: InstrumentId, book_type: BookType, depth: Optional[int] = None, - kwargs: dict = None, + kwargs: Optional[dict[str, Any]] = None, ) -> None: self.create_task( self._subscribe_order_book_snapshots( @@ -707,7 +707,7 @@ async def _subscribe_order_book_deltas( instrument_id: InstrumentId, book_type: BookType, depth: Optional[int] = None, - kwargs: dict[str, Any] = None, + kwargs: Optional[dict[str, Any]] = None, ) -> None: raise NotImplementedError( # pragma: no cover "implement the `_subscribe_order_book_deltas` coroutine", # pragma: no cover @@ -718,7 +718,7 @@ async def _subscribe_order_book_snapshots( instrument_id: InstrumentId, book_type: BookType, depth: Optional[int] = None, - kwargs: dict[str, Any] = None, + kwargs: Optional[dict[str, Any]] = None, ) -> None: raise NotImplementedError( # pragma: no cover "implement the `_subscribe_order_book_snapshots` coroutine", # pragma: no cover diff --git a/nautilus_trader/live/factories.py b/nautilus_trader/live/factories.py index 228f83c2bfcf..cae783f7a823 100644 --- a/nautilus_trader/live/factories.py +++ b/nautilus_trader/live/factories.py @@ -17,7 +17,7 @@ from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.config import LiveDataClientConfig from nautilus_trader.config import LiveExecClientConfig from nautilus_trader.msgbus.bus import MessageBus @@ -36,7 +36,7 @@ def create( msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ): """ Return a new data client. @@ -55,7 +55,7 @@ def create( The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. Returns @@ -79,7 +79,7 @@ def create( msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, ): """ Return a new execution client. @@ -98,7 +98,7 @@ def create( The cache for the client. clock : LiveClock The clock for the client. - logger : LiveLogger + logger : Logger The logger for the client. 
Returns diff --git a/nautilus_trader/live/node.py b/nautilus_trader/live/node.py index a4b9c55e92c0..5bf7e5eb024d 100644 --- a/nautilus_trader/live/node.py +++ b/nautilus_trader/live/node.py @@ -22,7 +22,7 @@ from nautilus_trader.common import Environment from nautilus_trader.common.enums import LogColor from nautilus_trader.common.enums import log_level_from_str -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.config import CacheConfig from nautilus_trader.config import CacheDatabaseConfig from nautilus_trader.config import LiveDataEngineConfig @@ -80,6 +80,7 @@ def __init__(self, config: Optional[TradingNodeConfig] = None): save_state=config.save_state, loop_sig_callback=self._loop_sig_handler, log_level=log_level_from_str(config.log_level.upper()), + log_rate_limit=config.log_rate_limit, ) self._builder = TradingNodeBuilder( @@ -204,13 +205,13 @@ def get_event_loop(self) -> Optional[asyncio.AbstractEventLoop]: """ return self.kernel.loop - def get_logger(self) -> LiveLogger: + def get_logger(self) -> Logger: """ Return the logger for the trading node. Returns ------- - LiveLogger + Logger """ return self.kernel.logger @@ -268,21 +269,15 @@ def build(self) -> None: self._builder.build_exec_clients(self._config.exec_clients) self._is_built = True - def start(self) -> None: + def run(self) -> None: """ - Start the trading node. + Start and run the trading node. """ - if not self._is_built: - raise RuntimeError( - "The trading nodes clients have not been built. " - "Run `node.build()` prior to start.", - ) - try: if self.kernel.loop.is_running(): - self.kernel.loop.create_task(self._run()) + self.kernel.loop.create_task(self.run_async()) else: - self.kernel.loop.run_until_complete(self._run()) + self.kernel.loop.run_until_complete(self.run_async()) except RuntimeError as e: self.kernel.log.exception("Error on run", e) @@ -290,16 +285,15 @@ def stop(self) -> None: """ Stop the trading node gracefully. - After a specified delay the internal `Trader` residuals will be checked. - - If save strategy is specified then strategy states will then be saved. + After a specified delay the internal `Trader` residual state will be checked. + If save strategy is configured, then strategy states will be saved. """ try: if self.kernel.loop.is_running(): - self.kernel.loop.create_task(self._stop()) + self.kernel.loop.create_task(self.stop_async()) else: - self.kernel.loop.run_until_complete(self._stop()) + self.kernel.loop.run_until_complete(self.stop_async()) except RuntimeError as e: self.kernel.log.exception("Error on stop", e) @@ -308,7 +302,6 @@ def dispose(self) -> None: # noqa C901 'TradingNode.dispose' is too complex (11 Dispose of the trading node. Gracefully shuts down the executor and event loop. - """ try: timeout = self.kernel.clock.utc_now() + timedelta( @@ -392,13 +385,21 @@ def _loop_sig_handler(self, sig) -> None: self.kernel.log.warning(f"Received {sig!s}, shutting down...") self.stop() - async def _run(self) -> None: + async def run_async(self) -> None: + """ + Start and run the trading node asynchronously. + """ try: + if not self._is_built: + raise RuntimeError( + "The trading nodes clients have not been built. 
" + "Run `node.build()` prior to start.", + ) + self.kernel.log.info("STARTING...") self._is_running = True # Start system - self.kernel.logger.start() self.kernel.data_engine.start() self.kernel.risk_engine.start() self.kernel.exec_engine.start() @@ -520,8 +521,14 @@ async def _await_portfolio_initialized(self) -> bool: return True # Portfolio initialized - async def _stop(self) -> None: - self._is_stopping = True + async def stop_async(self) -> None: + """ + Stop the trading node gracefully, asynchronously. + + After a specified delay the internal `Trader` residual state will be checked. + + If save strategy is configured, then strategy states will be saved. + """ self.kernel.log.info("STOPPING...") if self.kernel.trader.is_running: @@ -575,7 +582,6 @@ async def _stop(self) -> None: self.kernel.writer.flush() self.kernel.log.info("STOPPED.") - self.kernel.logger.stop() self._is_running = False async def _await_engines_disconnected(self) -> bool: diff --git a/nautilus_trader/live/node_builder.py b/nautilus_trader/live/node_builder.py index 7f9939836d72..641d1e2df5e9 100644 --- a/nautilus_trader/live/node_builder.py +++ b/nautilus_trader/live/node_builder.py @@ -17,7 +17,7 @@ from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.config import ImportableConfig from nautilus_trader.config import LiveDataClientConfig @@ -49,7 +49,7 @@ class TradingNodeBuilder: The cache for building clients. clock : LiveClock The clock for building clients. - logger : LiveLogger + logger : Logger The logger for building clients. log : LoggerAdapter The trading nodes logger. @@ -63,7 +63,7 @@ def __init__( msgbus: MessageBus, cache: Cache, clock: LiveClock, - logger: LiveLogger, + logger: Logger, log: LoggerAdapter, ): self._msgbus = msgbus @@ -143,7 +143,7 @@ def build_data_clients(self, config: dict[str, ImportableConfig]): Parameters ---------- - config : dict[str, object] + config : dict[str, ImportableConfig | LiveDataClientConfig] The data clients configuration. """ @@ -191,7 +191,7 @@ def build_exec_clients(self, config: dict[str, ImportableConfig]): Parameters ---------- - config : dict[str, object] + config : dict[str, ImportableConfig | LiveExecClientConfig] The execution clients configuration. """ diff --git a/nautilus_trader/live/risk_engine.pyx b/nautilus_trader/live/risk_engine.pyx index 9b85e0ab2a77..c83765505ee4 100644 --- a/nautilus_trader/live/risk_engine.pyx +++ b/nautilus_trader/live/risk_engine.pyx @@ -67,32 +67,6 @@ cdef class LiveRiskEngine(RiskEngine): Logger logger not None, config: Optional[LiveRiskEngineConfig] = None, ): - """ - Initialize a new instance of the ``LiveRiskEngine`` class. - - Parameters - ---------- - loop : asyncio.AbstractEventLoop - The event loop for the engine. - portfolio : PortfolioFacade - The portfolio for the engine. - msgbus : MessageBus - The message bus for the engine. - cache : CacheFacade - The read-only cache for the engine. - clock : Clock - The clock for the engine. - logger : Logger - The logger for the engine. - config : LiveRiskEngineConfig - The configuration for the instance. - - Raises - ------ - TypeError - If `config` is not of type `LiveRiskEngineConfig`. 
- - """ if config is None: config = LiveRiskEngineConfig() Condition.type(config, LiveRiskEngineConfig, "config") diff --git a/nautilus_trader/model/data/bar.pyx b/nautilus_trader/model/data/bar.pyx index 366cb16834ec..a6f8fe5bcdc6 100644 --- a/nautilus_trader/model/data/bar.pyx +++ b/nautilus_trader/model/data/bar.pyx @@ -26,7 +26,6 @@ from nautilus_trader.core.rust.model cimport bar_hash from nautilus_trader.core.rust.model cimport bar_new from nautilus_trader.core.rust.model cimport bar_new_from_raw from nautilus_trader.core.rust.model cimport bar_specification_eq -from nautilus_trader.core.rust.model cimport bar_specification_free from nautilus_trader.core.rust.model cimport bar_specification_ge from nautilus_trader.core.rust.model cimport bar_specification_gt from nautilus_trader.core.rust.model cimport bar_specification_hash @@ -109,10 +108,6 @@ cdef class BarSpecification: state[2] ) - def __del__(self) -> None: - # Never allocation heap memory - bar_specification_free(self._mem) # `self._mem` moved to Rust (then dropped) - cdef str to_str(self): return cstr_to_pystr(bar_specification_to_cstr(&self._mem)) diff --git a/nautilus_trader/model/data/tick.pxd b/nautilus_trader/model/data/tick.pxd index 4c0657de5d83..481378085403 100644 --- a/nautilus_trader/model/data/tick.pxd +++ b/nautilus_trader/model/data/tick.pxd @@ -54,6 +54,9 @@ cdef class QuoteTick(Data): @staticmethod cdef list capsule_to_quote_tick_list(object capsule) + @staticmethod + cdef object quote_tick_list_to_capsule(list items) + @staticmethod cdef QuoteTick from_dict_c(dict values) @@ -88,8 +91,17 @@ cdef class TradeTick(Data): @staticmethod cdef list capsule_to_trade_tick_list(object capsule) + @staticmethod + cdef object trade_tick_list_to_capsule(list items) + @staticmethod cdef TradeTick from_dict_c(dict values) @staticmethod cdef dict to_dict_c(TradeTick obj) + + @staticmethod + cdef TradeTick from_mem_c(TradeTick_t mem) + + @staticmethod + cdef list capsule_to_trade_tick_list(object capsule) diff --git a/nautilus_trader/model/data/tick.pyx b/nautilus_trader/model/data/tick.pyx index 8fc24587f4da..c037814e3b16 100644 --- a/nautilus_trader/model/data/tick.pyx +++ b/nautilus_trader/model/data/tick.pyx @@ -12,8 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------------------------- - +from cpython.mem cimport PyMem_Free +from cpython.mem cimport PyMem_Malloc +from cpython.pycapsule cimport PyCapsule_Destructor from cpython.pycapsule cimport PyCapsule_GetPointer +from cpython.pycapsule cimport PyCapsule_New from libc.stdint cimport int64_t from libc.stdint cimport uint8_t from libc.stdint cimport uint64_t @@ -45,6 +48,12 @@ from nautilus_trader.model.objects cimport Price from nautilus_trader.model.objects cimport Quantity +cdef void capsule_destructor(object capsule): + cdef CVec* cvec = PyCapsule_GetPointer(capsule, NULL) + PyMem_Free(cvec[0].ptr) # de-allocate buffer + PyMem_Free(cvec) # de-allocate cvec + + cdef class QuoteTick(Data): """ Represents a single quote tick in a financial market. 
@@ -300,10 +309,35 @@ cdef class QuoteTick(Data):

         return ticks

+    @staticmethod
+    cdef inline quote_tick_list_to_capsule(list items):
+        # create a C struct buffer
+        cdef uint64_t len_ = len(items)
+        cdef QuoteTick_t * data = <QuoteTick_t *>PyMem_Malloc(len_ * sizeof(QuoteTick_t))
+        if not data:
+            raise MemoryError()
+        cdef uint64_t i
+        for i in range(len_):
+            data[i] = (<QuoteTick>items[i])._mem
+
+        # create CVec
+        cdef CVec * cvec = <CVec *>PyMem_Malloc(1 * sizeof(CVec))
+        cvec.ptr = data
+        cvec.len = len_
+        cvec.cap = len_
+
+        # create PyCapsule
+        return PyCapsule_New(cvec, NULL, <PyCapsule_Destructor>capsule_destructor)
+
     @staticmethod
     def list_from_capsule(capsule) -> list[QuoteTick]:
         return QuoteTick.capsule_to_quote_tick_list(capsule)

+    @staticmethod
+    def capsule_from_list(items):
+        return QuoteTick.quote_tick_list_to_capsule(items)
+
     @staticmethod
     def from_raw(
         InstrumentId instrument_id,
@@ -667,10 +701,35 @@ cdef class TradeTick(Data):

         return ticks

+    @staticmethod
+    cdef inline trade_tick_list_to_capsule(list items):
+        # create a C struct buffer
+        cdef uint64_t len_ = len(items)
+        cdef TradeTick_t * data = <TradeTick_t *>PyMem_Malloc(len_ * sizeof(TradeTick_t))
+        if not data:
+            raise MemoryError()
+        cdef uint64_t i
+        for i in range(len_):
+            data[i] = (<TradeTick>items[i])._mem
+
+        # create CVec
+        cdef CVec * cvec = <CVec *>PyMem_Malloc(1 * sizeof(CVec))
+        cvec.ptr = data
+        cvec.len = len_
+        cvec.cap = len_
+
+        # create PyCapsule
+        return PyCapsule_New(cvec, NULL, <PyCapsule_Destructor>capsule_destructor)
+
     @staticmethod
     def list_from_capsule(capsule) -> list[TradeTick]:
         return TradeTick.capsule_to_trade_tick_list(capsule)

+    @staticmethod
+    def capsule_from_list(items):
+        return TradeTick.trade_tick_list_to_capsule(items)
+
     @staticmethod
     cdef TradeTick from_dict_c(dict values):
         Condition.not_none(values, "values")
diff --git a/nautilus_trader/model/instruments/base.pyx b/nautilus_trader/model/instruments/base.pyx
index e880173fca58..c981ce3a7660 100644
--- a/nautilus_trader/model/instruments/base.pyx
+++ b/nautilus_trader/model/instruments/base.pyx
@@ -518,6 +518,7 @@
         """
         Condition.not_none(quantity, "quantity")
+        Condition.not_none(price, "price")

         if self.is_inverse:
             if inverse_as_quote:
diff --git a/nautilus_trader/model/objects.pyx b/nautilus_trader/model/objects.pyx
index caa9b0113801..8ba8966a6309 100644
--- a/nautilus_trader/model/objects.pyx
+++ b/nautilus_trader/model/objects.pyx
@@ -46,10 +46,8 @@ from nautilus_trader.core.rust.model cimport currency_eq
 from nautilus_trader.core.rust.model cimport money_free
 from nautilus_trader.core.rust.model cimport money_from_raw
 from nautilus_trader.core.rust.model cimport money_new
-from nautilus_trader.core.rust.model cimport price_free
 from nautilus_trader.core.rust.model cimport price_from_raw
 from nautilus_trader.core.rust.model cimport price_new
-from nautilus_trader.core.rust.model cimport quantity_free
 from nautilus_trader.core.rust.model cimport quantity_from_raw
 from nautilus_trader.core.rust.model cimport quantity_new
 from nautilus_trader.core.string cimport cstr_to_pystr
@@ -121,10 +119,6 @@ cdef class Quantity:

         self._mem = quantity_new(value, precision)

-    def __del__(self) -> None:
-        # Never allocating heap memory
-        quantity_free(self._mem)  # `self._mem` moved to Rust (then dropped)
-
     def __getstate__(self):
         return self._mem.raw, self._mem.precision

@@ -511,10 +505,6 @@ cdef class Price:

         self._mem = price_new(value, precision)

-    def __del__(self) -> None:
-        # Never allocating heap memory
-        price_free(self._mem)  # `self._mem` moved to Rust (then dropped)
-
     def __getstate__(self):
         return
diff --git a/nautilus_trader/model/orders/base.pyx b/nautilus_trader/model/orders/base.pyx
index 29bb7671a1b6..5d80b4cf3cfd 100644
--- a/nautilus_trader/model/orders/base.pyx
+++ b/nautilus_trader/model/orders/base.pyx
@@ -807,7 +807,7 @@ cdef class Order:
 
         # Handle event (FSM can raise InvalidStateTrigger)
         if isinstance(event, OrderInitialized):
-            Condition.true(not self._events, "`OrderInitialized` should be the first order event")
+            pass  # Do nothing else
         elif isinstance(event, OrderDenied):
             self._fsm.trigger(OrderStatus.DENIED)
             self._denied(event)
diff --git a/nautilus_trader/model/orders/list.pyx b/nautilus_trader/model/orders/list.pyx
index bf29f0ca618b..6a532f4884ca 100644
--- a/nautilus_trader/model/orders/list.pyx
+++ b/nautilus_trader/model/orders/list.pyx
@@ -20,7 +20,7 @@ from nautilus_trader.model.orders.base cimport Order
 
 cdef class OrderList:
     """
-    Represents a list of bulk or related parent-child contingent orders.
+    Represents a list of bulk or related contingent orders.
 
     Parameters
     ----------
@@ -59,6 +59,9 @@ cdef class OrderList:
     def __hash__(self) -> int:
         return hash(self.id)
 
+    def __len__(self) -> int:
+        return len(self.orders)
+
     def __repr__(self) -> str:
         return (
             f"OrderList("
diff --git a/nautilus_trader/network/socket.pxd b/nautilus_trader/network/socket.pxd
index bc9a4208dd76..1320b51d6757 100644
--- a/nautilus_trader/network/socket.pxd
+++ b/nautilus_trader/network/socket.pxd
@@ -26,10 +26,7 @@ cdef class SocketClient:
     cdef bytes _crlf
     cdef str _encoding
     cdef int _incomplete_read_count
-    cdef readonly bint is_running
-    cdef readonly bint is_stopped
-    cdef readonly int reconnection_count
-    cdef readonly bint is_stopping
+    cdef readonly int _reconnection_count  # readonly for test
 
     cdef readonly object host  # TODO(cs): Temporary `object` typing
     """The host for the socket client.\n\n:returns: `str`"""
@@ -37,5 +34,9 @@ cdef class SocketClient:
     """The port for the socket client.\n\n:returns: `int`"""
     cdef readonly bint ssl
     """If the socket client is using SSL.\n\n:returns: `bool`"""
+    cdef readonly bint is_stopping
+    """If the client is stopping.\n\n:returns: `bool`"""
+    cdef readonly bint is_running
+    """If the client is running.\n\n:returns: `bool`"""
     cdef readonly bint is_connected
-    """If the socket is connected.\n\n:returns: `bool`"""
+    """If the client is connected.\n\n:returns: `bool`"""
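The reworked client replaces the old `stop()`/`is_stopped` flags with `is_stopping`/`is_running` and exposes three overridable lifecycle coroutines, shown in the socket.pyx diff below. A sketch of how an adapter might use the hooks (the `HeartbeatClient` class and its auth message are hypothetical, not part of this diff; construction arguments are omitted):

    from nautilus_trader.network.socket import SocketClient


    class HeartbeatClient(SocketClient):  # hypothetical subclass
        async def post_connection(self):
            # e.g. authenticate after the TCP connection is established
            await self.send(b"AUTH my-token")

        async def post_disconnection(self) -> None:
            # e.g. cancel ping/heartbeat tasks here
            pass

        async def post_reconnection(self) -> None:
            # e.g. resubscribe to channels here
            pass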
") - self.stop() - self._log.debug("Main loop stop triggered.") - while not self.is_stopped: - self._log.debug("Waiting for stop") - await asyncio.sleep(0.25) - self._log.debug("Stopped, closing connections") + self.is_stopping = True + self._log.debug("main loop stop triggered.") + while not self.is_running: + await sleep0() + await self.post_disconnection() + self._log.debug("main loop stopped, closing connections") self._writer.close() await self._writer.wait_closed() self._log.debug("Connections closed") @@ -125,19 +129,28 @@ cdef class SocketClient: self.is_connected = False self._log.info("Disconnected.") - def stop(self): - self.is_running = False + async def post_disconnection(self) -> None: + """ + Actions to be performed post disconnection. + + """ + # Override to implement additional disconnection related behaviour + # (canceling ping tasks etc.). + pass async def reconnect(self): self._log.info("Reconnecting") await self.disconnect() await self.connect() - async def post_connection(self): + async def post_reconnection(self) -> None: """ - The actions to perform post-connection. i.e. sending further connection messages. + Actions to be performed post reconnection. + """ - await sleep0() + # Override to implement additional reconnection related behaviour + # (resubscribing etc.). + pass async def send(self, bytes raw): self._log.debug("[SEND] " + raw.decode()) @@ -146,11 +159,12 @@ cdef class SocketClient: async def start(self): self._log.debug("Starting recv loop") + self.is_running = True cdef: bytes partial = b"" bytes raw = b"" - while self.is_running: + while not self.is_stopping: try: raw = await self._reader.readuntil(separator=self._crlf) if partial: @@ -162,14 +176,15 @@ cdef class SocketClient: await sleep0() except asyncio.IncompleteReadError as e: partial = e.partial + if self.is_stopping: + break self._log.warning(str(e)) self._incomplete_read_count += 1 - await asyncio.sleep(0.010) + await sleep0() if self._incomplete_read_count > 10: # Something probably wrong; reconnect - self._log.warning(f"Incomplete read error ({self._incomplete_read_count=}), reconnecting.. ({self.reconnection_count=})") - self.is_running = False - self.reconnection_count += 1 + self._log.warning(f"Incomplete read error ({self._incomplete_read_count=}), reconnecting.. ({self._reconnection_count=})") + self._reconnection_count += 1 self._loop.create_task(self.reconnect()) return await sleep0() @@ -177,4 +192,4 @@ cdef class SocketClient: except ConnectionResetError: self._loop.create_task(self.reconnect()) return - self.is_running = True + self.is_running = False diff --git a/nautilus_trader/persistence/base.py b/nautilus_trader/persistence/base.py deleted file mode 100644 index 99ccad28b337..000000000000 --- a/nautilus_trader/persistence/base.py +++ /dev/null @@ -1,56 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -import inspect - - -def freeze_dict(dict_like: dict): - return tuple(sorted(dict_like.items())) - - -def check_value(v): - if isinstance(v, dict): - return freeze_dict(dict_like=v) - return v - - -def resolve_kwargs(func, *args, **kwargs): - kw = inspect.getcallargs(func, *args, **kwargs) - return {k: check_value(v) for k, v in kw.items()} - - -def clear_singleton_instances(cls: type): - assert isinstance(cls, Singleton) - cls._instances = {} - - -class Singleton(type): - """ - The base class to ensure a singleton. - """ - - def __init__(cls, name, bases, dict_like): - super().__init__(name, bases, dict_like) - cls._instances = {} - - def __call__(cls, *args, **kw): - full_kwargs = resolve_kwargs(cls.__init__, None, *args, **kw) - if full_kwargs == {"self": None, "args": (), "kwargs": {}}: - full_kwargs = {} - full_kwargs.pop("self", None) - key = tuple(full_kwargs.items()) - if key not in cls._instances: - cls._instances[key] = super().__call__(*args, **kw) - return cls._instances[key] diff --git a/nautilus_trader/persistence/batching.py b/nautilus_trader/persistence/batching.py index 803140ec7250..e69de29bb2d1 100644 --- a/nautilus_trader/persistence/batching.py +++ b/nautilus_trader/persistence/batching.py @@ -1,174 +0,0 @@ -# ------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ------------------------------------------------------------------------------------------------- - -import heapq -import itertools -import sys -from collections import namedtuple -from collections.abc import Iterator - -import fsspec -import pandas as pd -import pyarrow.dataset as ds -import pyarrow.parquet as pq -from pyarrow.lib import ArrowInvalid - -from nautilus_trader.config import BacktestDataConfig -from nautilus_trader.persistence.catalog.parquet import ParquetDataCatalog -from nautilus_trader.persistence.funcs import parse_bytes -from nautilus_trader.serialization.arrow.serializer import ParquetSerializer -from nautilus_trader.serialization.arrow.util import clean_key - - -FileMeta = namedtuple("FileMeta", "filename datatype instrument_id client_id start end") - - -def dataset_batches( - file_meta: FileMeta, - fs: fsspec.AbstractFileSystem, - n_rows: int, -) -> Iterator[pd.DataFrame]: - try: - d: ds.Dataset = ds.dataset(file_meta.filename, filesystem=fs) - except ArrowInvalid: - return - for fn in sorted(map(str, d.files)): - f = pq.ParquetFile(fs.open(fn)) - for batch in f.iter_batches(batch_size=n_rows): - if batch.num_rows == 0: - break - df = batch.to_pandas() - df = df[(df["ts_init"] >= file_meta.start) & (df["ts_init"] <= file_meta.end)] - if df.empty: - continue - if file_meta.instrument_id: - df.loc[:, "instrument_id"] = file_meta.instrument_id - yield df - - -def build_filenames( - catalog: ParquetDataCatalog, - data_configs: list[BacktestDataConfig], -) -> list[FileMeta]: - files = [] - for config in data_configs: - filename = catalog._make_path(cls=config.data_type) - if config.instrument_id: - filename += f"/instrument_id={clean_key(config.instrument_id)}" - if not catalog.fs.exists(filename): - continue - files.append( - FileMeta( - filename=filename, - datatype=config.data_type, - instrument_id=config.instrument_id, - client_id=config.client_id, - start=config.start_time_nanos, - end=config.end_time_nanos, - ), - ) - return files - - -def frame_to_nautilus(df: pd.DataFrame, cls: type): - return ParquetSerializer.deserialize(cls=cls, chunk=df.to_dict("records")) - - -def batch_files( # noqa: C901 - catalog: ParquetDataCatalog, - data_configs: list[BacktestDataConfig], - read_num_rows: int = 10000, - target_batch_size_bytes: int = parse_bytes("100mb"), # noqa: B008, -): - files = build_filenames(catalog=catalog, data_configs=data_configs) - buffer = {fn.filename: pd.DataFrame() for fn in files} - datasets = { - f.filename: dataset_batches(file_meta=f, fs=catalog.fs, n_rows=read_num_rows) for f in files - } - completed: set[str] = set() - bytes_read = 0 - values = [] - sent_count = 0 - while {f.filename for f in files} != completed: - # Fill buffer (if required) - for fn in buffer: - if len(buffer[fn]) < read_num_rows: - next_buf = next(datasets[fn], None) - if next_buf is None: - completed.add(fn) - continue - buffer[fn] = pd.concat([buffer[fn], next_buf]) - - # Determine minimum timestamp - max_ts_per_frame = {fn: df["ts_init"].max() for fn, df in buffer.items() if not df.empty} - if not max_ts_per_frame: - continue - min_ts = min(max_ts_per_frame.values()) - - # Filter buffer dataframes based on min_timestamp - batches = [] - for f in files: - df = buffer[f.filename] - if df.empty: - continue - ts_filter = df["ts_init"] <= min_ts # min of max timestamps - batch = df[ts_filter] - buffer[f.filename] = df[~ts_filter] - objs = frame_to_nautilus(df=batch, cls=f.datatype) - batches.append(objs) - bytes_read += sum([sys.getsizeof(x) for x in objs]) - - # Merge ticks - 
values.extend(list(heapq.merge(*batches, key=lambda x: x.ts_init))) - if bytes_read > target_batch_size_bytes: - yield values - sent_count += len(values) - bytes_read = 0 - values = [] - - if values: - yield values - sent_count += len(values) - - if sent_count == 0: - raise ValueError("No data found, check data_configs") - - -def groupby_datatype(data): - def _groupby_key(x): - return type(x).__name__ - - return [ - {"type": type(v[0]), "data": v} - for v in [ - list(v) for _, v in itertools.groupby(sorted(data, key=_groupby_key), key=_groupby_key) - ] - ] - - -def extract_generic_data_client_ids(data_configs: list[BacktestDataConfig]) -> dict: - """ - Extract a mapping of data_type : client_id from the list of `data_configs`. - In the process of merging the streaming data, we lose the `client_id` for - generic data, we need to inject this back in so the backtest engine can be - correctly loaded. - """ - data_client_ids = [ - (config.data_type, config.client_id) for config in data_configs if config.client_id - ] - assert len(set(data_client_ids)) == len( - dict(data_client_ids), - ), "data_type found with multiple client_ids" - return dict(data_client_ids) diff --git a/nautilus_trader/persistence/catalog/base.py b/nautilus_trader/persistence/catalog/base.py index 0985837be945..17dde2c9d5ac 100644 --- a/nautilus_trader/persistence/catalog/base.py +++ b/nautilus_trader/persistence/catalog/base.py @@ -27,7 +27,7 @@ from nautilus_trader.model.data.venue import InstrumentStatusUpdate from nautilus_trader.model.instruments.base import Instrument from nautilus_trader.model.orderbook.data import OrderBookData -from nautilus_trader.persistence.base import Singleton +from nautilus_trader.persistence.external.util import Singleton from nautilus_trader.serialization.arrow.util import GENERIC_DATA_PREFIX diff --git a/nautilus_trader/persistence/catalog/parquet.py b/nautilus_trader/persistence/catalog/parquet.py index d6cd9513a7e8..5a97d33d247f 100644 --- a/nautilus_trader/persistence/catalog/parquet.py +++ b/nautilus_trader/persistence/catalog/parquet.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ------------------------------------------------------------------------------------------------- - +import heapq import itertools import os import pathlib import platform +import sys from pathlib import Path from typing import Callable, Optional, Union @@ -31,10 +32,10 @@ from fsspec.utils import infer_storage_options from pyarrow import ArrowInvalid +from nautilus_trader.core.datetime import dt_to_unix_nanos from nautilus_trader.core.inspect import is_nautilus_class -from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReader -from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReaderType -from nautilus_trader.core.nautilus_pyo3.persistence import ParquetType +from nautilus_trader.model.data.bar import Bar +from nautilus_trader.model.data.bar import BarSpecification from nautilus_trader.model.data.base import DataType from nautilus_trader.model.data.base import GenericData from nautilus_trader.model.data.tick import QuoteTick @@ -42,6 +43,8 @@ from nautilus_trader.model.objects import FIXED_SCALAR from nautilus_trader.persistence.catalog.base import BaseDataCatalog from nautilus_trader.persistence.external.metadata import load_mappings +from nautilus_trader.persistence.external.util import is_filename_in_time_range +from nautilus_trader.persistence.streaming.batching import generate_batches_rust from nautilus_trader.serialization.arrow.serializer import ParquetSerializer from nautilus_trader.serialization.arrow.serializer import list_schemas from nautilus_trader.serialization.arrow.util import camel_to_snake_case @@ -71,7 +74,7 @@ class ParquetDataCatalog(BaseDataCatalog): def __init__( self, path: str, - fs_protocol: str = "file", + fs_protocol: Optional[str] = "file", fs_storage_options: Optional[dict] = None, ): self.fs_protocol = fs_protocol @@ -109,7 +112,6 @@ def from_uri(cls, uri): # -- QUERIES ----------------------------------------------------------------------------------- def query(self, cls, filter_expr=None, instrument_ids=None, as_nautilus=False, **kwargs): - if not is_nautilus_class(cls): # Special handling for generic data return self.generic_data( @@ -152,11 +154,11 @@ def _query( # noqa (too complex) instrument_ids = list(set(map(clean_key, instrument_ids))) filters.append(ds.field(instrument_id_column).cast("string").isin(instrument_ids)) if start is not None: - filters.append(ds.field(ts_column) >= int(pd.Timestamp(start).to_datetime64())) + filters.append(ds.field(ts_column) >= pd.Timestamp(start).value) if end is not None: - filters.append(ds.field(ts_column) <= int(pd.Timestamp(end).to_datetime64())) + filters.append(ds.field(ts_column) <= pd.Timestamp(end).value) - full_path = self._make_path(cls=cls) + full_path = self.make_path(cls=cls) if not (self.fs.exists(full_path) or self.fs.isdir(full_path)): if raise_on_empty: @@ -164,6 +166,43 @@ def _query( # noqa (too complex) else: return pd.DataFrame() if as_dataframe else None + # Load rust objects + if isinstance(start, int) or start is None: + start_nanos = start + else: + start_nanos = dt_to_unix_nanos(start) # datetime > nanos + + if isinstance(end, int) or end is None: + end_nanos = end + else: + end_nanos = dt_to_unix_nanos(end) # datetime > nanos + + use_rust = kwargs.get("use_rust") and cls in (QuoteTick, TradeTick) + if use_rust and kwargs.get("as_nautilus"): + assert instrument_ids is not None + assert len(instrument_ids) > 0 + + to_merge = [] + for instrument_id in instrument_ids: + files = self.get_files(cls, instrument_id, start_nanos, end_nanos) + + if raise_on_empty and 
not files:
+                    raise RuntimeError("No files found.")
+
+                batches = generate_batches_rust(
+                    files=files,
+                    cls=cls,
+                    batch_size=sys.maxsize,
+                    start_nanos=start_nanos,
+                    end_nanos=end_nanos,
+                )
+                objs = list(itertools.chain.from_iterable(batches))
+                if len(instrument_ids) == 1:
+                    return objs  # skip merge, only 1 instrument
+                to_merge.append(objs)
+
+            return list(heapq.merge(*to_merge, key=lambda x: x.ts_init))
+
         dataset = ds.dataset(full_path, partitioning="hive", filesystem=self.fs)
 
         table_kwargs = table_kwargs or {}
@@ -176,44 +215,19 @@ def _query(  # noqa (too complex)
         except Exception as e:
             print(e)
             raise e
-        mappings = self.load_inverse_mappings(path=full_path)
 
-        if (
-            cls in (QuoteTick, TradeTick)
-            and kwargs.get("use_rust")
-            and not kwargs.get("as_nautilus")
-        ):
-            return int_to_float_dataframe(table.to_pandas())
+        if use_rust:
+            df = int_to_float_dataframe(table.to_pandas())
+            if start_nanos is None and end_nanos is None:
+                return df
+            if start_nanos is None:
+                start_nanos = 0
+            if end_nanos is None:
+                end_nanos = sys.maxsize
+            df = df[(df["ts_init"] >= start_nanos) & (df["ts_init"] <= end_nanos)]
+            return df
 
-        if cls in (QuoteTick, TradeTick) and kwargs.get("use_rust"):
-            if cls == QuoteTick:
-                parquet_type = ParquetType.QuoteTick
-            elif cls == TradeTick:
-                parquet_type = ParquetType.TradeTick
-            else:
-                RuntimeError()
-
-            ticks = []
-            for file in dataset.files:
-                with open(file, "rb") as f:
-                    file_data = f.read()
-                    reader = ParquetReader(
-                        "",
-                        1000,
-                        parquet_type,
-                        ParquetReaderType.Buffer,
-                        file_data,
-                    )
-
-                    if cls == QuoteTick:
-                        data = map(QuoteTick.list_from_capsule, reader)
-                    elif cls == TradeTick:
-                        data = map(TradeTick.list_from_capsule, reader)
-                    else:
-                        RuntimeError()
-                    ticks.extend(list(itertools.chain.from_iterable(data)))
-
-            return ticks
+        mappings = self.load_inverse_mappings(path=full_path)
 
         if "as_nautilus" in kwargs:
             as_dataframe = not kwargs.pop("as_nautilus")
@@ -225,8 +239,79 @@ def _query(  # noqa (too complex)
         else:
             return self._handle_table_nautilus(table=table, cls=cls, mappings=mappings)
 
-    def _make_path(self, cls: type) -> str:
-        return f"{self.path}/data/{class_to_filename(cls=cls)}.parquet"
+    def make_path(self, cls: type, instrument_id: Optional[str] = None) -> str:
+        path = f"{self.path}/data/{class_to_filename(cls=cls)}.parquet"
+        if instrument_id is not None:
+            path += f"/instrument_id={clean_key(instrument_id)}"
+        return path
+
+    def get_files(
+        self,
+        cls: type,
+        instrument_id: Optional[str] = None,
+        start_nanos: Optional[int] = None,
+        end_nanos: Optional[int] = None,
+        bar_spec: Optional[BarSpecification] = None,
+    ) -> list[str]:
+        if instrument_id is None:
+            folder = self.path
+        else:
+            folder = self.make_path(cls=cls, instrument_id=instrument_id)
+
+        if not self.fs.isdir(folder):
+            return []
+
+        paths = self.fs.glob(f"{folder}/**")
+
+        file_paths = []
+        for path in paths:
+            # Filter by BarType
+            if cls is Bar:
+                bar_spec_matched = bar_spec and str(bar_spec) in path
+                if not bar_spec_matched:
+                    continue
+
+            # Filter by time range
+            file_path = pathlib.PurePosixPath(path).name
+            matched = is_filename_in_time_range(file_path, start_nanos, end_nanos)
+            if matched:
+                file_paths.append(str(path))
+
+        file_paths = sorted(file_paths, key=lambda x: Path(x).stem)
+
+        return file_paths
+
+    def _get_files(
+        self,
+        cls: type,
+        instrument_id: Optional[str] = None,
+        start_nanos: Optional[int] = None,
+        end_nanos: Optional[int] = None,
+    ) -> list[str]:
+        if instrument_id is None:
+            folder = self.path
+        else:
+            folder = self.make_path(cls=cls, instrument_id=instrument_id)
+
+        if not os.path.exists(folder):
+            return []
+
+        paths = self.fs.glob(f"{folder}/**")
+
+        files = []
+        for path in paths:
+            fn = pathlib.PurePosixPath(path).name
+            matched = is_filename_in_time_range(fn, start_nanos, end_nanos)
+            if matched:
+                files.append(str(path))
+
+        files = sorted(files, key=lambda x: Path(x).stem)
+
+        return files
 
     def load_inverse_mappings(self, path):
         mappings = load_mappings(fs=self.fs, path=path)
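Taken together, `make_path`, `get_files` and the new `_query` branch mean a Rust-backed query first resolves candidate files by timestamp range, then deserializes and merges them by `ts_init`. A usage sketch (the catalog path and instrument ID are placeholders; the exact keyword forwarding of `use_rust`/`as_nautilus` is assumed from the `_query` code above):

    from nautilus_trader.model.data.tick import QuoteTick
    from nautilus_trader.persistence.catalog.parquet import ParquetDataCatalog

    catalog = ParquetDataCatalog("/tmp/catalog")  # placeholder path

    # DataFrame path: int64-encoded ticks decoded to floats
    df = catalog.query(QuoteTick, instrument_ids=["EUR/USD.SIM"], use_rust=True)

    # Nautilus-object path: loaded file-by-file via the Rust ParquetReader
    ticks = catalog.query(
        QuoteTick,
        instrument_ids=["EUR/USD.SIM"],
        start="2020-01-01",  # converted with dt_to_unix_nanos
        end="2020-01-02",
        as_nautilus=True,
        use_rust=True,
    )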
diff --git a/nautilus_trader/persistence/external/core.py b/nautilus_trader/persistence/external/core.py
index 6aa5fb93e653..75d6c5986856 100644
--- a/nautilus_trader/persistence/external/core.py
+++ b/nautilus_trader/persistence/external/core.py
@@ -14,8 +14,8 @@
 # -------------------------------------------------------------------------------------------------
 
 import logging
+import os
 import pathlib
-import re
 from concurrent.futures import Executor
 from concurrent.futures import ThreadPoolExecutor
 from io import BytesIO
@@ -25,20 +25,25 @@
 import fsspec
 import pandas as pd
 import pyarrow as pa
-import pyarrow.dataset as ds
-import pyarrow.parquet as pq
 from fsspec.core import OpenFile
 from pyarrow import ArrowInvalid
+from pyarrow import dataset as ds
+from pyarrow import parquet as pq
 from tqdm import tqdm
 
 from nautilus_trader.core.correctness import PyCondition
+from nautilus_trader.core.nautilus_pyo3.persistence import ParquetWriter
 from nautilus_trader.model.data.base import GenericData
+from nautilus_trader.model.data.tick import QuoteTick
+from nautilus_trader.model.data.tick import TradeTick
 from nautilus_trader.model.instruments.base import Instrument
 from nautilus_trader.persistence.catalog.base import BaseDataCatalog
 from nautilus_trader.persistence.catalog.parquet import ParquetDataCatalog
 from nautilus_trader.persistence.external.metadata import load_mappings
 from nautilus_trader.persistence.external.metadata import write_partition_column_mappings
 from nautilus_trader.persistence.external.readers import Reader
+from nautilus_trader.persistence.external.util import parse_filename_start
+from nautilus_trader.persistence.external.util import py_type_to_parquet_type
 from nautilus_trader.persistence.funcs import parse_bytes
 from nautilus_trader.serialization.arrow.serializer import ParquetSerializer
 from nautilus_trader.serialization.arrow.serializer import get_cls_table
@@ -52,7 +57,16 @@
 
 class RawFile:
     """
-    Provides a wrapper of fsspec.OpenFile that processes a raw file and writes to parquet.
+    Provides a wrapper of `fsspec.OpenFile` that processes a raw file and writes to parquet.
+
+    Parameters
+    ----------
+    open_file : fsspec.core.OpenFile
+        The fsspec.OpenFile source of this data.
+    block_size : int
+        The max block (chunk) size in bytes to read from the file.
+    progress : bool, default False
+        If a progress bar should be shown when processing this individual file.
     """
 
     def __init__(
@@ -61,19 +75,6 @@ def __init__(
         open_file: OpenFile,
         block_size: Optional[int] = None,
         progress: bool = False,
     ):
-        """
-        Initialize a new instance of the ``RawFile`` class.
-
-        Parameters
-        ----------
-        open_file : fsspec.core.OpenFile
-            The fsspec.OpenFile source of this data.
-        block_size: int
-            The max block (chunk) size in bytes to read from the file.
-        progress: bool, default False
-            If a progress bar should be shown when processing this individual file.
-
-        """
         self.open_file = open_file
         self.block_size = block_size
         # TODO - waiting for tqdm support in fsspec https://github.com/intake/filesystem_spec/pulls?q=callback
@@ -95,13 +96,23 @@ def iter(self):
             yield raw
 
 
-def process_raw_file(catalog: ParquetDataCatalog, raw_file: RawFile, reader: Reader):
+def process_raw_file(
+    catalog: ParquetDataCatalog,
+    raw_file: RawFile,
+    reader: Reader,
+    use_rust: bool = False,
+    instrument: Optional[Instrument] = None,
+):
     n_rows = 0
     for block in raw_file.iter():
         objs = [x for x in reader.parse(block) if x is not None]
-        dicts = split_and_serialize(objs)
-        dataframes = dicts_to_dataframes(dicts)
-        n_rows += write_tables(catalog=catalog, tables=dataframes)
+        if use_rust:
+            write_parquet_rust(catalog, objs, instrument)
+            n_rows += len(objs)
+        else:
+            dicts = split_and_serialize(objs)
+            dataframes = dicts_to_dataframes(dicts)
+            n_rows += write_tables(catalog=catalog, tables=dataframes)
     reader.on_file_complete()
     return n_rows
 
@@ -113,9 +124,13 @@ def process_files(
     block_size: str = "128mb",
     compression: str = "infer",
     executor: Optional[Executor] = None,
+    use_rust: bool = False,
+    instrument: Optional[Instrument] = None,
     **kwargs,
 ):
     PyCondition.type_or_none(executor, Executor, "executor")
+    if use_rust:
+        assert instrument, "Instrument needs to be provided when saving rust data."
 
     executor = executor or ThreadPoolExecutor()
 
@@ -128,7 +143,14 @@ def process_files(
 
     futures = {}
     for rf in raw_files:
-        futures[rf] = executor.submit(process_raw_file, catalog=catalog, raw_file=rf, reader=reader)
+        futures[rf] = executor.submit(
+            process_raw_file,
+            catalog=catalog,
+            raw_file=rf,
+            reader=reader,
+            instrument=instrument,
+            use_rust=use_rust,
+        )
 
     # Show progress
     for _ in tqdm(list(futures.values())):
@@ -260,6 +282,38 @@
     return rows_written
 
 
+def write_parquet_rust(catalog: ParquetDataCatalog, objs: list, instrument: Instrument):
+    cls = type(objs[0])
+
+    assert cls in (QuoteTick, TradeTick)
+    instrument_id = str(instrument.id)
+
+    min_timestamp = str(objs[0].ts_init).rjust(19, "0")
+    max_timestamp = str(objs[-1].ts_init).rjust(19, "0")
+
+    parent = catalog.make_path(cls=cls, instrument_id=instrument_id)
+    file_path = f"{parent}/{min_timestamp}-{max_timestamp}-0.parquet"
+
+    metadata = {
+        "instrument_id": instrument_id,
+        "price_precision": str(instrument.price_precision),
+        "size_precision": str(instrument.size_precision),
+    }
+    writer = ParquetWriter(py_type_to_parquet_type(cls), metadata)
+
+    capsule = cls.capsule_from_list(objs)
+
+    writer.write(capsule)
+
+    data: bytes = writer.flush_bytes()
+
+    os.makedirs(os.path.dirname(file_path), exist_ok=True)
+    with open(file_path, "wb") as f:
+        f.write(data)
+
+    write_objects(catalog, [instrument], existing_data_behavior="overwrite_or_ignore")
+
+
 def write_parquet(
     fs: fsspec.AbstractFileSystem,
     path: str,
@@ -362,30 +416,6 @@ def inner(*args, **kwargs):
 
     return inner
 
 
-def _parse_file_start_by_filename(fn: str):
-    """
-    Parse start time by filename.
-
-    >>> _parse_file_start_by_filename('/data/test/sample.parquet/instrument_id=a/1577836800000000000-1578182400000000000-0.parquet')
-    '1577836800000000000'
-
-    >>> _parse_file_start_by_filename('/data/test/sample.parquet/instrument_id=a/0648140b1fd7491a97983c0c6ece8d57.parquet')
-
-    """
-    match = re.match(r"(?P<start>\d{19})\-\d{19}\-\d", pathlib.Path(fn).stem)
-    if match:
-        return int(match.groups()[0])
-
-
-def _parse_file_start(fn: str) -> Optional[tuple[str, pd.Timestamp]]:
-    instrument_id = re.findall(r"instrument_id\=(.*)\/", fn)[0] if "instrument_id" in fn else None
-    start = _parse_file_start_by_filename(fn=fn)
-    if start is not None:
-        start = pd.Timestamp(start)
-        return instrument_id, start
-    return None
-
-
 def _validate_dataset(catalog: ParquetDataCatalog, path: str, new_partition_format="%Y%m%d"):
     """
     Repartition dataset into sorted time chunks (default dates) and drop duplicates.
@@ -393,7 +423,7 @@
     fs = catalog.fs
     dataset = ds.dataset(path, filesystem=fs)
     fn_to_start = [
-        (fn, _parse_file_start(fn=fn)) for fn in dataset.files if _parse_file_start(fn=fn)
+        (fn, parse_filename_start(fn=fn)) for fn in dataset.files if parse_filename_start(fn=fn)
    ]
     sort_key = lambda x: (x[1][0], x[1][1].strftime(new_partition_format))  # noqa: E731
diff --git a/nautilus_trader/persistence/external/util.py b/nautilus_trader/persistence/external/util.py
new file mode 100644
index 000000000000..1067f5066dcc
--- /dev/null
+++ b/nautilus_trader/persistence/external/util.py
@@ -0,0 +1,118 @@
+import inspect
+import os
+import re
+import sys
+from typing import Optional
+
+import pandas as pd
+
+from nautilus_trader.core.nautilus_pyo3.persistence import ParquetType
+from nautilus_trader.model.data.tick import QuoteTick
+from nautilus_trader.model.data.tick import TradeTick
+
+
+class Singleton(type):
+    """
+    The base class to ensure a singleton.
+    """
+
+    def __init__(cls, name, bases, dict_like):
+        super().__init__(name, bases, dict_like)
+        cls._instances = {}
+
+    def __call__(cls, *args, **kw):
+        full_kwargs = resolve_kwargs(cls.__init__, None, *args, **kw)
+        if full_kwargs == {"self": None, "args": (), "kwargs": {}}:
+            full_kwargs = {}
+        full_kwargs.pop("self", None)
+        key = tuple(full_kwargs.items())
+        if key not in cls._instances:
+            cls._instances[key] = super().__call__(*args, **kw)
+        return cls._instances[key]
+
+
+def clear_singleton_instances(cls: type):
+    assert isinstance(cls, Singleton)
+    cls._instances = {}
+
+
+def resolve_kwargs(func, *args, **kwargs):
+    kw = inspect.getcallargs(func, *args, **kwargs)
+    return {k: check_value(v) for k, v in kw.items()}
+
+
+def check_value(v):
+    if isinstance(v, dict):
+        return freeze_dict(dict_like=v)
+    return v
+
+
+def freeze_dict(dict_like: dict):
+    return tuple(sorted(dict_like.items()))
+
+
+def parse_filename(fn: str) -> tuple[Optional[int], Optional[int]]:
+    match = re.match(r"\d{19}-\d{19}", fn)
+
+    if match is None:
+        return (None, None)
+
+    parts = fn.split("-")
+    return int(parts[0]), int(parts[1])
+
+
+def is_filename_in_time_range(fn: str, start: Optional[int], end: Optional[int]) -> bool:
+    """
+    Return True if a filename is within a start and end timestamp range.
+ """ + timestamps = parse_filename(fn) + if timestamps == (None, None): + return False # invalid filename + + if start is None and end is None: + return True + + if start is None: + start = 0 + if end is None: + end = sys.maxsize + + a, b = start, end + x, y = timestamps + + no_overlap = y < a or b < x + + return not no_overlap + + +def parse_filename_start(fn: str) -> Optional[tuple[str, pd.Timestamp]]: + """ + Parse start time by filename. + + >>> parse_filename('/data/test/sample.parquet/instrument_id=a/1577836800000000000-1578182400000000000-0.parquet') + '1577836800000000000' + + >>> parse_filename(1546383600000000000-1577826000000000000-SIM-1-HOUR-BID-EXTERNAL-0.parquet) + '1546383600000000000' + + >>> parse_filename('/data/test/sample.parquet/instrument_id=a/0648140b1fd7491a97983c0c6ece8d57.parquet') + + """ + instrument_id = re.findall(r"instrument_id\=(.*)\/", fn)[0] if "instrument_id" in fn else None + + start, _ = parse_filename(os.path.basename(fn)) + + if start is None: + return None + + start = pd.Timestamp(start) + return instrument_id, start + + +def py_type_to_parquet_type(cls: type) -> ParquetType: + if cls == QuoteTick: + return ParquetType.QuoteTick + elif cls == TradeTick: + return ParquetType.TradeTick + else: + raise RuntimeError(f"Type {cls} not supported as a `ParquetType` yet.") diff --git a/nautilus_trader/persistence/streaming/__init__.py b/nautilus_trader/persistence/streaming/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/nautilus_trader/persistence/streaming/batching.py b/nautilus_trader/persistence/streaming/batching.py new file mode 100644 index 000000000000..2440ea65bf76 --- /dev/null +++ b/nautilus_trader/persistence/streaming/batching.py @@ -0,0 +1,156 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +import itertools +import sys +from collections.abc import Generator +from pathlib import Path +from typing import Optional, Union + +import fsspec +import numpy as np +import pyarrow as pa +import pyarrow.parquet as pq + +from nautilus_trader.core.data import Data +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReader +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReaderType +from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.persistence.external.util import py_type_to_parquet_type +from nautilus_trader.serialization.arrow.serializer import ParquetSerializer + + +def _generate_batches_within_time_range( + batches: Generator[list[Data], None, None], + start_nanos: int = None, + end_nanos: int = None, +) -> Generator[list[Data], None, None]: + if start_nanos is None and end_nanos is None: + yield from batches + return + + if start_nanos is None: + start_nanos = 0 + + if end_nanos is None: + end_nanos = sys.maxsize + + start = start_nanos + end = end_nanos + started = False + for batch in batches: + min = batch[0].ts_init + max = batch[-1].ts_init + if min < start and max < start: + batch = [] # not started yet + + if max >= start and not started: + timestamps = np.array([x.ts_init for x in batch]) + mask = timestamps >= start + masked = list(itertools.compress(batch, mask)) + batch = masked + started = True + + if max > end: + timestamps = np.array([x.ts_init for x in batch]) + mask = timestamps <= end + masked = list(itertools.compress(batch, mask)) + batch = masked + if batch: + yield batch + return # stop iterating + + yield batch + + +def _generate_batches_rust( + files: list[str], + cls: type, + batch_size: int = 10_000, +) -> Generator[list[Union[QuoteTick, TradeTick]], None, None]: + assert cls in (QuoteTick, TradeTick) + + files = sorted(files, key=lambda x: Path(x).stem) + for file in files: + reader = ParquetReader( + file, + batch_size, + py_type_to_parquet_type(cls), + ParquetReaderType.File, + ) + for capsule in reader: + # PyCapsule > List + if cls == QuoteTick: + objs = QuoteTick.list_from_capsule(capsule) + elif cls == TradeTick: + objs = TradeTick.list_from_capsule(capsule) + + yield objs + + +def generate_batches_rust( + files: list[str], + cls: type, + batch_size: int = 10_000, + start_nanos: int = None, + end_nanos: int = None, +) -> Generator[list[Data], None, None]: + batches = _generate_batches_rust(files=files, cls=cls, batch_size=batch_size) + yield from _generate_batches_within_time_range(batches, start_nanos, end_nanos) + + +def _generate_batches( + files: list[str], + cls: type, + fs: fsspec.AbstractFileSystem, + instrument_id: Optional[InstrumentId] = None, # should be stored in metadata of parquet file? 
+ batch_size: int = 10_000, +) -> Generator[list[Data], None, None]: + files = sorted(files, key=lambda x: Path(x).stem) + for file in files: + for batch in pq.ParquetFile(fs.open(file)).iter_batches(batch_size=batch_size): + if batch.num_rows == 0: + break + + table = pa.Table.from_batches([batch]) + + if instrument_id is not None and "instrument_id" not in batch.schema.names: + table = table.append_column( + "instrument_id", + pa.array([str(instrument_id)] * len(table), pa.string()), + ) + objs = ParquetSerializer.deserialize(cls=cls, chunk=table.to_pylist()) + yield objs + + +def generate_batches( + files: list[str], + cls: type, + fs: fsspec.AbstractFileSystem, + instrument_id: Optional[InstrumentId] = None, + batch_size: int = 10_000, + start_nanos: int = None, + end_nanos: int = None, +) -> Generator[list[Data], None, None]: + batches = _generate_batches( + files=files, + cls=cls, + instrument_id=instrument_id, + fs=fs, + batch_size=batch_size, + ) + yield from _generate_batches_within_time_range(batches, start_nanos, end_nanos) diff --git a/nautilus_trader/persistence/streaming/engine.py b/nautilus_trader/persistence/streaming/engine.py new file mode 100644 index 000000000000..d9fda852c300 --- /dev/null +++ b/nautilus_trader/persistence/streaming/engine.py @@ -0,0 +1,238 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +import heapq +import itertools +import sys +from collections.abc import Generator + +import fsspec +import numpy as np + +from nautilus_trader.config import BacktestDataConfig +from nautilus_trader.core.data import Data +from nautilus_trader.model.data.bar import Bar +from nautilus_trader.model.data.bar import BarSpecification +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.persistence.funcs import parse_bytes +from nautilus_trader.persistence.streaming.batching import generate_batches +from nautilus_trader.persistence.streaming.batching import generate_batches_rust + + +class _StreamingBuffer: + def __init__(self, batches: Generator): + self._data: list = [] + self._is_complete = False + self._batches = batches + self._size = 10_000 + + @property + def is_complete(self) -> bool: + return self._is_complete and len(self) == 0 + + def remove_front(self, timestamp_ns: int) -> list: + if len(self) == 0 or timestamp_ns < self._data[0].ts_init: + return [] # nothing to remove + + timestamps = np.array([x.ts_init for x in self._data]) + mask = timestamps <= timestamp_ns + removed = list(itertools.compress(self._data, mask)) + self._data = list(itertools.compress(self._data, np.invert(mask))) + return removed + + def add_data(self) -> None: + if len(self) >= self._size: + return # buffer filled already + + objs = next(self._batches, None) + if objs is None: + self._is_complete = True + else: + self._data.extend(objs) + + @property + def max_timestamp(self) -> int: + return self._data[-1].ts_init + + def __len__(self) -> int: + return len(self._data) + + def __repr__(self): + return f"{self.__class__.__name__}({len(self)})" + + +class _BufferIterator: + """ + Streams merged batches of nautilus objects from _StreamingBuffer objects + """ + + def __init__( + self, + buffers: list[_StreamingBuffer], + target_batch_size_bytes: int = parse_bytes("100mb"), # noqa: B008, + ): + self._buffers = buffers + self._target_batch_size_bytes = target_batch_size_bytes + + def __iter__(self) -> Generator[list[Data], None, None]: + yield from self._iterate_batches_to_target_memory() + + def _iterate_batches_to_target_memory(self) -> Generator[list[Data], None, None]: + bytes_read = 0 + values = [] + + for objs in self._iterate_batches(): + values.extend(objs) + + bytes_read += sum([sys.getsizeof(x) for x in values]) + + if bytes_read > self._target_batch_size_bytes: + yield values + bytes_read = 0 + values = [] + + if values: # yield remaining values + yield values + + def _iterate_batches(self) -> Generator[list[Data], None, None]: + while True: + for buffer in self._buffers: + buffer.add_data() + + self._remove_completed() + + if len(self._buffers) == 0: + return # stop iterating + + yield self._remove_front() + + self._remove_completed() + + def _remove_front(self) -> list[Data]: + # Get the timestamp to trim at (the minimum of the maximum timestamps) + trim_timestamp = min(buffer.max_timestamp for buffer in self._buffers if len(buffer) > 0) + + # Trim front of buffers by timestamp + chunks = [] + for buffer in self._buffers: + chunk = buffer.remove_front(trim_timestamp) + if chunk == []: + continue + chunks.append(chunk) + + if not chunks: + return [] + + # Merge chunks together + objs = list(heapq.merge(*chunks, key=lambda x: x.ts_init)) + return objs + + def _remove_completed(self) -> None: + self._buffers = [b for b in self._buffers if not b.is_complete] + + +class 
+class StreamingEngine(_BufferIterator):
+    """
+    Streams merged batches of nautilus objects from `BacktestDataConfig` objects.
+    """
+
+    def __init__(
+        self,
+        data_configs: list[BacktestDataConfig],
+        target_batch_size_bytes: int = parse_bytes("100mb"),  # noqa: B008,
+    ):
+        # Sort configs (larger time-aggregated bar specifications first)
+        # Define the order of objects with the same timestamp.
+        # Larger bar aggregations first. H4 > H1
+        def _sort_larger_specifications_first(config) -> tuple[int, int]:
+            if config.bar_spec is None:
+                return sys.maxsize, sys.maxsize  # last
+            else:
+                spec = BarSpecification.from_str(config.bar_spec)
+                return spec.aggregation * -1, spec.step * -1
+
+        self._configs = sorted(data_configs, key=_sort_larger_specifications_first)
+
+        buffers = list(map(self._config_to_buffer, self._configs))
+
+        super().__init__(
+            buffers=buffers,
+            target_batch_size_bytes=target_batch_size_bytes,
+        )
+
+    @staticmethod
+    def _config_to_buffer(config: BacktestDataConfig) -> _StreamingBuffer:
+        if config.data_type is Bar:
+            assert config.bar_spec
+
+        files = config.catalog().get_files(
+            cls=config.data_type,
+            instrument_id=config.instrument_id,
+            start_nanos=config.start_time_nanos,
+            end_nanos=config.end_time_nanos,
+            bar_spec=BarSpecification.from_str(config.bar_spec) if config.bar_spec else None,
+        )
+        assert files, f"No files found for {config}"
+        if config.use_rust:
+            batches = generate_batches_rust(
+                files=files,
+                cls=config.data_type,
+                batch_size=config.batch_size,
+                start_nanos=config.start_time_nanos,
+                end_nanos=config.end_time_nanos,
+            )
+        else:
+            batches = generate_batches(
+                files=files,
+                cls=config.data_type,
+                instrument_id=InstrumentId.from_str(config.instrument_id)
+                if config.instrument_id
+                else None,
+                fs=fsspec.filesystem(config.catalog_fs_protocol or "file"),
+                batch_size=config.batch_size,
+                start_nanos=config.start_time_nanos,
+                end_nanos=config.end_time_nanos,
+            )
+
+        return _StreamingBuffer(batches=batches)
+
+
+def extract_generic_data_client_ids(data_configs: list["BacktestDataConfig"]) -> dict:
+    """
+    Extract a mapping of data_type : client_id from the list of `data_configs`.
+    In the process of merging the streaming data we lose the `client_id` for
+    generic data, so we need to inject this back in for the backtest engine to
+    be correctly loaded.
+    """
+    data_client_ids = [
+        (config.data_type, config.client_id) for config in data_configs if config.client_id
+    ]
+    assert len(set(data_client_ids)) == len(
+        dict(data_client_ids),
+    ), "data_type found with multiple client_ids"
+    return dict(data_client_ids)
+
+
+def groupby_datatype(data):
+    def _groupby_key(x):
+        return type(x).__name__
+
+    return [
+        {"type": type(v[0]), "data": v}
+        for v in [
+            list(v) for _, v in itertools.groupby(sorted(data, key=_groupby_key), key=_groupby_key)
+        ]
+    ]
diff --git a/nautilus_trader/persistence/streaming.py b/nautilus_trader/persistence/streaming/writer.py
similarity index 89%
rename from nautilus_trader/persistence/streaming.py
rename to nautilus_trader/persistence/streaming/writer.py
index 60af777fac13..c67c263c6110 100644
--- a/nautilus_trader/persistence/streaming.py
+++ b/nautilus_trader/persistence/streaming/writer.py
@@ -1,18 +1,3 @@
-# -------------------------------------------------------------------------------------------------
-# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved.
-# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - import datetime from typing import BinaryIO, Optional diff --git a/nautilus_trader/risk/engine.pyx b/nautilus_trader/risk/engine.pyx index dcc879deaa0d..b0351b8a7fcf 100644 --- a/nautilus_trader/risk/engine.pyx +++ b/nautilus_trader/risk/engine.pyx @@ -721,7 +721,7 @@ cdef class RiskEngine(Component): Money cum_notional_sell = None double xrate for order in orders: - if order.order_type == OrderType.MARKET: + if order.order_type == OrderType.MARKET or order.order_type == OrderType.MARKET_TO_LIMIT: if last_px is None: # Determine entry price last_quote = self._cache.quote_tick(instrument.id) diff --git a/nautilus_trader/system/kernel.py b/nautilus_trader/system/kernel.py index b4181c87534b..4601b88eb968 100644 --- a/nautilus_trader/system/kernel.py +++ b/nautilus_trader/system/kernel.py @@ -31,7 +31,6 @@ from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.clock import TestClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.common.logging import nautilus_header @@ -61,7 +60,7 @@ from nautilus_trader.live.risk_engine import LiveRiskEngine from nautilus_trader.model.identifiers import TraderId from nautilus_trader.msgbus.bus import MessageBus -from nautilus_trader.persistence.streaming import StreamingFeatherWriter +from nautilus_trader.persistence.streaming.writer import StreamingFeatherWriter from nautilus_trader.portfolio.base import PortfolioFacade from nautilus_trader.portfolio.portfolio import Portfolio from nautilus_trader.risk.engine import RiskEngine @@ -122,6 +121,8 @@ class NautilusKernel: If strategy state should be saved on stop. log_level : LogLevel, default LogLevel.INFO The log level for the kernels logger. + log_rate_limit : int, default 100_000 + The maximum messages per second which can be flushed to stdout or stderr. bypass_logging : bool, default False If logging to stdout should be bypassed. 
@@ -156,6 +157,7 @@ def __init__( # noqa (too complex) load_state: bool = False, save_state: bool = False, log_level: LogLevel = LogLevel.INFO, + log_rate_limit: int = 100_000, bypass_logging: bool = False, ): PyCondition.not_none(environment, "environment") @@ -174,21 +176,14 @@ def __init__( # noqa (too complex) PyCondition.valid_string(name, "name") PyCondition.type(cache_config, CacheConfig, "cache_config") PyCondition.type(cache_database_config, CacheDatabaseConfig, "cache_database_config") - PyCondition.true( - isinstance(data_config, (DataEngineConfig, LiveDataEngineConfig)), - "data_config was unrecognized type", - ex_type=TypeError, - ) - PyCondition.true( - isinstance(risk_config, (RiskEngineConfig, LiveRiskEngineConfig)), - "risk_config was unrecognized type", - ex_type=TypeError, - ) - PyCondition.true( - isinstance(exec_config, (ExecEngineConfig, LiveExecEngineConfig)), - "exec_config was unrecognized type", - ex_type=TypeError, - ) + if environment == Environment.BACKTEST: + PyCondition.type(data_config, DataEngineConfig, "data_config") + PyCondition.type(risk_config, RiskEngineConfig, "risk_config") + PyCondition.type(exec_config, ExecEngineConfig, "exec_config") + else: + PyCondition.type(data_config, LiveDataEngineConfig, "data_config") + PyCondition.type(risk_config, LiveRiskEngineConfig, "risk_config") + PyCondition.type(exec_config, LiveExecEngineConfig, "exec_config") PyCondition.type_or_none(streaming_config, StreamingConfig, "streaming_config") self._environment = environment @@ -205,29 +200,24 @@ def __init__( # noqa (too complex) # Components if self._environment == Environment.BACKTEST: self._clock = TestClock() - self._logger = Logger( - clock=LiveClock(loop=loop), - trader_id=self._trader_id, - machine_id=self._machine_id, - instance_id=self._instance_id, - level_stdout=log_level, - bypass=bypass_logging, - ) elif self.environment in (Environment.SANDBOX, Environment.LIVE): self._clock = LiveClock(loop=loop) - self._logger = LiveLogger( - loop=loop, - clock=self._clock, - trader_id=self._trader_id, - machine_id=self._machine_id, - instance_id=self._instance_id, - level_stdout=log_level, - ) + bypass_logging = False # Safety measure so live logging is visible else: raise NotImplementedError( # pragma: no cover (design-time error) f"environment {environment} not recognized", # pragma: no cover (design-time error) ) + self._logger = Logger( + clock=self._clock, + trader_id=self._trader_id, + machine_id=self._machine_id, + instance_id=self._instance_id, + level_stdout=log_level, + rate_limit=log_rate_limit, + bypass=bypass_logging, + ) + # Setup logging self._log = LoggerAdapter( component_name=name, @@ -239,7 +229,7 @@ def __init__( # noqa (too complex) # Setup loop (if live) if environment == Environment.LIVE: - self._loop: asyncio.AbstractEventLoop = loop or asyncio.get_event_loop() + self._loop: Optional[asyncio.AbstractEventLoop] = loop or asyncio.get_event_loop() if loop is not None: self._executor = concurrent.futures.ThreadPoolExecutor() self._loop.set_default_executor(self.executor) diff --git a/nautilus_trader/test_kit/mocks/data.py b/nautilus_trader/test_kit/mocks/data.py index 98f648945de8..64bdb1e7c299 100644 --- a/nautilus_trader/test_kit/mocks/data.py +++ b/nautilus_trader/test_kit/mocks/data.py @@ -27,11 +27,11 @@ from nautilus_trader.model.identifiers import Venue from nautilus_trader.model.objects import Price from nautilus_trader.model.objects import Quantity -from nautilus_trader.persistence.base import clear_singleton_instances from 
nautilus_trader.persistence.catalog.parquet import ParquetDataCatalog from nautilus_trader.persistence.external.core import process_files from nautilus_trader.persistence.external.readers import CSVReader from nautilus_trader.persistence.external.readers import Reader +from nautilus_trader.persistence.external.util import clear_singleton_instances from nautilus_trader.trading.filters import NewsEvent diff --git a/nautilus_trader/test_kit/stubs/component.py b/nautilus_trader/test_kit/stubs/component.py index 83e94de82c83..636335b24c0b 100644 --- a/nautilus_trader/test_kit/stubs/component.py +++ b/nautilus_trader/test_kit/stubs/component.py @@ -24,7 +24,7 @@ from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import log_level_from_str from nautilus_trader.common.factories import OrderFactory -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.core.data import Data from nautilus_trader.model.currencies import USD from nautilus_trader.model.currency import Currency @@ -51,9 +51,8 @@ def clock() -> LiveClock: return LiveClock() @staticmethod - def logger(level="INFO") -> LiveLogger: - return LiveLogger( - loop=asyncio.get_event_loop(), + def logger(level="INFO") -> Logger: + return Logger( clock=TestComponentStubs.clock(), level_stdout=log_level_from_str(level), ) diff --git a/nautilus_trader/trading/strategy.pyx b/nautilus_trader/trading/strategy.pyx index 720569f4ad00..cf816ecc226a 100644 --- a/nautilus_trader/trading/strategy.pyx +++ b/nautilus_trader/trading/strategy.pyx @@ -271,7 +271,6 @@ cdef class Strategy(Actor): Condition.not_none(logger, "logger") self.register_base( - trader_id=trader_id, msgbus=msgbus, cache=cache, clock=clock, @@ -300,7 +299,7 @@ cdef class Strategy(Actor): cdef int order_id_count = len(client_order_ids) cdef int order_list_id_count = len(order_list_ids) - self.order_factory.set_order_id_count(order_id_count) + self.order_factory.set_client_order_id_count(order_id_count) self.order_factory.set_order_list_id_count(order_list_id_count) self.log.info(f"Set ClientOrderIdGenerator client_order_id count to {order_id_count}.") self.log.info(f"Set ClientOrderIdGenerator order_list_id count to {order_list_id_count}.") diff --git a/nautilus_trader/trading/trader.pyx b/nautilus_trader/trading/trader.pyx index 7ce04bc334ab..8d23b06f7467 100644 --- a/nautilus_trader/trading/trader.pyx +++ b/nautilus_trader/trading/trader.pyx @@ -363,7 +363,6 @@ cdef class Trader(Component): # Wire component into trader actor.register_base( - trader_id=self.id, msgbus=self._msgbus, cache=self._cache, clock=clock, # Clock per component diff --git a/poetry.lock b/poetry.lock index c30120f393ce..0b2b7a7fb69c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -17,106 +17,106 @@ pycares = ">=4.0.0" [[package]] name = "aiohttp" -version = "3.8.3" +version = "3.8.4" description = "Async http client/server framework (asyncio)" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, - {file = 
"aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, - {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, - {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, - {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, - {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, - {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, - {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, - {file = 
"aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, - {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, - {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, - {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, - {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, - {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, + {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, + {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, + {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, + {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, + {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, + {file = 
"aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, + {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, + {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, + {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, + {file 
= "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, + {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, + {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, + {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, ] [package.dependencies] aiosignal = ">=1.1.2" async-timeout = ">=4.0.0a3,<5.0" attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<3.0" +charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" @@ -199,14 +199,14 @@ pytz = ">=2015.7" [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping library" category = "dev" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, ] [package.dependencies] @@ -234,32 +234,46 @@ msgspec = ">=0.11" [[package]] name = "black" -version = "22.12.0" +version = "23.1.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -371,19 +385,102 @@ files = [ [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = ">=3.6.0" +python-versions = "*" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = 
"sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = 
"charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "click" version = "8.1.3" @@ -480,47 +577,49 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "39.0.0" +version = "39.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.0-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52a1a6f81e738d07f43dab57831c29e57d21c81a942f4602fac7ee21b27f288"}, - {file = "cryptography-39.0.0-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:80ee674c08aaef194bc4627b7f2956e5ba7ef29c3cc3ca488cf15854838a8f72"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:887cbc1ea60786e534b00ba8b04d1095f4272d380ebd5f7a7eb4cc274710fad9"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f97109336df5c178ee7c9c711b264c502b905c2d2a29ace99ed761533a3460f"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a6915075c6d3a5e1215eab5d99bcec0da26036ff2102a1038401d6ef5bef25b"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:76c24dd4fd196a80f9f2f5405a778a8ca132f16b10af113474005635fe7e066c"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bae6c7f4a36a25291b619ad064a30a07110a805d08dc89984f4f441f6c1f3f96"}, - {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:875aea1039d78557c7c6b4db2fe0e9d2413439f4676310a5f269dd342ca7a717"}, - {file = "cryptography-39.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f6c0db08d81ead9576c4d94bbb27aed8d7a430fa27890f39084c2d0e2ec6b0df"}, - {file = "cryptography-39.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f3ed2d864a2fa1666e749fe52fb8e23d8e06b8012e8bd8147c73797c506e86f1"}, - {file = "cryptography-39.0.0-cp36-abi3-win32.whl", hash = "sha256:f671c1bb0d6088e94d61d80c606d65baacc0d374e67bf895148883461cd848de"}, - {file = "cryptography-39.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:e324de6972b151f99dc078defe8fb1b0a82c6498e37bff335f5bc6b1e3ab5a1e"}, - {file = "cryptography-39.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:754978da4d0457e7ca176f58c57b1f9de6556591c19b25b8bcce3c77d314f5eb"}, - {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ee1fd0de9851ff32dbbb9362a4d833b579b4a6cc96883e8e6d2ff2a6bc7104f"}, - {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:fec8b932f51ae245121c4671b4bbc030880f363354b2f0e0bd1366017d891458"}, - {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:407cec680e811b4fc829de966f88a7c62a596faa250fc1a4b520a0355b9bc190"}, - {file = "cryptography-39.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7dacfdeee048814563eaaec7c4743c8aea529fe3dd53127313a792f0dadc1773"}, - {file = "cryptography-39.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad04f413436b0781f20c52a661660f1e23bcd89a0e9bb1d6d20822d048cf2856"}, - {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50386acb40fbabbceeb2986332f0287f50f29ccf1497bae31cf5c3e7b4f4b34f"}, - {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e5d71c5d5bd5b5c3eebcf7c5c2bb332d62ec68921a8c593bea8c394911a005ce"}, - {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:844ad4d7c3850081dffba91cdd91950038ee4ac525c575509a42d3fc806b83c8"}, - {file = "cryptography-39.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e0a05aee6a82d944f9b4edd6a001178787d1546ec7c6223ee9a848a7ade92e39"}, - {file = "cryptography-39.0.0.tar.gz", hash = "sha256:f964c7dcf7802d133e8dbd1565914fa0194f9d683d82411989889ecd701e8adf"}, + {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, + {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, + {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, + {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, + {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, + {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, + {file = 
"cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, + {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, ] [package.dependencies] cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "ruff"] +pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test-randomorder = ["pytest-randomly"] +tox = ["tox"] [[package]] name = "css-html-js-minify" @@ -707,29 +806,28 @@ pyflakes = ">=3.0.0,<3.1.0" [[package]] name = "frozendict" -version = "2.3.4" +version = "2.3.5" description = "A simple immutable dictionary" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc"}, - {file = "frozendict-2.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782"}, - {file = "frozendict-2.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439"}, - {file = "frozendict-2.3.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a"}, - {file = "frozendict-2.3.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90"}, - {file = "frozendict-2.3.4-cp36-cp36m-win_amd64.whl", hash = "sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3"}, - {file = "frozendict-2.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8"}, - {file = 
"frozendict-2.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba"}, - {file = "frozendict-2.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9"}, - {file = "frozendict-2.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7"}, - {file = "frozendict-2.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299"}, - {file = "frozendict-2.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e"}, - {file = "frozendict-2.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3"}, - {file = "frozendict-2.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a"}, - {file = "frozendict-2.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072"}, - {file = "frozendict-2.3.4-py3-none-any.whl", hash = "sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15"}, - {file = "frozendict-2.3.4.tar.gz", hash = "sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78"}, + {file = "frozendict-2.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fa08c3f361e26c698c22f008804cac4a5b51437c12feafb983daadac12f66ead"}, + {file = "frozendict-2.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b8cbed40c96fce53e5a31ff2db30ca2c56992ba033555b08c22d099c3576ec"}, + {file = "frozendict-2.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:64a00bcad55ff122293b0d362856dce0b248e894f1dcb0a0f68227a5ba9e4be6"}, + {file = "frozendict-2.3.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:08f8efd6fbe885e6217d210302cdc12cb8134aeac2b83db898511bc5e34719c5"}, + {file = "frozendict-2.3.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a2c371d23f148886864a5b82f1e5eefed35ce145b5d59dcfd3d66c9391bb45"}, + {file = "frozendict-2.3.5-cp36-cp36m-win_amd64.whl", hash = "sha256:de96ccf6e574482c9537ffa68b2cb381537a5a085483001d4a2b93847089bc04"}, + {file = "frozendict-2.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1dbe11318b423fb3591e08d8b832d27dfd7b74dc20486d3384b8e05d6de2bcf7"}, + {file = "frozendict-2.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30af9f39a5e29edca96b09c8d0a17fc78a0efd5f31f74d5eebb4c9a28d03032f"}, + {file = "frozendict-2.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d1677e53d370ba44a07fbcc036fa24d4ae5693f0ed785496caf49e12a238d41f"}, + {file = "frozendict-2.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1411ef255b7a55fc337022ba158acf1391cd0d9a5c13142abbb7367936ab6f78"}, + {file = "frozendict-2.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4a1c8febc23f3c81c2b94d70268b5b760ed7e5e81c90c3baa22bf144db3d2f9"}, + {file = "frozendict-2.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:210a59a5267ae79b5d92cd50310cd5bcb122f1783a3d9016ad6db9cc179d4fbe"}, + {file = "frozendict-2.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21dd627c5bdcdf0743d49f7667dd186234baa85db91517de8cb80d3bda7018d9"}, + {file = "frozendict-2.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d58ca5f9094725c2f44b09fe4e71f7ddd250d5cdaca7219c674bd691373fed3a"}, + {file = "frozendict-2.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:f407d9d661d77896b7a6dae6ab7545c913e65d23a312cf2893406432069408db"}, + {file = "frozendict-2.3.5.tar.gz", hash = "sha256:65d7e3995c9174b77d7d80514d7062381750491e112bbeb44323368baa3e636a"}, ] [[package]] @@ -853,101 +951,101 @@ tqdm = ["tqdm"] [[package]] name = "hiredis" -version = "2.1.1" +version = "2.2.2" description = "Python wrapper for hiredis" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "hiredis-2.1.1-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:f15e48545dadf3760220821d2f3c850e0c67bbc66aad2776c9d716e6216b5103"}, - {file = "hiredis-2.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b3a437e3af246dd06d116f1615cdf4e620e639dfcc923fe3045e00f6a967fc27"}, - {file = "hiredis-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61732d75e2222a3b0060b97395df78693d5c3487fe4a5d0b75f6ac1affc68b9"}, - {file = "hiredis-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170c2080966721b42c5a8726e91c5fc271300a4ac9ddf8a5b79856cfd47553e1"}, - {file = "hiredis-2.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2d6e4caaffaf42faf14cfdf20b1d6fff6b557137b44e9569ea6f1877e6f375d"}, - {file = "hiredis-2.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d64b2d90302f0dd9e9ba43e89f8640f35b6d5968668da82ba2d2652b2cc3c3d2"}, - {file = "hiredis-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61fd1c55efb48ba734628f096e7a50baf0df3f18e91183face5c07fba3b4beb7"}, - {file = "hiredis-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfc5e923828714f314737e7f856b3dccf8805e5679fe23f07241b397cd785f6c"}, - {file = "hiredis-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ef2aa0485735c8608a92964e52ab9025ceb6003776184a1eb5d1701742cc910b"}, - {file = "hiredis-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2d39193900a03b900a25d474b9f787434f05a282b402f063d4ca02c62d61bdb9"}, - {file = "hiredis-2.1.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:4b51f5eb47e61c6b82cb044a1815903a77a4f840fa050fd2ff40d617c102d16c"}, - {file = "hiredis-2.1.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d9145d011b74bef972b485a09f391babaa101626dbb54afc2313d5682a746593"}, - {file = "hiredis-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6f45509b43d720d64837c1211fcdea42acd48e71539b7152d74c16413ceea080"}, - {file = "hiredis-2.1.1-cp310-cp310-win32.whl", hash = "sha256:3a284bbf6503cd6ac1183b3542fe853a8be47fb52a631224f6dda46ba229d572"}, - {file = "hiredis-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:f60fad285db733b2badba43f7036a1241cb3e19c17260348f3ff702e6eaa4980"}, - {file = "hiredis-2.1.1-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:69c20816ac2af11701caf10e5b027fd33c6e8dfe7806ab71bc5191aa2a6d50f9"}, - {file = "hiredis-2.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cd43dbaa73322a0c125122114cbc2c37141353b971751d05798f3b9780091e90"}, - {file = "hiredis-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c9632cd480fbc09c14622038a9a5f2f21ef6ce35892e9fa4df8d3308d3f2cedf"}, - {file = "hiredis-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252d4a254f1566012b94e35cba577a001d3a732fa91e824d2076233222232cf9"}, - {file = 
"hiredis-2.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b901e68f3a6da279388e5dbe8d3bc562dd6dd3ff8a4b90e4f62e94de36461777"}, - {file = "hiredis-2.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f45f296998043345ecfc4f69a51fa4f3e80ca3659864df80b459095580968a6"}, - {file = "hiredis-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f2acf237428dd61faa5b49247999ff68f45b3552c57303fcfabd2002eab249"}, - {file = "hiredis-2.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82bc6f5b92c9fcd5b5d6506000dd433006b126b193932c52a9bcc10dcc10e4fc"}, - {file = "hiredis-2.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19843e4505069085301c3126c91b4e48970070fb242d7c617fb6777e83b55541"}, - {file = "hiredis-2.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7336fddae533cbe786360d7a0316c71fe96313872c06cde20a969765202ab04"}, - {file = "hiredis-2.1.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:90b4355779970e121c219def3e35533ec2b24773a26fc4aa0f8271dd262fa2f2"}, - {file = "hiredis-2.1.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4beaac5047317a73b27cf15b4f4e0d2abaafa8378e1a6ed4cf9ff420d8f88aba"}, - {file = "hiredis-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7e25dc06e02689a45a49fa5e2f48bdfdbc11c5b52bef792a8cb37e0b82a7b0ae"}, - {file = "hiredis-2.1.1-cp311-cp311-win32.whl", hash = "sha256:f8b3233c1de155743ef34b0cae494e33befed5e0adba77762f5d8a8e417c5015"}, - {file = "hiredis-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:4ced076af04e28761d486501c58259247c1882fd19c7f94c18a257d143248eee"}, - {file = "hiredis-2.1.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f4300e063045e11ee79b79a7c9426813ab8d97e340b15843374093225dde407d"}, - {file = "hiredis-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04b6c04fe13e1e30ba6f9340d3d0fb776a7e52611d11809fb59341871e050e5"}, - {file = "hiredis-2.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436dcbbe3104737e8b4e2d63a019a764d107d72d6b6ee3cd107097c1c263fd1e"}, - {file = "hiredis-2.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11801d9e96f39286ab558c6db940c39fc00150450ae1007d18b35437d2f79ad7"}, - {file = "hiredis-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7d8d0ca7b4f6136f8a29845d31cfbc3f562cbe71f26da6fca55aa4977e45a18"}, - {file = "hiredis-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c040af9eb9b12602b4b714b90a1c2ac1109e939498d47b0748ec33e7a948747"}, - {file = "hiredis-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f448146b86a8693dda5f02bb4cb2ef65c894db2cf743e7bf351978354ce685e3"}, - {file = "hiredis-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:649c5a1f0952af50f008f0bbec5f0b1e519150220c0a71ef80541a0c128d0c13"}, - {file = "hiredis-2.1.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b8e7415b0952b0dd6df3aa2d37b5191c85e54d6a0ac1449ddb1e9039bbb39fa5"}, - {file = "hiredis-2.1.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:38c1a56a30b953e3543662f950f498cfb17afed214b27f4fc497728fb623e0c9"}, - {file = "hiredis-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6050b519fb3b62d68a28a1941ae9dc5122e8820fef2b8e20a65cb3c1577332a0"}, - {file = "hiredis-2.1.1-cp37-cp37m-win32.whl", hash = 
"sha256:96add2a205efffe5e19a256a50be0ed78fcb5e9503242c65f57928e95cf4c901"}, - {file = "hiredis-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8ceb101095f8cce9ac672ed7244b002d83ea97af7f27bb73f2fbe7fe8e8f03c7"}, - {file = "hiredis-2.1.1-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:9f068136e5119f2ba939ecd45c47b4e3cf6dd7ca9a65b6078c838029c5c1f564"}, - {file = "hiredis-2.1.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8a42e246a03086ae1430f789e37d7192113db347417932745c4700d8999f853a"}, - {file = "hiredis-2.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5359811bfdb10fca234cba4629e555a1cde6c8136025395421f486ce43129ae3"}, - {file = "hiredis-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d304746e2163d3d2cbc4c08925539e00d2bb3edc9e79fce531b5468d4e264d15"}, - {file = "hiredis-2.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fe297a52a8fc1204eef646bebf616263509d089d472e25742913924b1449099"}, - {file = "hiredis-2.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637e563d5cbf79d8b04224f99cfce8001146647e7ce198f0b032e32e62079e3c"}, - {file = "hiredis-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b61340ff2dcd99d5ded0ca5fc33c878d89a1426e2f7b6dbc7c7381e330bc8a"}, - {file = "hiredis-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66eaf6d5ea5207177ba8ffb9ee479eea743292267caf1d6b89b51cf9d5885d23"}, - {file = "hiredis-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4d2d0e458c32cdafd9a0f0b0aaeb61b169583d074287721eee740b730b7654bd"}, - {file = "hiredis-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8a92781e466f2f1f9d38720d8920cb094bc0d59f88219591bc12b1c12c9d471c"}, - {file = "hiredis-2.1.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:5560b09304ebaac5323a7402f5090f2a8559843200014f5adf1ff7517dd3805b"}, - {file = "hiredis-2.1.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4732a0bf877bbd69d4d1b38a3db2160252acb31894a48f324fd54f742f6b2123"}, - {file = "hiredis-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b5bd33ac8a572e2aa94b489dec35b0c00ca554b27e56ad19953e0bf2cbcf3ad8"}, - {file = "hiredis-2.1.1-cp38-cp38-win32.whl", hash = "sha256:07e86649773e486a21e170d1396217e15833776d9e8f4a7121c28a1d37e032c9"}, - {file = "hiredis-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:b964d81db8f11a99552621acd24c97381a0fd401a57187ce9f8cb9a53f4b6f4e"}, - {file = "hiredis-2.1.1-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:27e89e7befc785a273cccb105840db54b7f93005adf4e68c516d57b19ea2aac2"}, - {file = "hiredis-2.1.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ea6f0f98e1721741b5bc3167a495a9f16459fe67648054be05365a67e67c29ba"}, - {file = "hiredis-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:40c34aeecccb9474999839299c9d2d5ff46a62ed47c58645b7965f48944abd74"}, - {file = "hiredis-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65927e75da4265ec88d06cbdab20113a9e69bbac3aea1ec053d4d940f1c88fc8"}, - {file = "hiredis-2.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72cab67bcceb2e998da2f28aad9ec7b1a5ece5888f7ac3d3723cccba62338703"}, - {file = "hiredis-2.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d67429ff99231137491d8c3daa097c767a9c273bb03ac412ed8f6acb89e2e52f"}, - {file = "hiredis-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:10c596bce5e9dd379c68c17208716da2767bb6f6f2a71d748f9e4c247ced31e6"}, - {file = "hiredis-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e0aab2d6e60aa9f9e14c83396b4a58fb4aded712806486c79189bcae4a175ac"}, - {file = "hiredis-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:17deb7d218a5ae9f05d2b19d51936231546973303747924fc17a2869aef0029a"}, - {file = "hiredis-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d3d60e2af4ce93d6e45a50a9b5795156a8725495e411c7987a2f81ab14e99665"}, - {file = "hiredis-2.1.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:fbc960cd91e55e2281e1a330e7d1c4970b6a05567dd973c96e412b4d012e17c6"}, - {file = "hiredis-2.1.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0ae718e9db4b622072ff73d38bc9cd7711edfedc8a1e08efe25a6c8170446da4"}, - {file = "hiredis-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e51e3fa176fecd19660f898c4238232e8ca0f5709e6451a664c996f9aec1b8e1"}, - {file = "hiredis-2.1.1-cp39-cp39-win32.whl", hash = "sha256:0258bb84b4a1e015f14f891d91957042fa88f6f4e86cc0808d735ebbc1e3fc88"}, - {file = "hiredis-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c5a47c964c58c044a323336a798d8729722e09865d7e087eb3512df6146b39a8"}, - {file = "hiredis-2.1.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8de0334c212e069d49952e476e16c6b42ba9677cc1e2d2f4588bd9a39489a3ab"}, - {file = "hiredis-2.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653e33f69202c00eca35416ee23091447ad1e9f9a556cc2b715b2befcfc31b3c"}, - {file = "hiredis-2.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14cccf931c859ba3169d766e892a3673a79649ec2ceca7ba95ea376b23fd222"}, - {file = "hiredis-2.1.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86c56359fd7aca6a9ca41af91636aef15d5ad6d19e631ebd662f233c79f7e100"}, - {file = "hiredis-2.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c2b197e3613c3aef3933b2c6eb095bd4be9c84022aea52057697b709b400c4bc"}, - {file = "hiredis-2.1.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ec060d6db9576f6723b5290448aea67160608556b5506eb947997d9d1ca6f7b7"}, - {file = "hiredis-2.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8781f5b91d75abef529a33cf3509ba5fe540d2814de0c4602f0f5ba6f1669739"}, - {file = "hiredis-2.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bd6b934794bea92a15b10ac35889df63b28d2abf9d020a7c87c05dd9c6e1edd"}, - {file = "hiredis-2.1.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf6d85c1ffb4ec4a859b2f31cd8845e633f91ed971a3cce6f59a722dcc361b8c"}, - {file = "hiredis-2.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bbf80c686e3f63d40b0ab42d3605d3b6d415c368a5d8a9764a314ebda6138650"}, - {file = "hiredis-2.1.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c1d85dfdf37a8df0e0174fc0c762b485b80a2fc7ce9592ae109aaf4a5d45ba9a"}, - {file = "hiredis-2.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:816b9ea96e7cc2496a1ac9c4a76db670827c1e31045cc377c66e64a20bb4b3ff"}, - {file = "hiredis-2.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db59afa0edf194bea782e4686bfc496fc1cea2e24f310d769641e343d14cc929"}, - {file = 
"hiredis-2.1.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c7a7e4ccec7164cdf2a9bbedc0e7430492eb56d9355a41377f40058c481bccc"}, - {file = "hiredis-2.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:646f150fa73f9cbc69419e34a1aae318c9f39bd9640760aa46624b2815da0c2d"}, - {file = "hiredis-2.1.1.tar.gz", hash = "sha256:21751e4b7737aaf7261a068758b22f7670155099592b28d8dde340bf6874313d"}, + {file = "hiredis-2.2.2-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:ba6123ff137275e2f4c31fc74b93813fcbb79160d43f5357163e09638c7743de"}, + {file = "hiredis-2.2.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d995846acc8e3339fb7833cd19bf6f3946ff5157c8488a4df9c51cd119a36870"}, + {file = "hiredis-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82f869ca44bcafa37cd71cfa1429648fa354d6021dcd72f03a2f66bcb339c546"}, + {file = "hiredis-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa90a5ee7a7f30c3d72d3513914b8f51f953a71b8cbd52a241b6db6685e55645"}, + {file = "hiredis-2.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01e2e588392b5fdcc3a6aa0eb62a2eb2a142f829082fa4c3354228029d3aa1ce"}, + {file = "hiredis-2.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dac177a6ab8b4eb4d5e74978c29eef7cc9eef14086f814cb3893f7465578044"}, + {file = "hiredis-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb992e3f9753c5a0c637f333c2010d1ad702aebf2d730ee4d484f32b19bae97"}, + {file = "hiredis-2.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61c22fda5fc25d31bbced24a8322d33c5cb8cad9ba698634c16edb5b3e79a91"}, + {file = "hiredis-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9873898e26e50cd41415e9d1ea128bfdb60eb26abb4f5be28a4500fd7834dc0c"}, + {file = "hiredis-2.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2c18b00a382546e19bcda8b83dcca5b6e0dbc238d235723434405f48a18e8f77"}, + {file = "hiredis-2.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8c3a6998f6f88d7ca4d082fd26525074df13162b274d7c64034784b6fdc56666"}, + {file = "hiredis-2.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0fc1f9a9791d028b2b8afa318ccff734c7fc8861d37a04ca9b3d27c9b05f9718"}, + {file = "hiredis-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f2cfd323f83985f2bed6ed013107873275025af270485b7d04c338bfb47bd14"}, + {file = "hiredis-2.2.2-cp310-cp310-win32.whl", hash = "sha256:55c7e9a9e05f8c0555bfba5c16d98492f8b6db650e56d0c35cc28aeabfc86020"}, + {file = "hiredis-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:eaff526c2fed31c971b0fa338a25237ae5513550ef75d0b85b9420ec778cca45"}, + {file = "hiredis-2.2.2-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:688b9b7458b4f3f452fea6ed062c04fa1fd9a69d9223d95c6cb052581aba553b"}, + {file = "hiredis-2.2.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:544d52fde3a8dac7854673eac20deca05214758193c01926ffbb0d57c6bf4ffe"}, + {file = "hiredis-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:990916e8b0b4eedddef787e73549b562f8c9e73a7fea82f9b8ff517806774ad0"}, + {file = "hiredis-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10dc34854e9acfb3e7cc4157606e2efcb497b1c6fca07bd6c3be34ae5e413f13"}, + {file = "hiredis-2.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c446a2007985ae49c2ecd946dd819dea72b931beb5f647ba08655a1a1e133fa8"}, 
+ {file = "hiredis-2.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b9f928dc6cd43ed0f0ffc1c75fb209fb180f004b7e2e19994805f998d247aa"}, + {file = "hiredis-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a355aff8dfa02ebfe67f0946dd706e490bddda9ea260afac9cdc43942310c53"}, + {file = "hiredis-2.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831461abe5b63e73719621a5f31d8fc175528a05dc09d5a8aa8ef565d6deefa4"}, + {file = "hiredis-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75349f7c8f77eb0fd33ede4575d1e5b0a902a8176a436bf03293d7fec4bd3894"}, + {file = "hiredis-2.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1eb39b34d15220095dc49ad1e1082580d35cd3b6d9741def52988b5075e4ff03"}, + {file = "hiredis-2.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a9b306f4e870747eea8b008dcba2e9f1e4acd12b333a684bc1cc120e633a280e"}, + {file = "hiredis-2.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:03dfb4ab7a2136ce1be305592553f102e1bd91a96068ab2778e3252aed20d9bc"}, + {file = "hiredis-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8bc89c7e33fecb083a199ade0131a34d20365a8c32239e218da57290987ca9a"}, + {file = "hiredis-2.2.2-cp311-cp311-win32.whl", hash = "sha256:ed44b3c711cecde920f238ac35f70ac08744f2079b6369655856e43944464a72"}, + {file = "hiredis-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:2e2f0ce3e8ab1314a52f562386220f6714fd24d7968a95528135ad04e88cc741"}, + {file = "hiredis-2.2.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:e7e61ab75b851aac2d6bc634d03738a242a6ef255a44178437b427c5ebac0a87"}, + {file = "hiredis-2.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb14339e399554bb436cc4628e8aaa3943adf7afcf34aba4cbd1e3e6b9ec7ec"}, + {file = "hiredis-2.2.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ec57886f20f4298537cb1ab9dbda98594fb8d7c724c5fbf9a4b55329fd4a63"}, + {file = "hiredis-2.2.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a89f5afb9827eab07b9c8c585cd4dc95e5232c727508ae2c935d09531abe9e33"}, + {file = "hiredis-2.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3645590b9234cafd21c8ecfbf252ad9aa1d67629f4bdc98ba3627f48f8f7b5aa"}, + {file = "hiredis-2.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99350e89f52186146938bdba0b9c6cd68802c20346707d6ca8366f2d69d89b2f"}, + {file = "hiredis-2.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b5d290f3d8f7a05c4adbe6c355055b87c7081bfa1eccd1ae5491216307ee5f53"}, + {file = "hiredis-2.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c95be6f20377d5995ef41a98314542e194d2dc9c2579d8f130a1aea78d48fd42"}, + {file = "hiredis-2.2.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e4e2da61a04251121cb551f569c3250e6e27e95f2a80f8351c36822eda1f5d2b"}, + {file = "hiredis-2.2.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ac7f8d68826f95a3652e44b0c12bfa74d3aa6531d47d5dbe6a2fbfc7979bc20f"}, + {file = "hiredis-2.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:359e662324318baadb768d3c4ade8c4bdcfbb313570eb01e15d75dc5db781815"}, + {file = "hiredis-2.2.2-cp37-cp37m-win32.whl", hash = "sha256:fd0ca35e2cf44866137cbb5ae7e439fab18a0b0e0e1cf51d45137622d59ec012"}, + {file = "hiredis-2.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c9488ffb10acc6b121c498875278b0a6715d193742dc92d21a281712169ac06d"}, + 
{file = "hiredis-2.2.2-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:1570fe4f93bc1ea487fb566f2b863fd0ed146f643a4ea31e4e07036db9e0c7f8"}, + {file = "hiredis-2.2.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8753c561b37cccbda7264c9b4486e206a6318c18377cd647beb3aa41a15a6beb"}, + {file = "hiredis-2.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a06d0dd84f10be6b15a92edbca2490b64917280f66d8267c63de99b6550308ad"}, + {file = "hiredis-2.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40ff3f1ec3a4046732e9e41df08dcb1a559847196755d295d43e32528aae39e6"}, + {file = "hiredis-2.2.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24d856e13c02bd9d28a189e47be70cbba6f2c2a4bd85a8cc98819db9e7e3e06"}, + {file = "hiredis-2.2.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ee9fe7cef505e8d925c70bebcc16bfab12aa7af922f948346baffd4730f7b00"}, + {file = "hiredis-2.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03ab1d545794bb0e09f3b1e2c8b3adcfacd84f6f2d402bfdcd441a98c0e9643c"}, + {file = "hiredis-2.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14dfccf4696d75395c587a5dafafb4f7aa0a5d55309341d10bc2e7f1eaa20771"}, + {file = "hiredis-2.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2ddc573809ca4374da1b24b48604f34f3d5f0911fcccfb1c403ff8d8ca31c232"}, + {file = "hiredis-2.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:24301ca2bf9b2f843b4c3015c90f161798fa3bbc5b95fd494785751b137dbbe2"}, + {file = "hiredis-2.2.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b083a69e158138ffa95740ff6984d328259387b5596908021b3ccb946469ff66"}, + {file = "hiredis-2.2.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8e16dc949cc2e9c5fbcd08de05b5fb61b89ff65738d772863c5c96248628830e"}, + {file = "hiredis-2.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:674f296c3c89cb53f97aa9ba2508d3f360ad481b9e0c0e3a59b342a15192adaf"}, + {file = "hiredis-2.2.2-cp38-cp38-win32.whl", hash = "sha256:20ecbf87aac4f0f33f9c55ae15cb73b485d256c57518c590b7d0c9c152150632"}, + {file = "hiredis-2.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:b11960237a3025bf248135e5b497dc4923e83d137eb798fbfe78b40d57c4b156"}, + {file = "hiredis-2.2.2-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:18103090b8eda9c529830e26594e88b0b1472055785f3ed29b8adc694d03862a"}, + {file = "hiredis-2.2.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d1acb7c957e5343303b3862947df3232dc7395da320b3b9ae076dfaa56ad59dc"}, + {file = "hiredis-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4997f55e1208af95a8fbd0fa187b04c672fcec8f66e49b9ab7fcc45cc1657dc4"}, + {file = "hiredis-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:449e18506d22af40977abd0f5a8979f57f88d4562fe591478a3438d76a15133d"}, + {file = "hiredis-2.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a32a4474f7a4abdea954f3365608edee3f90f1de9fa05b81d214d4cad04c718a"}, + {file = "hiredis-2.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e86c800c6941698777fc58419216a66a7f76504f1cea72381d2ee206888e964d"}, + {file = "hiredis-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c73aa295c5369135247ff63aa1fbb116067485d0506cd787cc0c868e72bbee55"}, + {file = "hiredis-2.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2e10a66680023bd5c5a3d605dae0844e3dde60eac5b79e39f51395a2aceaf634"}, + {file = "hiredis-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03ab760fc96e0c5d36226eb727f30645bf6a53c97f14bfc0a4d0401bfc9b8af7"}, + {file = "hiredis-2.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:855d258e7f1aee3d7fbd5b1dc87790b1b5016e23d369a97b934a25ae7bc0171f"}, + {file = "hiredis-2.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ccc33d87866d213f84f857a98f69c13f94fbf99a3304e328869890c9e49c8d65"}, + {file = "hiredis-2.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:339af17bb9817f8acb127247c79a99cad63db6738c0fb2aec9fa3d4f35d2a250"}, + {file = "hiredis-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57f73aa04d0b70ff436fb35fa7ea2b796aa7addbd7ebb8d1aa1f3d1b3e4439f1"}, + {file = "hiredis-2.2.2-cp39-cp39-win32.whl", hash = "sha256:e97d4e650b8d933a1229f341db92b610fc52b8d752490235977b63b81fbbc2cb"}, + {file = "hiredis-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8d43a7bba66a800279e33229a206861be09c279e261eaa8db4824e59465f4848"}, + {file = "hiredis-2.2.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632d79fd02b03e8d9fbaebbe40bfe34b920c5d0a9c0ef6270752e0db85208175"}, + {file = "hiredis-2.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a5fefac31c84143782ec1ebc323c04e733a6e4bfebcef9907a34e47a465e648"}, + {file = "hiredis-2.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5155bc1710df8e21aa48c9b2f4d4e13e4987e1efff363a1ef9c84fae2cc6c145"}, + {file = "hiredis-2.2.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f220b71235d2deab1b4b22681c8aee444720d973b80f1b86a4e2a85f6bcf1e1"}, + {file = "hiredis-2.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1f1efbe9cc29a3af39cf7eed27225f951aed3f48a1149c7fb74529fb5ab86d4"}, + {file = "hiredis-2.2.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1f1c44242c18b1f02e6d1162f133d65d00e09cc10d9165dccc78662def72abc2"}, + {file = "hiredis-2.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e0f444d9062f7e487ef42bab2fb2e290f1704afcbca48ad3ec23de63eef0fda"}, + {file = "hiredis-2.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac15e7e1efca51b4695e540c80c328accb352c9608da7c2df82d1fa1a3c539ef"}, + {file = "hiredis-2.2.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20cfbc469400669a5999aa34ccba3872a1e34490ec3d5c84e8c0752c27977b7c"}, + {file = "hiredis-2.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bae004a0b978bf62e38d0eef5ab9156f8101d01167b3ca7054bd0994b773e917"}, + {file = "hiredis-2.2.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1ce725542133dbdda9e8704867ef52651886bd1ef568c6fd997a27404381985"}, + {file = "hiredis-2.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e6ea7532221c97fa6d79f7d19d452cd9d1141d759c54279cc4774ce24728f13"}, + {file = "hiredis-2.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7114961ed78d708142f6c6eb1d2ed65dc3da4b5ae8a4660ad889dd7fc891971"}, + {file = "hiredis-2.2.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b084fbc3e69f99865242f8e1ccd4ea2a34bf6a3983d015d61133377526c0ce2"}, + {file = "hiredis-2.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2d1ba0799f3487294f72b2157944d5c3a4fb33c99e2d495d63eab98c7ec7234b"}, + {file = "hiredis-2.2.2.tar.gz", hash = "sha256:9c270bd0567a9c60673284e000132f603bb4ecbcd707567647a68f85ef45c4d4"}, ] [[package]] @@ -968,14 +1066,14 @@ nest-asyncio = "*" [[package]] name = "identify" -version = "2.5.16" +version = "2.5.18" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.16-py2.py3-none-any.whl", hash = "sha256:832832a58ecc1b8f33d5e8cb4f7d3db2f5c7fbe922dfee5f958b48fed691501a"}, - {file = "identify-2.5.16.tar.gz", hash = "sha256:c47acedfe6495b1c603ed7e93469b26e839cab38db4155113f36f718f8b3dc47"}, + {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, + {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, ] [package.extras] @@ -1318,43 +1416,50 @@ files = [ [[package]] name = "msgspec" -version = "0.12.0" -description = "A fast and friendly JSON/MessagePack library, with optional schema validation" +version = "0.13.1" +description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "msgspec-0.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b3b193fc6e5399040f2c657f2fe77962b8d39bddb9923d4e4850e2e8111ef83"}, - {file = "msgspec-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b90c8aa5b029f8fb8f9a4e71429cb37b4110382731058f7c4dfa125a005c459"}, - {file = "msgspec-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78cbcabfa413edc281f0f9bb652b42a3092cb289c31dc4489e7d896e615581fb"}, - {file = "msgspec-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be9e4eeea7f47c0a7c522afb4697d9618cb38e81e52130c9b15ad5279a69d153"}, - {file = "msgspec-0.12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e581459710a53d240ad579bb7fbe2b64767003c3d06254f17c0cd106fab03b20"}, - {file = "msgspec-0.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:237ebaeb409269a001ba29dbb3f9fe760db63bc82d013b989733e6ec59ef2cf4"}, - {file = "msgspec-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:e010bab6128d1700d7bf70cbe7ce33a54cfeedf15c11f015712dcc0c062ca571"}, - {file = "msgspec-0.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebe1cd8c42e85dbf59ede8ef1e4f8f73897664a3a3341f16a7616bb31fe21f2c"}, - {file = "msgspec-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72b55df12dbcd89f636165bc1b76ac174917e7756105496b685a7970f5e9d70c"}, - {file = "msgspec-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bffa75be17ec2d4953c8068cbe6cd9b064dd0403ec6b04dc45d0dfdd9ca2cf36"}, - {file = "msgspec-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad43ccaf17deee41ed84dacc6619d2ccd3847fdebe9fc5f2b887bbf4b938724f"}, - {file = "msgspec-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f3177bd78b5a4e1663ee9279889d89b574acf619aa29aee84f86c00ca259016d"}, - {file = "msgspec-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98bc70226b858218920a25b85906e61ada41898b8f2fc1f41af31d9628353e04"}, - {file = "msgspec-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9e3adf5f7a8aa6a1359ebe9e738d6b7b25389c942b1d7f8849981ff62ed3d8e"}, - {file = "msgspec-0.12.0-cp38-cp38-macosx_10_9_x86_64.whl", 
hash = "sha256:1d5a6a08fa1bd2b4e29b076c84ae6159a33f4256b88d6c6c55df9de04e225a5a"}, - {file = "msgspec-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b25a98e7f99dcb86ffec7b462222703fe87bc6e299be31d1a68a657dc7317498"}, - {file = "msgspec-0.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9bda08bb1f9372d7da112cd697993f238fc22fbc72accd1dfb50eb22b68c23"}, - {file = "msgspec-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c905ba72400a0593c6244691d78e450036b8f54a05b9544740e47ed35f739af"}, - {file = "msgspec-0.12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:04aff2b6404d54637170235983c67a231326a2b73a96a93f63c903f4a3e5701a"}, - {file = "msgspec-0.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8e784500de56c89db90f0b5c8043999dd128260aa4fd111fb3b65566140b7830"}, - {file = "msgspec-0.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:318583cfad415d5c6bbb9e87a8a998de353146b64ac202c90a3d9396a5ea6b97"}, - {file = "msgspec-0.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:769e6d442969c0238c65b078b4962af19f4c1d875a4dc93267ed6cad4d887b47"}, - {file = "msgspec-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71332e8969436ebc8bc3bb455d5c47a65ccaf236f7267e369959f2fcaf88bf3"}, - {file = "msgspec-0.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:faf7510ff87d086e21503f8504ca0550161fdfb1a025d9060a90a3e58d727be4"}, - {file = "msgspec-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2886e45bef04db383649e30fba56f2124c84ce6455deff6689e7dc9dc4926329"}, - {file = "msgspec-0.12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c281bd01931456cf01d553bdce315cf148bfa3565be01390f12800c39f75797"}, - {file = "msgspec-0.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54c061e764f80c915fd86db8bfe48c88fc8c6047649fc8a5900a03dda745e600"}, - {file = "msgspec-0.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:cc7f0555b4cb8c74cca6d13040b3fd9e7adafc0fff273203a13064453c28d32f"}, - {file = "msgspec-0.12.0.tar.gz", hash = "sha256:d8fe529a2414a1a5d3ccb1e875b164cc4f56614750c7b27c85006808f5658489"}, + {file = "msgspec-0.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a07a50afb119728e4969ab58602985549b8b9af6cf565873fad28365eb7dbf7b"}, + {file = "msgspec-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:014e6e0b01946528f609102c5624749501f63ebdeba386855948185288bc299d"}, + {file = "msgspec-0.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1d6de944dd2d369186ce9f3d98119980427fd8fba0788d819d775fe0440442"}, + {file = "msgspec-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:037ff80d956bb13fc8c221a386f7ba97f6e55d0dc84f4bfe92226df570140802"}, + {file = "msgspec-0.13.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8f0679fbafb889e4165bf49c2afd342716dcee2fe13de3777f1613c09740f304"}, + {file = "msgspec-0.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99d239343d2b78ad139277067e727ad573445eec6b7ba113e4b987e15728d9d4"}, + {file = "msgspec-0.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:88466f804b817151cbff9e53d698f86a9280b0d00896302abb528be01c46701a"}, + {file = "msgspec-0.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:494de96dc6ad90c2f3ba96130591ade518dfd019be767e6030c73ea3e9eb6df9"}, + {file = "msgspec-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8853fdf1c44cb874c786133ce1062b6d9a484e0fa48f7174035eb506f462042"}, + {file = 
"msgspec-0.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac902ccdfa4574498342d06f4c49bfa60317724737985b2fc954e8c8eb009910"}, + {file = "msgspec-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0e594996a042fb6dcd9839094b6ef8768e7707bf1567e4d61cbf07c1d12f24"}, + {file = "msgspec-0.13.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2d32dedfeb5646ac339eb4602886861756f050c6307e2177b7c68f5084519069"}, + {file = "msgspec-0.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de920ff863aceef8304a1c5c3855f089782429f6fdbfea1321061f37e4ba418a"}, + {file = "msgspec-0.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:36b919dad8cbf98d1e2b3d829bf26fbe64205a08ee1caa6988581e5850bcb6b1"}, + {file = "msgspec-0.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:30c80380bfe3a03b2e968f08bdbec8a9fc1619cd2555cc8334774c475296faaa"}, + {file = "msgspec-0.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0e12db829b670159780d4483248c5099b99cc487f3c134f2d3e6ce59fa18ce67"}, + {file = "msgspec-0.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa7febe37689642a1a2ccdeecf3a8b820ea043cd142438bd0a88714c982555a0"}, + {file = "msgspec-0.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:317ec8e80cf919c42b6defb375e12ad49af649238327d0abaed980c4d72a7bea"}, + {file = "msgspec-0.13.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d51cdd579b632e3686c14eee9948aeba056ea969b85b60e09aa9811cea3524cd"}, + {file = "msgspec-0.13.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:348415004e907c856e22884485c9f91dc8f39317ce365b0881a08ac35f234afe"}, + {file = "msgspec-0.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:b80b58d8fa70f881701a3bff50661327b4f5a50441d33386fac95a94eb3cad1d"}, + {file = "msgspec-0.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:96f7898b4433a2570636cf0659ac7cedd1b753f0a108fa0ff82a813a830c5a98"}, + {file = "msgspec-0.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fae7c7e6f7715cebc34ce616b078dc2c021651115915d0c2ea507d9c9074e34d"}, + {file = "msgspec-0.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:050af5e263eece6a16c07ba651ab83208acccfccc5ab8b47fa6cf99c135f01a9"}, + {file = "msgspec-0.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e143da1561b6a766c2fccdb9c487dfcf1d56c8c1b9c8c8c5c3e322f02fbb8c11"}, + {file = "msgspec-0.13.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:59e572b78da8264b2ce3dbb65bec6a4a65cb355046b0d5fc16df24626e889947"}, + {file = "msgspec-0.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bf92cbef573127966cccce71eb9368a6ce36c847ee128e048c7c04724daae855"}, + {file = "msgspec-0.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:358da53e870d8bdb890b27c2cd9a670bc17c1d4330908d3fb6e00dcdc8d9c54c"}, + {file = "msgspec-0.13.1.tar.gz", hash = "sha256:b252174d33c7d7885e5efe8009147f09da4bf6655b86dba9128a2a4d4c1fb6ee"}, ] +[package.extras] +dev = ["coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] +doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] +test = ["msgpack", "mypy", "pyright", "pytest", "pyyaml", "tomli", "tomli-w"] +toml = ["tomli", "tomli-w"] +yaml = ["pyyaml"] + [[package]] name = "multidict" version = "6.0.4" @@ -1441,42 +1546,38 @@ files = [ [[package]] name = "mypy" -version = "0.991" +version = "1.0.1" description = 
"Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, + {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, + {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, + {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, + {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, + {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, + {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, + {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, + {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, + {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, + {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, + {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, + {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, + {file = 
"mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, + {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, + {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, + {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, + {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, + {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, ] [package.dependencies] @@ -1492,14 +1593,14 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -1558,40 +1659,40 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.24.1" +version = "1.24.2" description = "Fundamental package for array computing in Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "numpy-1.24.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:179a7ef0889ab769cc03573b6217f54c8bd8e16cef80aad369e1e8185f994cd7"}, - {file = "numpy-1.24.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b09804ff570b907da323b3d762e74432fb07955701b17b08ff1b5ebaa8cfe6a9"}, - {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b739841821968798947d3afcefd386fa56da0caf97722a5de53e07c4ccedc7"}, - {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e3463e6ac25313462e04aea3fb8a0a30fb906d5d300f58b3bc2c23da6a15398"}, - {file = "numpy-1.24.1-cp310-cp310-win32.whl", hash = "sha256:b31da69ed0c18be8b77bfce48d234e55d040793cebb25398e2a7d84199fbc7e2"}, - {file = "numpy-1.24.1-cp310-cp310-win_amd64.whl", hash = "sha256:b07b40f5fb4fa034120a5796288f24c1fe0e0580bbfff99897ba6267af42def2"}, - {file = "numpy-1.24.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7094891dcf79ccc6bc2a1f30428fa5edb1e6fb955411ffff3401fb4ea93780a8"}, - {file = "numpy-1.24.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e418681372520c992805bb723e29d69d6b7aa411065f48216d8329d02ba032"}, - {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:e274f0f6c7efd0d577744f52032fdd24344f11c5ae668fe8d01aac0422611df1"}, - {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0044f7d944ee882400890f9ae955220d29b33d809a038923d88e4e01d652acd9"}, - {file = "numpy-1.24.1-cp311-cp311-win32.whl", hash = "sha256:442feb5e5bada8408e8fcd43f3360b78683ff12a4444670a7d9e9824c1817d36"}, - {file = "numpy-1.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:de92efa737875329b052982e37bd4371d52cabf469f83e7b8be9bb7752d67e51"}, - {file = "numpy-1.24.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b162ac10ca38850510caf8ea33f89edcb7b0bb0dfa5592d59909419986b72407"}, - {file = "numpy-1.24.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26089487086f2648944f17adaa1a97ca6aee57f513ba5f1c0b7ebdabbe2b9954"}, - {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caf65a396c0d1f9809596be2e444e3bd4190d86d5c1ce21f5fc4be60a3bc5b36"}, - {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0677a52f5d896e84414761531947c7a330d1adc07c3a4372262f25d84af7bf7"}, - {file = "numpy-1.24.1-cp38-cp38-win32.whl", hash = "sha256:dae46bed2cb79a58d6496ff6d8da1e3b95ba09afeca2e277628171ca99b99db1"}, - {file = "numpy-1.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:6ec0c021cd9fe732e5bab6401adea5a409214ca5592cd92a114f7067febcba0c"}, - {file = "numpy-1.24.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28bc9750ae1f75264ee0f10561709b1462d450a4808cd97c013046073ae64ab6"}, - {file = "numpy-1.24.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84e789a085aabef2f36c0515f45e459f02f570c4b4c4c108ac1179c34d475ed7"}, - {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e669fbdcdd1e945691079c2cae335f3e3a56554e06bbd45d7609a6cf568c700"}, - {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef85cf1f693c88c1fd229ccd1055570cb41cdf4875873b7728b6301f12cd05bf"}, - {file = "numpy-1.24.1-cp39-cp39-win32.whl", hash = "sha256:87a118968fba001b248aac90e502c0b13606721b1343cdaddbc6e552e8dfb56f"}, - {file = "numpy-1.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:ddc7ab52b322eb1e40521eb422c4e0a20716c271a306860979d450decbb51b8e"}, - {file = "numpy-1.24.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed5fb71d79e771ec930566fae9c02626b939e37271ec285e9efaf1b5d4370e7d"}, - {file = "numpy-1.24.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2925567f43643f51255220424c23d204024ed428afc5aad0f86f3ffc080086"}, - {file = "numpy-1.24.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cfa1161c6ac8f92dea03d625c2d0c05e084668f4a06568b77a25a89111621566"}, - {file = "numpy-1.24.1.tar.gz", hash = "sha256:2386da9a471cc00a1f47845e27d916d5ec5346ae9696e01a8a34760858fe9dd2"}, + {file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"}, + {file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"}, + {file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = 
"sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"}, + {file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"}, + {file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"}, + {file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"}, + {file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"}, + {file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"}, + {file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"}, + {file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"}, + {file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"}, ] [[package]] @@ -1688,19 +1789,19 @@ files = [ [[package]] name = "platformdirs" -version = "2.6.2" +version = 
"3.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, + {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -1720,14 +1821,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.0.1" +version = "3.0.4" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.0.1-py2.py3-none-any.whl", hash = "sha256:61ecb75e0e99939cc30c79181c0394545657855e9172c42ff98ebecb0e02fcb7"}, - {file = "pre_commit-3.0.1.tar.gz", hash = "sha256:3a3f9229e8c19a626a7f91be25b3c8c135e52de1a678da98eb015c0d0baea7a5"}, + {file = "pre_commit-3.0.4-py2.py3-none-any.whl", hash = "sha256:9e3255edb0c9e7fe9b4f328cb3dc86069f8fdc38026f1bf521018a05eaf4d67b"}, + {file = "pre_commit-3.0.4.tar.gz", hash = "sha256:bc4687478d55578c4ac37272fe96df66f73d9b5cf81be6f28627d4e712e752d5"}, ] [package.dependencies] @@ -2052,14 +2153,14 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" -version = "3.1.0" +version = "3.2.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.1.0.tar.gz", hash = "sha256:40fdb8f3544921c5dfcd486ac080ce22870e71d82ced6d2e78fa97c2addd480c"}, - {file = "pytest_xdist-3.1.0-py3-none-any.whl", hash = "sha256:70a76f191d8a1d2d6be69fc440cdf85f3e4c03c08b520fd5dc5d338d6cf07d89"}, + {file = "pytest-xdist-3.2.0.tar.gz", hash = "sha256:fa10f95a2564cd91652f2d132725183c3b590d9fdcdec09d3677386ecf4c1ce9"}, + {file = "pytest_xdist-3.2.0-py3-none-any.whl", hash = "sha256:336098e3bbd8193276867cc87db8b22903c3927665dff9d1ac8684c02f597b68"}, ] [package.dependencies] @@ -2194,14 +2295,14 @@ files = [ [[package]] name = "redis" -version = "4.4.2" +version = "4.5.1" description = "Python client for Redis database and key-value store" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "redis-4.4.2-py3-none-any.whl", hash = "sha256:e6206448e2f8a432871d07d432c13ed6c2abcf6b74edb436c99752b1371be387"}, - {file = "redis-4.4.2.tar.gz", hash = "sha256:a010f6cb7378065040a02839c3f75c7e0fb37a87116fb4a95be82a95552776c7"}, + {file = "redis-4.5.1-py3-none-any.whl", hash = "sha256:5deb072d26e67d2be1712603bfb7947ec3431fb0eec9c578994052e33035af6d"}, + {file = "redis-4.5.1.tar.gz", hash = 
"sha256:1eec3741cda408d3a5f84b78d089c8b8d895f21b3b050988351e925faf202864"}, ] [package.dependencies] @@ -2235,14 +2336,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.0.0" +version = "67.3.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.0.0-py3-none-any.whl", hash = "sha256:9d790961ba6219e9ff7d9557622d2fe136816a264dd01d5997cfc057d804853d"}, - {file = "setuptools-67.0.0.tar.gz", hash = "sha256:883131c5b6efa70b9101c7ef30b2b7b780a4283d5fc1616383cdf22c83cbefe6"}, + {file = "setuptools-67.3.2-py3-none-any.whl", hash = "sha256:bb6d8e508de562768f2027902929f8523932fcd1fb784e6d573d2cafac995a48"}, + {file = "setuptools-67.3.2.tar.gz", hash = "sha256:95f00380ef2ffa41d9bba85d95b27689d923c93dfbafed4aecd7cf988a25e012"}, ] [package.extras] @@ -2276,14 +2377,14 @@ files = [ [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] [[package]] @@ -2460,14 +2561,14 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] [package.extras] @@ -2595,14 +2696,14 @@ telegram = ["requests"] [[package]] name = "types-pyopenssl" -version = "23.0.0.2" +version = "23.0.0.3" description = "Typing stubs for pyOpenSSL" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.0.0.2.tar.gz", hash = "sha256:2e95f9a667d5eeb0af699196f857f7d23d5b4d642437bd37355bc13a87e9f4ae"}, - {file = "types_pyOpenSSL-23.0.0.2-py3-none-any.whl", hash = "sha256:ea7e5d06f9190a1cb013ad4b13d48896e5cd1e785c04491f38b206d1bc4b8dc1"}, + {file = "types-pyOpenSSL-23.0.0.3.tar.gz", hash = "sha256:6ca54d593f8b946f9570f9ed7457c41da3b518feff5e344851941a6209bea62b"}, + {file = "types_pyOpenSSL-23.0.0.3-py3-none-any.whl", hash = "sha256:847ab17a16475a882dc29898648a6a35ad0d3e11a5bba5aa8ab2f3435a8647cb"}, ] [package.dependencies] @@ -2622,14 +2723,14 @@ files = [ [[package]] name = "types-redis" -version = "4.4.0.4" +version = "4.5.1.1" description = "Typing stubs for redis" 
category = "dev" optional = false python-versions = "*" files = [ - {file = "types-redis-4.4.0.4.tar.gz", hash = "sha256:b70829ca3401d3153d628e28d860070eff1b36b2fa3e5af3e583c1d167383cab"}, - {file = "types_redis-4.4.0.4-py3-none-any.whl", hash = "sha256:802e893ad3f88e03d3a2feb0d23a715d60b0bb330bc598a52f1de237fc2547a5"}, + {file = "types-redis-4.5.1.1.tar.gz", hash = "sha256:c072e4824855f46d0a968509c3e0fa4789fc13b62d472064527bad3d1815aeed"}, + {file = "types_redis-4.5.1.1-py3-none-any.whl", hash = "sha256:081dfeec730df6e3f32ccbdafe3198873b7c02516c22d79cc2a40efdd69a3963"}, ] [package.dependencies] @@ -2638,14 +2739,14 @@ types-pyOpenSSL = "*" [[package]] name = "types-requests" -version = "2.28.11.8" +version = "2.28.11.13" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"}, - {file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"}, + {file = "types-requests-2.28.11.13.tar.gz", hash = "sha256:3fd332842e8759ea5f7eb7789df8aa772ba155216ccf10ef4aa3b0e5b42e1b46"}, + {file = "types_requests-2.28.11.13-py3-none-any.whl", hash = "sha256:94896f6f8e9f3db11e422c6e3e4abbc5d7ccace853eac74b23bdd65eeee3cdee"}, ] [package.dependencies] @@ -2653,38 +2754,38 @@ types-urllib3 = "<1.27" [[package]] name = "types-toml" -version = "0.10.8.1" +version = "0.10.8.4" description = "Typing stubs for toml" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-toml-0.10.8.1.tar.gz", hash = "sha256:171bdb3163d79a520560f24ba916a9fc9bff81659c5448a9fea89240923722be"}, - {file = "types_toml-0.10.8.1-py3-none-any.whl", hash = "sha256:b7b5c4977f96ab7b5ac06d8a6590d17c0bf252a96efc03b109c2711fb3e0eafd"}, + {file = "types-toml-0.10.8.4.tar.gz", hash = "sha256:c8748dd225b28eb80ce712e2d7d61b57599815e7b48d07ef53df51ed148fa6b1"}, + {file = "types_toml-0.10.8.4-py3-none-any.whl", hash = "sha256:306b1bb8b5bbc5f1b60387dbcc4b489e79f8490ce20e93af5f422a68b470d94b"}, ] [[package]] name = "types-urllib3" -version = "1.26.25.4" +version = "1.26.25.6" description = "Typing stubs for urllib3" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, - {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, + {file = "types-urllib3-1.26.25.6.tar.gz", hash = "sha256:35586727cbd7751acccf2c0f34a88baffc092f435ab62458f10776466590f2d5"}, + {file = "types_urllib3-1.26.25.6-py3-none-any.whl", hash = "sha256:a6c23c41bd03e542eaee5423a018f833077b51c4bf9ceb5aa544e12b812d5604"}, ] [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = 
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] [[package]] @@ -2778,35 +2879,35 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my [[package]] name = "virtualenv" -version = "20.17.1" +version = "20.19.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"}, - {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"}, + {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, + {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, ] [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "websocket-client" -version = "1.5.0" +version = "1.5.1" description = "WebSocket client for Python with low level API options" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "websocket-client-1.5.0.tar.gz", hash = "sha256:561ca949e5bbb5d33409a37235db55c279235c78ee407802f1d2314fff8a8536"}, - {file = "websocket_client-1.5.0-py3-none-any.whl", hash = "sha256:fb5d81b95d350f3a54838ebcb4c68a5353bbd1412ae8f068b1e5280faeb13074"}, + {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, + {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, ] [package.extras] @@ -2919,14 +3020,14 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.12.0" +version = "3.13.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.12.0-py3-none-any.whl", hash = "sha256:9eb0a4c5feab9b08871db0d672745b53450d7f26992fd1e4653aa43345e97b86"}, - {file = "zipp-3.12.0.tar.gz", hash = "sha256:73efd63936398aac78fd92b6f4865190119d6c91b531532e798977ea8dd402eb"}, + {file = "zipp-3.13.0-py3-none-any.whl", hash = "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"}, + {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"}, ] [package.extras] @@ -2942,4 +3043,4 @@ redis = ["hiredis", "redis"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = 
"2aefd8f46e5047c677f66f4e7ab191f44bb5b882ea261d468ba84153237d494f" +content-hash = "72eeb6d454820c45a046ee65d3623c8a0060314c81e57aecf4948f785d69e5c9" diff --git a/pyproject.toml b/pyproject.toml index 874cd2aa27ae..eba0b4a66ac8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautilus_trader" -version = "1.168.0" +version = "1.169.0" description = "A high-performance algorithmic trading platform and event-driven backtester" authors = ["Nautech Systems "] license = "LGPL-3.0-or-later" @@ -36,7 +36,7 @@ include = [ requires = [ "setuptools", "poetry-core>=1.4.0", - "numpy>=1.24.1", + "numpy>=1.24.2", "Cython==3.0.0a11", ] build-backend = "poetry.core.masonry.api" @@ -49,12 +49,12 @@ generate-setup-file = false python = ">=3.9,<3.12" cython = "==3.0.0a11" aiodns = "^3.0.0" -aiohttp = "^3.8.3" +aiohttp = "^3.8.4" click = "^8.1.3" -frozendict = "^2.3.4" +frozendict = "^2.3.5" fsspec = ">=2023.1.0" -msgspec = "^0.12.0" -numpy = "^1.24.1" +msgspec = "^0.13.1" +numpy = "^1.24.2" pandas = "^1.5.3" psutil = "^5.9.4" pyarrow = "^10.0.1" @@ -63,9 +63,9 @@ tabulate = "^0.9.0" toml = "^0.10.2" tqdm = "^4.64.1" uvloop = {version = "^0.17.0", markers = "sys_platform != 'win32'"} -hiredis = {version = "^2.1.1", optional = true} +hiredis = {version = "^2.2.2", optional = true} ib_insync = {version = "^0.9.81", optional = true} -redis = {version = "^4.4.2", optional = true} +redis = {version = "^4.5.1", optional = true} docker = {version = "^6.0.1", optional = true} betfair_parser = {version = "==0.1.11", optional = true} @@ -79,14 +79,14 @@ redis = ["hiredis", "redis"] optional = true [tool.poetry.group.dev.dependencies] -black = "^22.12.0" +black = "^23.1.0" flake8 = "^6.0.0" isort = "^5.12.0" -mypy = "^0.991" -pre-commit = "^3.0.0" +mypy = "^1.0.0" +pre-commit = "^3.0.4" pyproject-flake8 = "^6.0.0" types-pytz = "^2022.6.0" -types-redis = "^4.3.21" +types-redis = "^4.5.1" types-requests = "^2.28.11" types-toml = "^0.10.8" @@ -100,7 +100,7 @@ pytest-asyncio = "^0.20.2" pytest-benchmark = "^4.0.0" pytest-cov = "4.0.0" pytest-mock = "^3.10.0" -pytest-xdist = { version = "^3.1.0", extras = ["psutil"] } +pytest-xdist = { version = "^3.2.0", extras = ["psutil"] } [tool.poetry.group.docs] optional = true diff --git a/tests/acceptance_tests/test_backtest_acceptance.py b/tests/acceptance_tests/test_backtest_acceptance.py index 3f82325555e3..2400fc8670d9 100644 --- a/tests/acceptance_tests/test_backtest_acceptance.py +++ b/tests/acceptance_tests/test_backtest_acceptance.py @@ -116,7 +116,10 @@ def test_run_ema_cross_strategy(self): # Assert - Should return expected PnL assert strategy.fast_ema.count == 2689 assert self.engine.iteration == 115044 - assert self.engine.portfolio.account(self.venue).balance_total(USD) == Money(996798.21, USD) + assert self.engine.portfolio.account(self.venue).balance_total(USD) == Money( + 996_798.21, + USD, + ) def test_rerun_ema_cross_strategy_returns_identical_performance(self): # Arrange @@ -176,7 +179,7 @@ def test_run_multiple_strategies(self): assert strategy2.fast_ema.count == 2689 assert self.engine.iteration == 115044 assert self.engine.portfolio.account(self.venue).balance_total(USD) == Money( - 1023449.90, + 1_023_449.90, USD, ) @@ -240,7 +243,10 @@ def test_run_ema_cross_with_five_minute_bar_spec(self): # Assert assert strategy.fast_ema.count == 8353 assert self.engine.iteration == 120468 - assert self.engine.portfolio.account(self.venue).balance_total(GBP) == Money(961323.91, GBP) + assert 
self.engine.portfolio.account(self.venue).balance_total(GBP) == Money( + 961_323.91, + GBP, + ) def test_run_ema_cross_stop_entry_trail_strategy(self): # Arrange @@ -266,7 +272,7 @@ def test_run_ema_cross_stop_entry_trail_strategy(self): assert strategy.fast_ema.count == 8353 assert self.engine.iteration == 120468 assert self.engine.portfolio.account(self.venue).balance_total(GBP) == Money( - 1009220.90, + 1_009_220.90, GBP, ) @@ -293,7 +299,10 @@ def test_run_ema_cross_stop_entry_trail_strategy_with_emulation(self): # Assert - Should return expected PnL assert strategy.fast_ema.count == 41761 assert self.engine.iteration == 120468 - assert self.engine.portfolio.account(self.venue).balance_total(GBP) == Money(963946.75, GBP) + assert self.engine.portfolio.account(self.venue).balance_total(GBP) == Money( + 963_946.75, + GBP, + ) class TestBacktestAcceptanceTestsGBPUSDBarsExternal: @@ -374,7 +383,7 @@ def test_run_ema_cross_with_minute_bar_spec(self): assert strategy.fast_ema.count == 30117 assert self.engine.iteration == 60234 ending_balance = self.engine.portfolio.account(self.venue).balance_total(USD) - assert ending_balance == Money(1088115.65, USD) + assert ending_balance == Money(1_088_115.65, USD) class TestBacktestAcceptanceTestsBTCUSDTSpotNoCashPositions: @@ -436,12 +445,12 @@ def test_run_ema_cross_with_minute_trade_bars(self): self.engine.run() # Assert - assert strategy.fast_ema.count == 10000 - assert self.engine.iteration == 10000 + assert strategy.fast_ema.count == 10_000 + assert self.engine.iteration == 10_000 btc_ending_balance = self.engine.portfolio.account(self.venue).balance_total(BTC) usdt_ending_balance = self.engine.portfolio.account(self.venue).balance_total(USDT) assert btc_ending_balance == Money(9.57200000, BTC) - assert usdt_ending_balance == Money(10017571.74970600, USDT) + assert usdt_ending_balance == Money(10_017_571.74970600, USDT) def test_run_ema_cross_with_trade_ticks_from_bar_data(self): # Arrange @@ -471,13 +480,13 @@ def test_run_ema_cross_with_trade_ticks_from_bar_data(self): self.engine.run() # Assert - assert len(ticks) == 40000 - assert strategy.fast_ema.count == 10000 - assert self.engine.iteration == 40000 + assert len(ticks) == 40_000 + assert strategy.fast_ema.count == 10_000 + assert self.engine.iteration == 40_000 btc_ending_balance = self.engine.portfolio.account(self.venue).balance_total(BTC) usdt_ending_balance = self.engine.portfolio.account(self.venue).balance_total(USDT) assert btc_ending_balance == Money(9.57200000, BTC) - assert usdt_ending_balance == Money(10017571.72928400, USDT) + assert usdt_ending_balance == Money(10_017_571.72928400, USDT) class TestBacktestAcceptanceTestsAUDUSD: @@ -532,8 +541,11 @@ def test_run_ema_cross_with_minute_bar_spec(self): # Assert assert strategy.fast_ema.count == 1771 - assert self.engine.iteration == 100000 - assert self.engine.portfolio.account(self.venue).balance_total(AUD) == Money(991360.15, AUD) + assert self.engine.iteration == 100_000 + assert self.engine.portfolio.account(self.venue).balance_total(AUD) == Money( + 991_360.15, + AUD, + ) def test_run_ema_cross_with_tick_bar_spec(self): # Arrange @@ -551,9 +563,12 @@ def test_run_ema_cross_with_tick_bar_spec(self): self.engine.run() # Assert - assert strategy.fast_ema.count == 1000 - assert self.engine.iteration == 100000 - assert self.engine.portfolio.account(self.venue).balance_total(AUD) == Money(996361.60, AUD) + assert strategy.fast_ema.count == 1_000 + assert self.engine.iteration == 100_000 + assert 
self.engine.portfolio.account(self.venue).balance_total(AUD) == Money( + 996_361.60, + AUD, + ) class TestBacktestAcceptanceTestsETHUSDT: @@ -607,7 +622,7 @@ def test_run_ema_cross_with_tick_bar_spec(self): assert strategy.fast_ema.count == 279 assert self.engine.iteration == 69806 expected_commission = Money(127.56763570, USDT) - expected_usdt = Money(998869.96375810, USDT) + expected_usdt = Money(998_869.96375810, USDT) assert self.engine.portfolio.account(self.venue).commission(USDT) == expected_commission assert self.engine.portfolio.account(self.venue).balance_total(USDT) == expected_usdt diff --git a/tests/integration_tests/adapters/betfair/test_betfair_account.py b/tests/integration_tests/adapters/betfair/test_betfair_account.py index fce382038447..723efbe0ce2a 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_account.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_account.py @@ -20,7 +20,7 @@ from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.model.objects import Price from nautilus_trader.model.objects import Quantity from nautilus_trader.msgbus.bus import MessageBus @@ -41,7 +41,7 @@ def setup(self): self.instrument = TestInstrumentProvider.betting_instrument() # Setup logging - self.logger = LiveLogger(loop=self.loop, clock=self.clock, level_stdout=LogLevel.DEBUG) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self.msgbus = MessageBus( trader_id=TestIdStubs.trader_id(), diff --git a/tests/integration_tests/adapters/betfair/test_betfair_client.py b/tests/integration_tests/adapters/betfair/test_betfair_client.py index 1e65a354982b..40b062000e66 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_client.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_client.py @@ -32,7 +32,7 @@ from nautilus_trader.adapters.betfair.parsing.requests import order_update_to_betfair from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.core.uuid import UUID4 from nautilus_trader.execution.messages import SubmitOrder from nautilus_trader.model.enums import OrderSide @@ -55,7 +55,7 @@ def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger(loop=self.loop, clock=self.clock) + self.logger = Logger(clock=self.clock) self.client = BetfairClient( # noqa: S106 (no hardcoded password) username="username", password="password", diff --git a/tests/integration_tests/adapters/betfair/test_betfair_data.py b/tests/integration_tests/adapters/betfair/test_betfair_data.py index 2ebff35f7e96..757849a7ea9e 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_data.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_data.py @@ -35,7 +35,7 @@ from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.core.uuid 
import UUID4 from nautilus_trader.live.data_engine import LiveDataEngine @@ -79,9 +79,8 @@ def instrument_list(mock_load_markets_metadata, loop: asyncio.AbstractEventLoop) global INSTRUMENTS # Setup - logger = LiveLogger(loop=loop, clock=LiveClock(), level_stdout=LogLevel.ERROR) + logger = Logger(clock=LiveClock(), level_stdout=LogLevel.ERROR) client = BetfairTestStubs.betfair_client(loop=loop, logger=logger) - logger = LiveLogger(loop=loop, clock=LiveClock(), level_stdout=LogLevel.DEBUG) instrument_provider = BetfairInstrumentProvider(client=client, logger=logger, filters={}) # Load instruments @@ -111,7 +110,7 @@ def setup(self): self.venue = BETFAIR_VENUE # Setup logging - self.logger = LiveLogger(loop=self.loop, clock=self.clock, level_stdout=LogLevel.ERROR) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.ERROR) self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger) self.msgbus = MessageBus( @@ -119,9 +118,9 @@ def setup(self): clock=self.clock, logger=self.logger, ) - + self.instrument_id = TestInstrumentProvider.betting_instrument() self.cache = TestComponentStubs.cache() - self.cache.add_instrument(TestInstrumentProvider.betting_instrument()) + self.cache.add_instrument(self.instrument_id) self.portfolio = Portfolio( msgbus=self.msgbus, @@ -215,9 +214,9 @@ async def test_connect( await self.client._connect() def test_subscriptions(self): - self.client.subscribe_trade_ticks(TestIdStubs.betting_instrument_id()) - self.client.subscribe_instrument_status_updates(TestIdStubs.betting_instrument_id()) - self.client.subscribe_instrument_close(TestIdStubs.betting_instrument_id()) + self.client.subscribe_trade_ticks(self.instrument_id) + self.client.subscribe_instrument_status_updates(self.instrument_id) + self.client.subscribe_instrument_close(self.instrument_id) def test_market_heartbeat(self): self.client.on_market_update(BetfairStreaming.mcm_HEARTBEAT()) @@ -336,7 +335,7 @@ def test_market_bsp(self): "InstrumentStatusUpdate": 9, "OrderBookSnapshot": 8, "BetfairTicker": 8, - "BSPOrderBookDeltas": 8, + "GenericData": 8, "OrderBookDeltas": 2, "InstrumentClose": 1, } @@ -344,8 +343,8 @@ def test_market_bsp(self): sp_deltas = [ d for deltas in self.messages - if isinstance(deltas, BSPOrderBookDeltas) - for d in deltas.deltas + if isinstance(deltas, GenericData) + for d in deltas.data.deltas ] assert len(sp_deltas) == 30 diff --git a/tests/integration_tests/adapters/betfair/test_betfair_factory.py b/tests/integration_tests/adapters/betfair/test_betfair_factory.py index 5bf0494c0b6d..ab4c47d7e5f5 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_factory.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_factory.py @@ -28,7 +28,7 @@ from nautilus_trader.adapters.betfair.factories import BetfairLiveExecClientFactory from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.msgbus.bus import MessageBus from nautilus_trader.test_kit.stubs.component import TestComponentStubs @@ -47,7 +47,7 @@ def setup(self): self.venue = BETFAIR_VENUE # Setup logging - self.logger = LiveLogger(loop=self.loop, clock=self.clock, level_stdout=LogLevel.DEBUG) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger) self.msgbus = MessageBus( diff --git 
a/tests/integration_tests/adapters/betfair/test_betfair_parsing.py b/tests/integration_tests/adapters/betfair/test_betfair_parsing.py index 83cc5bab284e..a8c1091ccd81 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_parsing.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_parsing.py @@ -49,7 +49,7 @@ ) from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.core.uuid import UUID4 from nautilus_trader.model.currencies import GBP from nautilus_trader.model.data.tick import TradeTick @@ -240,7 +240,7 @@ def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger(loop=self.loop, clock=self.clock) + self.logger = Logger(clock=self.clock) self.instrument = TestInstrumentProvider.betting_instrument() self.client = BetfairTestStubs.betfair_client(loop=self.loop, logger=self.logger) self.provider = BetfairTestStubs.instrument_provider(self.client) diff --git a/tests/integration_tests/adapters/betfair/test_betfair_providers.py b/tests/integration_tests/adapters/betfair/test_betfair_providers.py index 4e920d628d50..01036d9401b7 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_providers.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_providers.py @@ -28,7 +28,7 @@ from nautilus_trader.adapters.betfair.providers import make_instruments from nautilus_trader.adapters.betfair.providers import parse_market_catalog from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.model.enums import MarketStatus from nautilus_trader.test_kit.stubs.component import TestComponentStubs from tests.integration_tests.adapters.betfair.test_kit import BetfairResponses @@ -36,13 +36,14 @@ from tests.integration_tests.adapters.betfair.test_kit import BetfairTestStubs +@pytest.mark.skip(reason="Flaky in CI") @pytest.mark.skipif(sys.platform == "win32", reason="Failing on windows") class TestBetfairInstrumentProvider: def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger(loop=self.loop, clock=self.clock) + self.logger = Logger(clock=self.clock) self.client = BetfairTestStubs.betfair_client(loop=self.loop, logger=self.logger) self.provider = BetfairInstrumentProvider( client=self.client, diff --git a/tests/integration_tests/adapters/betfair/test_betfair_sockets.py b/tests/integration_tests/adapters/betfair/test_betfair_sockets.py index 9459a1a01d64..b30c8f65eb9f 100644 --- a/tests/integration_tests/adapters/betfair/test_betfair_sockets.py +++ b/tests/integration_tests/adapters/betfair/test_betfair_sockets.py @@ -18,7 +18,7 @@ from nautilus_trader.adapters.betfair.sockets import BetfairMarketStreamClient from nautilus_trader.adapters.betfair.sockets import BetfairOrderStreamClient from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from tests.integration_tests.adapters.betfair.test_kit import BetfairTestStubs @@ -27,7 +27,7 @@ def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger(loop=self.loop, clock=self.clock) + self.logger = Logger(clock=self.clock) 
self.client = BetfairTestStubs.betfair_client(loop=self.loop, logger=self.logger) def test_unique_id(self): diff --git a/tests/integration_tests/adapters/betfair/test_kit.py b/tests/integration_tests/adapters/betfair/test_kit.py index a27d1c4d0207..fbe85e851cb3 100644 --- a/tests/integration_tests/adapters/betfair/test_kit.py +++ b/tests/integration_tests/adapters/betfair/test_kit.py @@ -214,10 +214,15 @@ def betfair_venue_config() -> BacktestVenueConfig: ) @staticmethod - def streaming_config(catalog_path: str, catalog_fs_protocol: str = "memory") -> StreamingConfig: + def streaming_config( + catalog_path: str, + catalog_fs_protocol: str = "memory", + flush_interval_ms: int = None, + ) -> StreamingConfig: return StreamingConfig( catalog_path=catalog_path, fs_protocol=catalog_fs_protocol, + flush_interval_ms=flush_interval_ms, ) @staticmethod @@ -228,12 +233,16 @@ def betfair_backtest_run_config( persist=True, add_strategy=True, bypass_risk=False, + flush_interval_ms: int = None, ) -> BacktestRunConfig: engine_config = BacktestEngineConfig( log_level="INFO", bypass_logging=True, risk_engine=RiskEngineConfig(bypass=bypass_risk), - streaming=BetfairTestStubs.streaming_config(catalog_path=catalog_path) + streaming=BetfairTestStubs.streaming_config( + catalog_path=catalog_path, + flush_interval_ms=flush_interval_ms, + ) if persist else None, strategies=[ diff --git a/tests/integration_tests/adapters/binance/resources/ws_messages/ws_spot_agg_trade.json b/tests/integration_tests/adapters/binance/resources/ws_messages/ws_spot_agg_trade.json new file mode 100644 index 000000000000..e54e301e4873 --- /dev/null +++ b/tests/integration_tests/adapters/binance/resources/ws_messages/ws_spot_agg_trade.json @@ -0,0 +1,16 @@ +{ + "stream":"ethusdt@aggTrade", + "data":{ + "e":"aggTrade", + "E":1675759520848, + "s":"ETHUSDT", + "a":226532, + "p":"1632.46000000", + "q":"0.34305000", + "f":228423, + "l":228423, + "T":1675759520847, + "m":false, + "M":true + } +} \ No newline at end of file diff --git a/tests/integration_tests/adapters/binance/sandbox/sandbox_http_futures_testnet_instrument_provider.py b/tests/integration_tests/adapters/binance/sandbox/sandbox_http_futures_testnet_instrument_provider.py new file mode 100644 index 000000000000..5ea28e4027dd --- /dev/null +++ b/tests/integration_tests/adapters/binance/sandbox/sandbox_http_futures_testnet_instrument_provider.py @@ -0,0 +1,60 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------------------------- + +import asyncio +import os + +import pytest + +from nautilus_trader.adapters.binance.common.constants import BINANCE_VENUE +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType +from nautilus_trader.adapters.binance.factories import get_cached_binance_http_client +from nautilus_trader.adapters.binance.futures.providers import BinanceFuturesInstrumentProvider +from nautilus_trader.common.clock import LiveClock +from nautilus_trader.common.logging import Logger +from nautilus_trader.model.identifiers import InstrumentId +from nautilus_trader.model.identifiers import Symbol + + +@pytest.mark.asyncio +async def test_binance_futures_testnet_instrument_provider(): + loop = asyncio.get_event_loop() + clock = LiveClock() + + client = get_cached_binance_http_client( + loop=loop, + clock=clock, + logger=Logger(clock=clock), + account_type=BinanceAccountType.FUTURES_USDT, + key=os.getenv("BINANCE_FUTURES_TESTNET_API_KEY"), + secret=os.getenv("BINANCE_FUTURES_TESTNET_API_SECRET"), + is_testnet=True, + ) + await client.connect() + + provider = BinanceFuturesInstrumentProvider( + client=client, + clock=clock, + logger=Logger(clock=clock), + ) + + # await provider.load_all_async() + btcusdt_perp = InstrumentId(Symbol("BTCUSDT-PERP"), BINANCE_VENUE) + await provider.load_ids_async(instrument_ids=[btcusdt_perp]) + await provider.load_all_async() + + print(provider.count) + + await client.disconnect() diff --git a/tests/integration_tests/adapters/binance/sandbox/sandbox_http_spot_instrument_provider.py b/tests/integration_tests/adapters/binance/sandbox/sandbox_http_spot_instrument_provider.py index 0e812e438a63..311ce1ba566a 100644 --- a/tests/integration_tests/adapters/binance/sandbox/sandbox_http_spot_instrument_provider.py +++ b/tests/integration_tests/adapters/binance/sandbox/sandbox_http_spot_instrument_provider.py @@ -26,7 +26,7 @@ @pytest.mark.asyncio -async def test_binance_spot_market_http_client(): +async def test_binance_spot_instrument_provider(): clock = LiveClock() client = get_cached_binance_http_client( diff --git a/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_futures_market.py b/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_futures_market.py index e93f6cb4f1fc..cdfd38aaf5d0 100644 --- a/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_futures_market.py +++ b/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_futures_market.py @@ -19,7 +19,7 @@ from nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger @pytest.mark.asyncio @@ -30,7 +30,7 @@ async def test_binance_websocket_client(): client = BinanceWebSocketClient( loop=loop, clock=clock, - logger=LiveLogger(loop=loop, clock=clock), + logger=Logger(clock=clock), handler=print, base_url="wss://fstream.binance.com", ) diff --git a/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_spot_user.py b/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_spot_user.py index c36f2faffbcb..4911c5c705f0 100644 --- a/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_spot_user.py +++ b/tests/integration_tests/adapters/binance/sandbox/sandbox_ws_spot_user.py @@ -23,7 +23,6 @@ from nautilus_trader.adapters.binance.spot.http.user import BinanceSpotUserDataHttpAPI from 
nautilus_trader.adapters.binance.websocket.client import BinanceWebSocketClient from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger from nautilus_trader.common.logging import Logger @@ -49,7 +48,7 @@ async def test_binance_websocket_client(): ws = BinanceWebSocketClient( loop=loop, clock=clock, - logger=LiveLogger(loop=loop, clock=clock), + logger=Logger(clock=clock), handler=print, ) diff --git a/tests/integration_tests/adapters/binance/test_core_functions.py b/tests/integration_tests/adapters/binance/test_core_functions.py index b5271de8e1c6..51f212c5bf4c 100644 --- a/tests/integration_tests/adapters/binance/test_core_functions.py +++ b/tests/integration_tests/adapters/binance/test_core_functions.py @@ -16,8 +16,8 @@ import pytest from nautilus_trader.adapters.binance.common.enums import BinanceAccountType -from nautilus_trader.adapters.binance.common.functions import convert_symbols_list_to_json_array -from nautilus_trader.adapters.binance.common.functions import format_symbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbol +from nautilus_trader.adapters.binance.common.schemas.symbol import BinanceSymbols class TestBinanceCoreFunctions: @@ -26,7 +26,7 @@ def test_format_symbol(self): symbol = "ethusdt-perp" # Act - result = format_symbol(symbol) + result = BinanceSymbol(symbol) # Assert assert result == "ETHUSDT" @@ -36,7 +36,7 @@ def test_convert_symbols_list_to_json_array(self): symbols = ["BTCUSDT", "ETHUSDT-PERP", " XRDUSDT"] # Act - result = convert_symbols_list_to_json_array(symbols) + result = BinanceSymbols(symbols) # Assert assert result == '["BTCUSDT","ETHUSDT","XRDUSDT"]' @@ -45,7 +45,8 @@ def test_convert_symbols_list_to_json_array(self): "account_type, expected", [ [BinanceAccountType.SPOT, True], - [BinanceAccountType.MARGIN, False], + [BinanceAccountType.MARGIN_CROSS, False], + [BinanceAccountType.MARGIN_ISOLATED, False], [BinanceAccountType.FUTURES_USDT, False], [BinanceAccountType.FUTURES_COIN, False], ], @@ -58,7 +59,8 @@ def test_binance_account_type_is_spot(self, account_type, expected): "account_type, expected", [ [BinanceAccountType.SPOT, False], - [BinanceAccountType.MARGIN, True], + [BinanceAccountType.MARGIN_CROSS, True], + [BinanceAccountType.MARGIN_ISOLATED, True], [BinanceAccountType.FUTURES_USDT, False], [BinanceAccountType.FUTURES_COIN, False], ], @@ -67,11 +69,26 @@ def test_binance_account_type_is_margin(self, account_type, expected): # Arrange, Act, Assert assert account_type.is_margin == expected + @pytest.mark.parametrize( + "account_type, expected", + [ + [BinanceAccountType.SPOT, True], + [BinanceAccountType.MARGIN_CROSS, True], + [BinanceAccountType.MARGIN_ISOLATED, True], + [BinanceAccountType.FUTURES_USDT, False], + [BinanceAccountType.FUTURES_COIN, False], + ], + ) + def test_binance_account_type_is_spot_or_margin(self, account_type, expected): + # Arrange, Act, Assert + assert account_type.is_spot_or_margin == expected + @pytest.mark.parametrize( "account_type, expected", [ [BinanceAccountType.SPOT, False], - [BinanceAccountType.MARGIN, False], + [BinanceAccountType.MARGIN_CROSS, False], + [BinanceAccountType.MARGIN_ISOLATED, False], [BinanceAccountType.FUTURES_USDT, True], [BinanceAccountType.FUTURES_COIN, True], ], diff --git a/tests/integration_tests/adapters/binance/test_data_spot.py b/tests/integration_tests/adapters/binance/test_data_spot.py index f4e27cafb813..60e485fe9ac6 100644 --- a/tests/integration_tests/adapters/binance/test_data_spot.py +++ 
b/tests/integration_tests/adapters/binance/test_data_spot.py @@ -77,6 +77,7 @@ def setup(self): self.provider = BinanceSpotInstrumentProvider( client=self.http_client, logger=self.logger, + clock=self.clock, config=InstrumentProviderConfig(load_all=True), ) @@ -323,3 +324,37 @@ async def test_subscribe_trade_ticks(self, monkeypatch): ts_event=1639351062243000064, ts_init=handler[0].ts_init, ) + + @pytest.mark.asyncio + async def test_subscribe_agg_trade_ticks(self, monkeypatch): + handler = [] + self.msgbus.subscribe( + topic="data.trades.BINANCE.ETHUSDT", + handler=handler.append, + ) + + # Act + self.data_client._use_agg_trade_ticks = True + self.data_client.subscribe_trade_ticks(ETHUSDT_BINANCE.id) + self.data_client._use_agg_trade_ticks = False + + raw_trade = pkgutil.get_data( + package="tests.integration_tests.adapters.binance.resources.ws_messages", + resource="ws_spot_agg_trade.json", + ) + + # Assert + self.data_client._handle_ws_message(raw_trade) + await asyncio.sleep(1) + + assert self.data_engine.data_count == 1 + assert len(handler) == 1 # <-- handler received tick + assert handler[0] == TradeTick( + instrument_id=ETHUSDT_BINANCE.id, + price=Price.from_str("1632.46000000"), + size=Quantity.from_str("0.34305000"), + aggressor_side=AggressorSide.BUYER, + trade_id=TradeId("226532"), + ts_event=1675759520847, + ts_init=handler[0].ts_init, + ) diff --git a/tests/integration_tests/adapters/binance/test_execution_futures.py b/tests/integration_tests/adapters/binance/test_execution_futures.py index fdf7973fb796..ba0fc586b2f1 100644 --- a/tests/integration_tests/adapters/binance/test_execution_futures.py +++ b/tests/integration_tests/adapters/binance/test_execution_futures.py @@ -80,6 +80,7 @@ def setup(self): self.provider = BinanceFuturesInstrumentProvider( client=self.http_client, logger=self.logger, + clock=self.clock, config=InstrumentProviderConfig(load_all=True), ) @@ -249,7 +250,6 @@ async def test_submit_limit_post_only_order(self, mocker): assert request[2]["type"] == "LIMIT" assert request[2]["timeInForce"] == "GTX" assert request[2]["quantity"] == "10" - assert request[2]["reduceOnly"] == "false" assert request[2]["price"] == "10050.80" assert request[2]["newClientOrderId"] is not None assert request[2]["recvWindow"] == "5000" @@ -293,7 +293,7 @@ async def test_submit_stop_market_order(self, mocker): assert request[2]["type"] == "STOP_MARKET" assert request[2]["timeInForce"] == "GTC" assert request[2]["quantity"] == "10" - assert request[2]["reduceOnly"] == "true" + assert request[2]["reduceOnly"] == "True" assert request[2]["newClientOrderId"] is not None assert request[2]["stopPrice"] == "10099.00" assert request[2]["workingType"] == "CONTRACT_PRICE" @@ -426,7 +426,6 @@ async def test_submit_limit_if_touched_order(self, mocker): assert request[2]["type"] == "TAKE_PROFIT" assert request[2]["timeInForce"] == "GTC" assert request[2]["quantity"] == "10" - assert request[2]["reduceOnly"] == "false" assert request[2]["price"] == "10050.80" assert request[2]["newClientOrderId"] is not None assert request[2]["stopPrice"] == "10099.00" @@ -475,7 +474,7 @@ async def test_trailing_stop_market_order(self, mocker): assert request[2]["type"] == "TRAILING_STOP_MARKET" assert request[2]["timeInForce"] == "GTC" assert request[2]["quantity"] == "10" - assert request[2]["reduceOnly"] == "true" + assert request[2]["reduceOnly"] == "True" assert request[2]["newClientOrderId"] is not None assert request[2]["activationPrice"] == "10000.00" assert request[2]["callbackRate"] == "1" diff --git 
a/tests/integration_tests/adapters/binance/test_execution_spot.py b/tests/integration_tests/adapters/binance/test_execution_spot.py index c49c1fb87c2e..89f7725eebdd 100644 --- a/tests/integration_tests/adapters/binance/test_execution_spot.py +++ b/tests/integration_tests/adapters/binance/test_execution_spot.py @@ -80,6 +80,7 @@ def setup(self): self.provider = BinanceSpotInstrumentProvider( client=self.http_client, logger=self.logger, + clock=self.clock, config=InstrumentProviderConfig(load_all=True), ) diff --git a/tests/integration_tests/adapters/binance/test_factories.py b/tests/integration_tests/adapters/binance/test_factories.py index e92c4e7e3883..156458b36ec9 100644 --- a/tests/integration_tests/adapters/binance/test_factories.py +++ b/tests/integration_tests/adapters/binance/test_factories.py @@ -31,7 +31,7 @@ from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.msgbus.bus import MessageBus from nautilus_trader.test_kit.mocks.cache_database import MockCacheDatabase from nautilus_trader.test_kit.stubs.identifiers import TestIdStubs @@ -42,11 +42,7 @@ def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger( - loop=self.loop, - clock=self.clock, - level_stdout=LogLevel.DEBUG, - ) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self.trader_id = TestIdStubs.trader_id() self.strategy_id = TestIdStubs.strategy_id() @@ -77,7 +73,13 @@ def setup(self): "https://api.binance.com", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + False, + False, + "https://sapi.binance.com", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, False, False, "https://sapi.binance.com", @@ -101,7 +103,13 @@ def setup(self): "https://api.binance.us", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + False, + True, + "https://sapi.binance.us", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, False, True, "https://sapi.binance.us", @@ -125,7 +133,13 @@ def setup(self): "https://testnet.binance.vision", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + True, + False, + "https://testnet.binance.vision", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, True, False, "https://testnet.binance.vision", @@ -155,7 +169,13 @@ def test_get_http_base_url(self, account_type, is_testnet, is_us, expected): "wss://stream.binance.com:9443", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + False, + False, + "wss://stream.binance.com:9443", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, False, False, "wss://stream.binance.com:9443", @@ -179,7 +199,13 @@ def test_get_http_base_url(self, account_type, is_testnet, is_us, expected): "wss://stream.binance.us:9443", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + False, + True, + "wss://stream.binance.us:9443", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, False, True, "wss://stream.binance.us:9443", @@ -203,7 +229,13 @@ def test_get_http_base_url(self, account_type, is_testnet, is_us, expected): "wss://testnet.binance.vision", ], [ - BinanceAccountType.MARGIN, + BinanceAccountType.MARGIN_CROSS, + True, + False, + "wss://testnet.binance.vision", + ], + [ + BinanceAccountType.MARGIN_ISOLATED, True, False, "wss://testnet.binance.vision", diff --git 
a/tests/integration_tests/adapters/binance/test_http_account.py b/tests/integration_tests/adapters/binance/test_http_account.py index b1f315e6339b..207bd323fb4a 100644 --- a/tests/integration_tests/adapters/binance/test_http_account.py +++ b/tests/integration_tests/adapters/binance/test_http_account.py @@ -17,6 +17,7 @@ import pytest +from nautilus_trader.adapters.binance.http.account import BinanceOrderHttp from nautilus_trader.adapters.binance.http.client import BinanceHttpClient from nautilus_trader.adapters.binance.spot.http.account import BinanceSpotAccountHttpAPI from nautilus_trader.common.clock import LiveClock @@ -27,17 +28,19 @@ class TestBinanceSpotAccountHttpAPI: def setup(self): # Fixture Setup - clock = LiveClock() - logger = Logger(clock=clock) + self.clock = LiveClock() + logger = Logger(clock=self.clock) self.client = BinanceHttpClient( # noqa: S106 (no hardcoded password) loop=asyncio.get_event_loop(), - clock=clock, + clock=self.clock, logger=logger, key="SOME_BINANCE_API_KEY", secret="SOME_BINANCE_API_SECRET", ) - self.api = BinanceSpotAccountHttpAPI(self.client) + self.api = BinanceSpotAccountHttpAPI(self.client, self.clock) + + # COMMON tests @pytest.mark.asyncio async def test_new_order_test_sends_expected_request(self, mocker): @@ -45,15 +48,24 @@ async def test_new_order_test_sends_expected_request(self, mocker): await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") + endpoint = BinanceOrderHttp( + client=self.client, + base_endpoint="/api/v3", + testing_endpoint=True, + ) + # Act - await self.api.new_order_test( - symbol="ETHUSDT", - side="SELL", - type="LIMIT", - time_in_force="GTC", - quantity="0.01", - price="5000", - recv_window=5000, + await endpoint._post( + parameters=endpoint.PostParameters( + symbol="ETHUSDT", + side="SELL", + type="LIMIT", + timeInForce="GTC", + quantity="0.01", + price="5000", + recvWindow=str(5000), + timestamp=str(self.clock.timestamp_ms()), + ), ) # Assert @@ -65,7 +77,7 @@ ) @pytest.mark.asyncio - async def test_order_test_sends_expected_request(self, mocker): + async def test_new_order_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") @@ -74,11 +86,11 @@ await self.api.new_order( symbol="ETHUSDT", side="SELL", - type="LIMIT", + order_type="LIMIT", time_in_force="GTC", quantity="0.01", price="5000", - recv_window=5000, + recv_window=str(5000), ) # Assert @@ -109,13 +121,13 @@ assert request["params"].startswith("symbol=ETHUSDT&orderId=1&recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_cancel_open_orders_sends_expected_request(self, mocker): + async def test_cancel_all_open_orders_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.cancel_open_orders( + await self.api.cancel_all_open_orders( symbol="ETHUSDT", recv_window=5000, ) @@ -127,13 +139,13 @@ assert request["params"].startswith("symbol=ETHUSDT&recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_get_order_sends_expected_request(self, mocker): + async def test_query_order_sends_expected_request(self, mocker):
# Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.get_order( + await self.api.query_order( symbol="ETHUSDT", order_id="1", recv_window=5000, @@ -146,13 +158,13 @@ assert request["params"].startswith("symbol=ETHUSDT&orderId=1&recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_get_open_orders_sends_expected_request(self, mocker): + async def test_query_open_orders_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.get_open_orders( + await self.api.query_open_orders( symbol="ETHUSDT", recv_window=5000, ) @@ -164,13 +176,13 @@ assert request["params"].startswith("symbol=ETHUSDT&recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_get_orders_sends_expected_request(self, mocker): + async def test_query_all_orders_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.get_orders( + await self.api.query_all_orders( symbol="ETHUSDT", recv_window=5000, ) @@ -182,13 +194,38 @@ assert request["params"].startswith("symbol=ETHUSDT&recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_new_oco_order_sends_expected_request(self, mocker): + async def test_query_user_trades_sends_expected_request(self, mocker): + # Arrange + await self.client.connect() + mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") + + # Act + await self.api.query_user_trades( + symbol="ETHUSDT", + start_time=str(1600000000), + end_time=str(1637355823), + limit=1000, + recv_window=str(5000), + ) + + # Assert + request = mock_send_request.call_args.kwargs + assert request["method"] == "GET" + assert request["url"] == "https://api.binance.com/api/v3/myTrades" + assert request["params"].startswith( + "symbol=ETHUSDT&fromId=1&orderId=1&startTime=1600000000&endTime=1637355823&limit=1000&recvWindow=5000&timestamp=", + ) + + # SPOT/MARGIN tests + + @pytest.mark.asyncio + async def test_new_spot_oco_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.new_oco_order( + await self.api.new_spot_oco( symbol="ETHUSDT", side="BUY", quantity="100", @@ -213,13 +250,13 @@ ) @pytest.mark.asyncio - async def test_cancel_oco_order_sends_expected_request(self, mocker): + async def test_cancel_spot_oco_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.cancel_oco_order( + await self.api.cancel_spot_oco( symbol="ETHUSDT", order_list_id="1", list_client_order_id="1", @@ -236,13 +273,13 @@ ) @pytest.mark.asyncio - async def test_get_oco_order_sends_expected_request(self, mocker): + async def test_query_spot_oco_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await
self.api.get_oco_order( + await self.api.query_spot_oco( order_list_id="1", orig_client_order_id="1", recv_window=5000, @@ -257,18 +294,17 @@ ) @pytest.mark.asyncio - async def test_get_oco_orders_sends_expected_request(self, mocker): + async def test_query_spot_all_oco_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.get_oco_orders( - from_id="1", - start_time=1600000000, - end_time=1637355823, + await self.api.query_spot_all_oco( + start_time=str(1600000000), + end_time=str(1637355823), limit=10, - recv_window=5000, + recv_window=str(5000), ) # Assert @@ -276,17 +312,17 @@ assert request["method"] == "GET" assert request["url"] == "https://api.binance.com/api/v3/allOrderList" assert request["params"].startswith( - "fromId=1&startTime=1600000000&endTime=1637355823&limit=10&recvWindow=5000&timestamp=", + "startTime=1600000000&endTime=1637355823&limit=10&recvWindow=5000&timestamp=", ) @pytest.mark.asyncio - async def test_get_open_oco_orders_sends_expected_request(self, mocker): + async def test_query_spot_all_open_oco_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.get_oco_open_orders(recv_window=5000) + await self.api.query_spot_all_open_oco(recv_window=5000) # Assert request = mock_send_request.call_args.kwargs @@ -295,41 +331,16 @@ assert request["params"].startswith("recvWindow=5000&timestamp=") @pytest.mark.asyncio - async def test_account_sends_expected_request(self, mocker): + async def test_query_spot_account_info_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.account(recv_window=5000) + await self.api.query_spot_account_info(recv_window=5000) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" assert request["url"] == "https://api.binance.com/api/v3/account" assert request["params"].startswith("recvWindow=5000&timestamp=") - - @pytest.mark.asyncio - async def test_my_trades_sends_expected_request(self, mocker): - # Arrange - await self.client.connect() - mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") - - # Act - await self.api.get_account_trades( - symbol="ETHUSDT", - from_id="1", - order_id="1", - start_time=1600000000, - end_time=1637355823, - limit=1000, - recv_window=5000, - ) - - # Assert - request = mock_send_request.call_args.kwargs - assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/myTrades" - assert request["params"].startswith( - "symbol=ETHUSDT&fromId=1&orderId=1&startTime=1600000000&endTime=1637355823&limit=1000&recvWindow=5000&timestamp=", - ) diff --git a/tests/integration_tests/adapters/binance/test_http_market.py b/tests/integration_tests/adapters/binance/test_http_market.py index ed43bebbc1cc..e1f65a04ffc0 100644 --- a/tests/integration_tests/adapters/binance/test_http_market.py +++ b/tests/integration_tests/adapters/binance/test_http_market.py @@ -38,6 +38,10 @@ def setup(self): ) self.api = BinanceSpotMarketHttpAPI(self.client) + self.test_symbol = "BTCUSDT" + self.test_symbols =
["BTCUSDT", "ETHUSDT"] + + # COMMON tests @pytest.mark.asyncio async def test_ping_sends_expected_request(self, mocker): @@ -54,13 +58,13 @@ async def test_ping_sends_expected_request(self, mocker): assert request["url"] == "https://api.binance.com/api/v3/ping" @pytest.mark.asyncio - async def test_time_sends_expected_request(self, mocker): + async def test_request_server_time_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.time() + await self.api.request_server_time() # Assert request = mock_send_request.call_args.kwargs @@ -68,178 +72,180 @@ async def test_time_sends_expected_request(self, mocker): assert request["url"] == "https://api.binance.com/api/v3/time" @pytest.mark.asyncio - async def test_exchange_info_with_symbol_sends_expected_request(self, mocker): + async def test_query_depth_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.exchange_info(symbol="BTCUSDT") + await self.api.query_depth(symbol=self.test_symbol, limit=10) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/exchangeInfo" - assert request["params"] == "symbol=BTCUSDT" + assert request["url"] == "https://api.binance.com/api/v3/depth" + assert request["params"] == "symbol=BTCUSDT&limit=10" @pytest.mark.asyncio - async def test_exchange_info_with_symbols_sends_expected_request(self, mocker): + async def test_query_trades_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.exchange_info(symbols=["BTCUSDT", "ETHUSDT"]) + await self.api.query_trades(symbol=self.test_symbol, limit=10) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/exchangeInfo" - assert request["params"] == "symbols=%5B%22BTCUSDT%22%2C%22ETHUSDT%22%5D" + assert request["url"] == "https://api.binance.com/api/v3/trades" + assert request["params"] == "symbol=BTCUSDT&limit=10" @pytest.mark.asyncio - async def test_depth_sends_expected_request(self, mocker): + async def test_query_historical_trades_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.depth(symbol="BTCUSDT", limit=10) + await self.api.query_historical_trades(symbol=self.test_symbol, limit=10, from_id=0) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/depth" - assert request["params"] == "symbol=BTCUSDT&limit=10" + assert request["url"] == "https://api.binance.com/api/v3/historicalTrades" + assert request["params"] == "symbol=BTCUSDT&limit=10&fromId=0" @pytest.mark.asyncio - async def test_trades_sends_expected_request(self, mocker): + async def test_query_agg_trades_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.trades(symbol="BTCUSDT", limit=10) + await self.api.query_agg_trades( + symbol=self.test_symbol, + from_id=0, + start_time=0, + end_time=1, + limit=10, + ) # 
Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/trades" - assert request["params"] == "symbol=BTCUSDT&limit=10" + assert request["url"] == "https://api.binance.com/api/v3/aggTrades" + assert request["params"] == "symbol=BTCUSDT&fromId=0&startTime=0&endTime=1&limit=10" @pytest.mark.asyncio - async def test_historical_trades_sends_expected_request(self, mocker): + async def test_query_klines_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.historical_trades(symbol="BTCUSDT", from_id=0, limit=10) + await self.api.query_klines( + symbol=self.test_symbol, + interval="1m", + start_time=0, + end_time=1, + limit=1000, + ) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/historicalTrades" - assert request["params"] == "symbol=BTCUSDT&limit=10&fromId=0" + assert request["url"] == "https://api.binance.com/api/v3/klines" + assert request["params"] == "symbol=BTCUSDT&interval=1m&startTime=0&endTime=1&limit=1000" @pytest.mark.asyncio - async def test_agg_trades_sends_expected_request(self, mocker): + async def test_query_ticker_24hr_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.agg_trades( - symbol="BTCUSDT", - from_id=0, - start_time_ms=0, - end_time_ms=1, - limit=10, - ) + await self.api.query_ticker_24hr(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/aggTrades" - assert request["params"] == "symbol=BTCUSDT&fromId=0&startTime=0&endTime=1&limit=10" + assert request["url"] == "https://api.binance.com/api/v3/ticker/24hr" + assert request["params"] == "symbol=BTCUSDT" @pytest.mark.asyncio - async def test_klines_sends_expected_request(self, mocker): + async def test_query_ticker_price_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.klines( - symbol="BTCUSDT", - interval="1m", - start_time_ms=0, - end_time_ms=1, - limit=1000, - ) + await self.api.query_ticker_price(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/klines" - assert request["params"] == "symbol=BTCUSDT&interval=1m&startTime=0&endTime=1&limit=1000" + assert request["url"] == "https://api.binance.com/api/v3/ticker/price" + assert request["params"] == "symbol=BTCUSDT" @pytest.mark.asyncio - async def test_avg_price_sends_expected_request(self, mocker): + async def test_query_book_ticker_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.avg_price(symbol="BTCUSDT") + await self.api.query_ticker_book(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/avgPrice" + assert request["url"] == "https://api.binance.com/api/v3/ticker/bookTicker" assert request["params"] == "symbol=BTCUSDT" + 
# SPOT/MARGIN tests + @pytest.mark.asyncio - async def test_ticker_24hr_sends_expected_request(self, mocker): + async def test_query_spot_exchange_info_with_symbol_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.ticker_24hr(symbol="BTCUSDT") + await self.api.query_spot_exchange_info(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/ticker/24hr" + assert request["url"] == "https://api.binance.com/api/v3/exchangeInfo" assert request["params"] == "symbol=BTCUSDT" @pytest.mark.asyncio - async def test_ticker_price_sends_expected_request(self, mocker): + async def test_query_spot_exchange_info_with_symbols_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.ticker_price(symbol="BTCUSDT") + await self.api.query_spot_exchange_info(symbols=self.test_symbols) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/ticker/price" - assert request["params"] == "symbol=BTCUSDT" + assert request["url"] == "https://api.binance.com/api/v3/exchangeInfo" + assert request["params"] == "symbols=%5B%22BTCUSDT%22%2C%22ETHUSDT%22%5D" @pytest.mark.asyncio - async def test_book_ticker_sends_expected_request(self, mocker): + async def test_query_spot_avg_price_sends_expected_request(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.book_ticker(symbol="BTCUSDT") + await self.api.query_spot_average_price(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs assert request["method"] == "GET" - assert request["url"] == "https://api.binance.com/api/v3/ticker/bookTicker" + assert request["url"] == "https://api.binance.com/api/v3/avgPrice" assert request["params"] == "symbol=BTCUSDT" diff --git a/tests/integration_tests/adapters/binance/test_http_user.py b/tests/integration_tests/adapters/binance/test_http_user.py index 8f7abc01f846..4a98a37b2a06 100644 --- a/tests/integration_tests/adapters/binance/test_http_user.py +++ b/tests/integration_tests/adapters/binance/test_http_user.py @@ -17,6 +17,7 @@ import pytest +from nautilus_trader.adapters.binance.common.enums import BinanceAccountType from nautilus_trader.adapters.binance.http.client import BinanceHttpClient from nautilus_trader.adapters.binance.spot.http.user import BinanceSpotUserDataHttpAPI from nautilus_trader.common.clock import LiveClock @@ -36,8 +37,12 @@ def setup(self): key="SOME_BINANCE_API_KEY", secret="SOME_BINANCE_API_SECRET", ) - - self.api = BinanceSpotUserDataHttpAPI(self.client) + self.test_symbol = "ETHUSDT" + self.spot_api = BinanceSpotUserDataHttpAPI(self.client, BinanceAccountType.SPOT) + self.isolated_margin_api = BinanceSpotUserDataHttpAPI( + self.client, + BinanceAccountType.MARGIN_ISOLATED, + ) @pytest.mark.asyncio async def test_create_listen_key_spot(self, mocker): @@ -46,7 +51,7 @@ async def test_create_listen_key_spot(self, mocker): mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.create_listen_key() + await self.spot_api.create_listen_key() # Assert request = mock_send_request.call_args.kwargs 
@@ -54,14 +59,14 @@ async def test_create_listen_key_spot(self, mocker): assert request["url"] == "https://api.binance.com/api/v3/userDataStream" @pytest.mark.asyncio - async def test_ping_listen_key_spot(self, mocker): + async def test_keepalive_listen_key_spot(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.ping_listen_key( - key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", + await self.spot_api.keepalive_listen_key( + listen_key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", ) # Assert @@ -74,14 +79,14 @@ async def test_ping_listen_key_spot(self, mocker): ) @pytest.mark.asyncio - async def test_close_listen_key_spot(self, mocker): + async def test_delete_listen_key_spot(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.close_listen_key( - key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", + await self.spot_api.delete_listen_key( + listen_key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", ) # Assert @@ -100,7 +105,7 @@ async def test_create_listen_key_isolated_margin(self, mocker): mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.create_listen_key_isolated_margin(symbol="ETHUSDT") + await self.isolated_margin_api.create_listen_key(symbol=self.test_symbol) # Assert request = mock_send_request.call_args.kwargs @@ -109,15 +114,15 @@ async def test_create_listen_key_isolated_margin(self, mocker): assert request["params"] == "symbol=ETHUSDT" @pytest.mark.asyncio - async def test_ping_listen_key_isolated_margin(self, mocker): + async def test_keepalive_listen_key_isolated_margin(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.ping_listen_key_isolated_margin( - symbol="ETHUSDT", - key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", + await self.isolated_margin_api.keepalive_listen_key( + symbol=self.test_symbol, + listen_key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", ) # Assert @@ -130,15 +135,15 @@ async def test_ping_listen_key_isolated_margin(self, mocker): ) @pytest.mark.asyncio - async def test_close_listen_key_isolated_margin(self, mocker): + async def test_delete_listen_key_isolated_margin(self, mocker): # Arrange await self.client.connect() mock_send_request = mocker.patch(target="aiohttp.client.ClientSession.request") # Act - await self.api.close_listen_key_isolated_margin( - symbol="ETHUSDT", - key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", + await self.isolated_margin_api.delete_listen_key( + symbol=self.test_symbol, + listen_key="JUdsZc8CSmMUxg1wJha23RogrT3EuC8eV5UTbAOVTkF3XWofMzWoXtWmDAhy", ) # Assert diff --git a/tests/integration_tests/adapters/binance/test_http_wallet.py b/tests/integration_tests/adapters/binance/test_http_wallet.py index 799bbd3eb162..cd68c1befc95 100644 --- a/tests/integration_tests/adapters/binance/test_http_wallet.py +++ b/tests/integration_tests/adapters/binance/test_http_wallet.py @@ -21,7 +21,7 @@ from nautilus_trader.adapters.binance.http.client import BinanceHttpClient from nautilus_trader.adapters.binance.spot.http.wallet import BinanceSpotWalletHttpAPI -from nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFees +from 
nautilus_trader.adapters.binance.spot.schemas.wallet import BinanceSpotTradeFee from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.logging import Logger @@ -39,7 +39,7 @@ def setup(self): secret="SOME_BINANCE_API_SECRET", ) - self.api = BinanceSpotWalletHttpAPI(self.client) + self.api = BinanceSpotWalletHttpAPI(self.client, self.clock) @pytest.mark.asyncio async def test_trade_fee(self, mocker): @@ -58,7 +58,7 @@ async def async_mock(): ) # Act - response: BinanceSpotTradeFees = await self.api.trade_fee(symbol="BTCUSDT") + response = await self.api.query_spot_trade_fees(symbol="BTCUSDT") # Assert name, args, kwargs = mock_request.call_args[0] @@ -67,7 +67,8 @@ async def async_mock(): assert kwargs["symbol"] == "BTCUSDT" assert "signature" in kwargs assert "timestamp" in kwargs - assert isinstance(response, BinanceSpotTradeFees) + assert len(response) == 1 + assert isinstance(response[0], BinanceSpotTradeFee) @pytest.mark.asyncio async def test_trade_fees(self, mocker): @@ -86,7 +87,7 @@ async def async_mock(): ) # Act - response: list[BinanceSpotTradeFees] = await self.api.trade_fees() + response = await self.api.query_spot_trade_fees() # Assert name, args, kwargs = mock_request.call_args[0] @@ -95,5 +96,5 @@ assert "signature" in kwargs assert "timestamp" in kwargs assert len(response) == 2 - assert isinstance(response[0], BinanceSpotTradeFees) - assert isinstance(response[1], BinanceSpotTradeFees) + assert isinstance(response[0], BinanceSpotTradeFee) + assert isinstance(response[1], BinanceSpotTradeFee) diff --git a/tests/integration_tests/adapters/binance/test_parsing_common.py b/tests/integration_tests/adapters/binance/test_parsing_common.py index c24b2574af1f..57e9116b38c6 100644 --- a/tests/integration_tests/adapters/binance/test_parsing_common.py +++ b/tests/integration_tests/adapters/binance/test_parsing_common.py @@ -15,10 +15,9 @@ import pytest -from nautilus_trader.adapters.binance.common.parsing.data import parse_bar_ws -from nautilus_trader.adapters.binance.common.schemas import BinanceCandlestick -from nautilus_trader.adapters.binance.spot.enums import BinanceSpotOrderType -from nautilus_trader.adapters.binance.spot.parsing.execution import parse_order_type +from nautilus_trader.adapters.binance.common.enums import BinanceOrderType +from nautilus_trader.adapters.binance.common.schemas.market import BinanceCandlestick +from nautilus_trader.adapters.binance.spot.enums import BinanceSpotEnumParser from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.core.datetime import millis_to_nanos from nautilus_trader.model.data.bar import BarSpecification @@ -33,20 +32,25 @@ class TestBinanceCommonParsing: + def setup(self): + self._spot_enum_parser = BinanceSpotEnumParser() + @pytest.mark.parametrize( "order_type, expected", [ - [BinanceSpotOrderType.MARKET, OrderType.MARKET], - [BinanceSpotOrderType.LIMIT, OrderType.LIMIT], - [BinanceSpotOrderType.STOP, OrderType.STOP_MARKET], - [BinanceSpotOrderType.STOP_LOSS, OrderType.STOP_MARKET], - [BinanceSpotOrderType.TAKE_PROFIT, OrderType.LIMIT], - [BinanceSpotOrderType.TAKE_PROFIT_LIMIT, OrderType.STOP_LIMIT], + [BinanceOrderType.LIMIT, OrderType.LIMIT], + [BinanceOrderType.MARKET, OrderType.MARKET], + [BinanceOrderType.STOP, OrderType.STOP_MARKET], + [BinanceOrderType.STOP_LOSS, OrderType.STOP_MARKET], + [BinanceOrderType.STOP_LOSS_LIMIT, OrderType.STOP_LIMIT], + [BinanceOrderType.TAKE_PROFIT, OrderType.LIMIT], + 
[BinanceOrderType.TAKE_PROFIT_LIMIT, OrderType.STOP_LIMIT], + [BinanceOrderType.LIMIT_MAKER, OrderType.LIMIT], ], ) def test_parse_order_type(self, order_type, expected): # Arrange, # Act - result = parse_order_type(order_type) + result = self._spot_enum_parser.parse_binance_order_type(order_type) # Assert assert result == expected @@ -199,9 +203,9 @@ def test_parse_parse_bar_ws(self, resolution, expected_type): ) # Act - bar = parse_bar_ws( + bar = candle.parse_to_binance_bar( instrument_id=BTCUSDT_BINANCE.id, - data=candle, + enum_parser=self._spot_enum_parser, ts_init=millis_to_nanos(1638747720000), ) diff --git a/tests/integration_tests/adapters/binance/test_parsing_http.py b/tests/integration_tests/adapters/binance/test_parsing_http.py index 6a8abeabbc13..16cb15560992 100644 --- a/tests/integration_tests/adapters/binance/test_parsing_http.py +++ b/tests/integration_tests/adapters/binance/test_parsing_http.py @@ -17,8 +17,7 @@ import msgspec -from nautilus_trader.adapters.binance.spot.parsing.data import parse_spot_book_snapshot -from nautilus_trader.adapters.binance.spot.schemas.market import BinanceSpotOrderBookDepthData +from nautilus_trader.adapters.binance.common.schemas.market import BinanceDepth from nautilus_trader.backtest.data.providers import TestInstrumentProvider @@ -34,9 +33,10 @@ def test_parse_book_snapshot(self): ) # Act - result = parse_spot_book_snapshot( + decoder = msgspec.json.Decoder(BinanceDepth) + data = decoder.decode(raw) + result = data.parse_to_order_book_snapshot( instrument_id=ETHUSDT.id, - data=msgspec.json.decode(raw, type=BinanceSpotOrderBookDepthData), ts_init=2, ) diff --git a/tests/integration_tests/adapters/binance/test_parsing_ws.py b/tests/integration_tests/adapters/binance/test_parsing_ws.py index c27c5aad7785..10643243e074 100644 --- a/tests/integration_tests/adapters/binance/test_parsing_ws.py +++ b/tests/integration_tests/adapters/binance/test_parsing_ws.py @@ -17,8 +17,7 @@ import msgspec -from nautilus_trader.adapters.binance.common.parsing.data import parse_ticker_24hr_ws -from nautilus_trader.adapters.binance.common.schemas import BinanceTickerData +from nautilus_trader.adapters.binance.common.schemas.market import BinanceTickerData from nautilus_trader.backtest.data.providers import TestInstrumentProvider @@ -34,9 +33,10 @@ def test_parse_ticker(self): ) # Act - result = parse_ticker_24hr_ws( + decoder = msgspec.json.Decoder(BinanceTickerData) + data = decoder.decode(raw) + result = data.parse_to_binance_ticker( instrument_id=ETHUSDT.id, - data=msgspec.json.decode(raw, type=BinanceTickerData), ts_init=9999999999999991, ) diff --git a/tests/integration_tests/adapters/binance/test_providers.py b/tests/integration_tests/adapters/binance/test_providers.py index 557361cb6d94..52ffb5372867 100644 --- a/tests/integration_tests/adapters/binance/test_providers.py +++ b/tests/integration_tests/adapters/binance/test_providers.py @@ -22,6 +22,7 @@ from nautilus_trader.adapters.binance.futures.providers import BinanceFuturesInstrumentProvider from nautilus_trader.adapters.binance.http.client import BinanceHttpClient from nautilus_trader.adapters.binance.spot.providers import BinanceSpotInstrumentProvider +from nautilus_trader.common.clock import LiveClock from nautilus_trader.model.identifiers import InstrumentId from nautilus_trader.model.identifiers import Symbol from nautilus_trader.model.identifiers import Venue @@ -29,6 +30,10 @@ @pytest.mark.skip(reason="WIP") class TestBinanceInstrumentProvider: + def setup(self): + # Fixture Setup + 
self.clock = LiveClock() + @pytest.mark.asyncio async def test_load_all_async_for_spot_markets( self, @@ -68,6 +73,7 @@ async def mock_send_request( self.provider = BinanceSpotInstrumentProvider( client=binance_http_client, logger=live_logger, + clock=self.clock, account_type=BinanceAccountType.SPOT, ) @@ -122,6 +128,7 @@ async def mock_send_request( self.provider = BinanceFuturesInstrumentProvider( client=binance_http_client, logger=live_logger, + clock=self.clock, account_type=BinanceAccountType.FUTURES_USDT, ) diff --git a/tests/integration_tests/adapters/interactive_brokers/base.py b/tests/integration_tests/adapters/interactive_brokers/base.py index 8d12c1fe1175..b0eee1aa2061 100644 --- a/tests/integration_tests/adapters/interactive_brokers/base.py +++ b/tests/integration_tests/adapters/interactive_brokers/base.py @@ -19,7 +19,7 @@ from nautilus_trader.cache.cache import Cache from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.msgbus.bus import MessageBus from nautilus_trader.test_kit.mocks.cache_database import MockCacheDatabase from nautilus_trader.test_kit.stubs.identifiers import TestIdStubs @@ -36,11 +36,7 @@ def setup(self): # Fixture Setup self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger( - loop=self.loop, - clock=self.clock, - level_stdout=LogLevel.DEBUG, - ) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self.trader_id = TestIdStubs.trader_id() self.strategy_id = TestIdStubs.strategy_id() diff --git a/tests/integration_tests/adapters/interactive_brokers/test_providers.py b/tests/integration_tests/adapters/interactive_brokers/test_providers.py index b92c73e6cf3b..81f2afa2b09a 100644 --- a/tests/integration_tests/adapters/interactive_brokers/test_providers.py +++ b/tests/integration_tests/adapters/interactive_brokers/test_providers.py @@ -32,7 +32,7 @@ ) from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.config import InstrumentProviderConfig from nautilus_trader.model.enums import AssetClass from nautilus_trader.model.enums import AssetType @@ -49,11 +49,7 @@ def setup(self): self.ib = MagicMock() self.loop = asyncio.get_event_loop() self.clock = LiveClock() - self.logger = LiveLogger( - loop=self.loop, - clock=self.clock, - level_stdout=LogLevel.DEBUG, - ) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self.provider = InteractiveBrokersInstrumentProvider( client=self.ib, logger=self.logger, @@ -256,6 +252,7 @@ async def test_instrument_filter_callable_none(self, mocker): # Assert assert len(self.provider.get_all()) == 1 + @pytest.mark.skip(reason="Configs now immutable, limx0 to fix") @pytest.mark.asyncio async def test_instrument_filter_callable_option_filter(self, mocker): # Arrange diff --git a/tests/integration_tests/adapters/sandbox/test_sandbox_execution.py b/tests/integration_tests/adapters/sandbox/test_sandbox_execution.py index afde455d1afb..5713bee9ee21 100644 --- a/tests/integration_tests/adapters/sandbox/test_sandbox_execution.py +++ b/tests/integration_tests/adapters/sandbox/test_sandbox_execution.py @@ -22,7 +22,7 @@ from nautilus_trader.backtest.exchange import SimulatedExchange from nautilus_trader.common.clock import LiveClock 
from nautilus_trader.common.enums import LogLevel -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter from nautilus_trader.config import LiveExecEngineConfig from nautilus_trader.live.data_engine import LiveDataEngine @@ -64,7 +64,7 @@ def setup(self): self.account_id = AccountId(f"{self.venue.value}-001") # Setup logging - self.logger = LiveLogger(loop=self.loop, clock=self.clock, level_stdout=LogLevel.DEBUG) + self.logger = Logger(clock=self.clock, level_stdout=LogLevel.DEBUG) self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger) self.msgbus = MessageBus( diff --git a/tests/integration_tests/infrastructure/test_cache_database.py b/tests/integration_tests/infrastructure/test_cache_database.py index f3278dfac9ea..b02d1bf2db5d 100644 --- a/tests/integration_tests/infrastructure/test_cache_database.py +++ b/tests/integration_tests/infrastructure/test_cache_database.py @@ -84,6 +84,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() @@ -438,7 +439,6 @@ def test_update_actor(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -825,7 +825,6 @@ def test_delete_actor(self): # Arrange, Act actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, diff --git a/tests/integration_tests/live/test_live_node.py b/tests/integration_tests/live/test_live_node.py index 33e260a29398..8597058e2166 100644 --- a/tests/integration_tests/live/test_live_node.py +++ b/tests/integration_tests/live/test_live_node.py @@ -34,7 +34,6 @@ from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.config import CacheDatabaseConfig from nautilus_trader.config import TradingNodeConfig -from nautilus_trader.core.uuid import UUID4 from nautilus_trader.live.node import TradingNode from nautilus_trader.model.identifiers import StrategyId @@ -181,9 +180,8 @@ def test_setting_instance_id(self, monkeypatch): config = TradingNodeConfig.parse(RAW_CONFIG) # Act - config.instance_id = UUID4().value node = TradingNode(config) - assert node.kernel.instance_id.value == config.instance_id + assert len(node.kernel.instance_id.value) == 36 class TestTradingNodeOperation: @@ -204,11 +202,12 @@ def test_build_called_twice_raises_runtime_error(self): node.build() node.build() - def test_start_when_not_built_raises_runtime_error(self): + @pytest.mark.asyncio + async def test_run_when_not_built_raises_runtime_error(self): # Arrange, # Act with pytest.raises(RuntimeError): node = TradingNode() - node.start() + await node.run_async() def test_add_data_client_factory(self): # Arrange @@ -240,7 +239,7 @@ async def test_build_with_multiple_clients(self): node.add_exec_client_factory("BETFAIR", BetfairLiveExecClientFactory) node.build() - node.start() + node.run() await asyncio.sleep(1) # assert self.node.kernel.data_engine.registered_clients @@ -257,7 +256,7 @@ async def test_register_log_sink(self): node.kernel.add_log_sink(sink.append) node.build() - node.start() + node.run() await asyncio.sleep(1) # Assert: Log record received @@ -266,13 +265,13 @@ async def test_register_log_sink(self): assert sink[-1]["instance_id"] == node.instance_id.value @pytest.mark.asyncio - async def test_start(self): + async def test_run(self): # Arrange node 
= TradingNode() node.build() # Act - node.start() + node.run() await asyncio.sleep(2) # Assert @@ -283,7 +282,7 @@ async def test_stop(self): # Arrange node = TradingNode() node.build() - node.start() + node.run() await asyncio.sleep(2) # Allow node to start # Act @@ -308,7 +307,7 @@ async def test_dispose(self, monkeypatch): node.build() node.kernel.cache.add_instrument(TestInstrumentProvider.ethusdt_perp_binance()) - node.start() + node.run() await asyncio.sleep(2) # Allow node to start node.stop() diff --git a/tests/integration_tests/network/conftest.py b/tests/integration_tests/network/conftest.py index e4b017a67d05..1a7b87d1e79b 100644 --- a/tests/integration_tests/network/conftest.py +++ b/tests/integration_tests/network/conftest.py @@ -24,7 +24,7 @@ from aiohttp.test_utils import TestServer from nautilus_trader.common.clock import LiveClock -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger async def handle_echo(reader: asyncio.StreamReader, writer: asyncio.StreamWriter): @@ -107,4 +107,4 @@ async def on_shutdown(app): @pytest.fixture() def logger(event_loop): clock = LiveClock() - return LiveLogger(loop=event_loop, clock=clock) + return Logger(clock=clock) diff --git a/tests/integration_tests/network/test_socket.py b/tests/integration_tests/network/test_socket.py index 1199fd2bef7a..c4f7d5e2051e 100644 --- a/tests/integration_tests/network/test_socket.py +++ b/tests/integration_tests/network/test_socket.py @@ -68,4 +68,4 @@ def handler(raw): # Reconnect and receive another message await asyncio.sleep(1) - assert client.reconnection_count >= 1 + assert client._reconnection_count >= 1 diff --git a/tests/performance_tests/test_perf_catalog.py b/tests/performance_tests/test_perf_catalog.py index c6e0582b3fed..6bb8b2c225e2 100644 --- a/tests/performance_tests/test_perf_catalog.py +++ b/tests/performance_tests/test_perf_catalog.py @@ -27,11 +27,9 @@ class TestBacktestEnginePerformance(PerformanceHarness): @staticmethod def test_load_quote_ticks_python(benchmark): - tempdir = tempfile.mkdtemp() def setup(): - # Arrange cls = TestPersistenceCatalogFile() @@ -51,7 +49,6 @@ def run(catalog): @staticmethod def test_load_quote_ticks_rust(benchmark): - tempdir = tempfile.mkdtemp() def setup(): diff --git a/tests/test_data/bars_eurusd_2019_sim.parquet b/tests/test_data/bars_eurusd_2019_sim.parquet new file mode 100644 index 000000000000..2aef74500dd9 Binary files /dev/null and b/tests/test_data/bars_eurusd_2019_sim.parquet differ diff --git a/tests/test_data/quote_tick_eurusd_2019_sim_rust.parquet b/tests/test_data/quote_tick_eurusd_2019_sim_rust.parquet new file mode 100644 index 000000000000..1b363fdc0ec0 Binary files /dev/null and b/tests/test_data/quote_tick_eurusd_2019_sim_rust.parquet differ diff --git a/tests/test_data/quote_tick_usdjpy_2019_sim_rust.parquet b/tests/test_data/quote_tick_usdjpy_2019_sim_rust.parquet new file mode 100644 index 000000000000..d41c5104059b Binary files /dev/null and b/tests/test_data/quote_tick_usdjpy_2019_sim_rust.parquet differ diff --git a/tests/unit_tests/backtest/test_backtest_config.py b/tests/unit_tests/backtest/test_backtest_config.py index b842b7f8ff49..f000bcba21be 100644 --- a/tests/unit_tests/backtest/test_backtest_config.py +++ b/tests/unit_tests/backtest/test_backtest_config.py @@ -86,6 +86,8 @@ def test_backtest_data_config_load(self): "filter_expr": None, "start": 1580398089820000000, "end": 1580504394501000000, + "use_rust": False, + "metadata": None, } def 
test_backtest_data_config_generic_data(self): @@ -211,7 +213,7 @@ def test_run_config_to_json(self): ) json = msgspec.json.encode(run_config) result = len(msgspec.json.encode(json)) - assert result in (786, 790) # unix, windows sizes + assert result in (854, 858) # unix, windows sizes def test_run_config_parse_obj(self): run_config = TestConfigStubs.backtest_run_config( @@ -221,7 +223,7 @@ BacktestVenueConfig( name="SIM", oms_type="HEDGING", - account_type="MARGIN", + account_type="MARGIN", starting_balances=["1_000_000 USD"], ), ], @@ -231,7 +233,7 @@ assert isinstance(config, BacktestRunConfig) node = BacktestNode(configs=[config]) assert isinstance(node, BacktestNode) - assert len(raw) in (587, 589) # unix, windows sizes + assert len(raw) in (641, 643) # unix, windows sizes def test_backtest_data_config_to_dict(self): run_config = TestConfigStubs.backtest_run_config( @@ -251,7 +253,7 @@ ) json = msgspec.json.encode(run_config) result = len(msgspec.json.encode(json)) - assert result in (1510, 1518) # unix, windows + assert result in (1718, 1726) # unix, windows def test_backtest_run_config_id(self): token = self.backtest_config.id @@ -259,8 +261,8 @@ value: bytes = msgspec.json.encode(self.backtest_config.dict(), enc_hook=json_encoder) print("token_value:", value.decode()) assert token in ( - "025fddcf56215cdd9be2a7b1ccc0e48abfd76fc44839d793fa07d326655b70a9", # unix - "585913bbdf353d7e00b74c8f0a00f0eb8771da901faefeecf3fb9df1f3d48854", # windows + "f36364e423ae67307b08a68feb7cf18353d2983fc8a2f1b9683c44bd707007b3", # unix + "4b985813f597118e367ccc462bcd19a4752fbeff7b73c71ff518dbdef8ef2a47", # windows ) @pytest.mark.skip(reason="fix after merge") diff --git a/tests/unit_tests/backtest/test_backtest_engine.py b/tests/unit_tests/backtest/test_backtest_engine.py index f8f6e538729b..9ee3ef652e31 100644 --- a/tests/unit_tests/backtest/test_backtest_engine.py +++ b/tests/unit_tests/backtest/test_backtest_engine.py @@ -107,7 +107,7 @@ def teardown(self): self.engine.dispose() def test_initialization(self): - engine = BacktestEngine() + engine = BacktestEngine(BacktestEngineConfig(bypass_logging=True)) # Arrange, Act, Assert assert engine.run_id is None @@ -191,6 +191,7 @@ def test_backtest_engine_multiple_runs(self): engine = self.create_engine( config=BacktestEngineConfig( streaming=StreamingConfig(catalog_path="/", fs_protocol="memory"), + bypass_logging=True, ), ) engine.add_strategy(strategy) @@ -204,6 +205,7 @@ def test_backtest_engine_strategy_timestamps(self): engine = self.create_engine( config=BacktestEngineConfig( streaming=StreamingConfig(catalog_path="/", fs_protocol="memory"), + bypass_logging=True, ), ) engine.add_strategy(strategy) @@ -224,8 +226,12 @@ def test_set_instance_id(self): instance_id = UUID4().value # Act - engine = self.create_engine(config=BacktestEngineConfig(instance_id=instance_id)) - engine2 = self.create_engine(config=BacktestEngineConfig()) # Engine sets instance id + engine = self.create_engine( + config=BacktestEngineConfig(instance_id=instance_id, bypass_logging=True), + ) + engine2 = self.create_engine( + config=BacktestEngineConfig(bypass_logging=True), + ) # Engine sets instance id # Assert assert engine.kernel.instance_id.value == instance_id @@ -235,7 +241,7 @@ class TestBacktestEngineData: def setup(self): # Fixture Setup - self.engine = BacktestEngine() + 
self.engine = BacktestEngine(BacktestEngineConfig(bypass_logging=True)) self.engine.add_venue( venue=Venue("BINANCE"), oms_type=OmsType.NETTING, @@ -288,7 +294,7 @@ def test_add_generic_data_adds_to_engine(self, capsys): def test_add_instrument_when_no_venue_raises_exception(self): # Arrange - engine = BacktestEngine() + engine = BacktestEngine(BacktestEngineConfig(bypass_logging=True)) # Act, Assert with pytest.raises(InvalidConfiguration): @@ -513,7 +519,7 @@ class TestBacktestWithAddedBars: def setup(self): # Fixture Setup config = BacktestEngineConfig( - bypass_logging=False, + bypass_logging=True, run_analysis=False, ) self.engine = BacktestEngine(config=config) diff --git a/tests/unit_tests/backtest/test_backtest_exchange.py b/tests/unit_tests/backtest/test_backtest_exchange.py index cefaddbc9250..15823c64a176 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange.py +++ b/tests/unit_tests/backtest/test_backtest_exchange.py @@ -77,6 +77,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() @@ -186,6 +187,22 @@ def test_set_fill_model(self): # Assert assert self.exchange.fill_model == fill_model + def test_get_matching_engines_when_engine_returns_expected_dict(self): + # Arrange, Act + matching_engines = self.exchange.get_matching_engines() + + # Assert + assert isinstance(matching_engines, dict) + assert len(matching_engines) == 1 + assert list(matching_engines.keys()) == [USDJPY_SIM.id] + + def test_get_matching_engine_when_engine_for_instrument_returns_engine(self): + # Arrange, Act + matching_engine = self.exchange.get_matching_engine(USDJPY_SIM.id) + + # Assert + assert matching_engine.instrument == USDJPY_SIM + def test_get_books_with_one_instrument_returns_one_book(self): # Arrange, Act books = self.exchange.get_books() diff --git a/tests/unit_tests/backtest/test_backtest_exchange_bitmex.py b/tests/unit_tests/backtest/test_backtest_exchange_bitmex.py index ddc4ef07af94..d53a196731ae 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_bitmex.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_bitmex.py @@ -56,7 +56,7 @@ def setup(self): self.strategies = [MockStrategy(TestDataStubs.bartype_btcusdt_binance_100tick_last())] self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_exchange_bracket_if_touched_entries.py b/tests/unit_tests/backtest/test_backtest_exchange_bracket_if_touched_entries.py index f3dccb001f7b..1ace3b9259c4 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_bracket_if_touched_entries.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_bracket_if_touched_entries.py @@ -60,6 +60,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_exchange_contingencies.py b/tests/unit_tests/backtest/test_backtest_exchange_contingencies.py index b87693e3494b..d5fb8f01c55d 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_contingencies.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_contingencies.py @@ -56,6 +56,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.INFO, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() @@ -374,7 +375,7 @@ def 
test_reject_bracket_entry_then_rejects_sl_and_tp(self): entry_price=ETHUSDT_PERP_BINANCE.make_price(3050.0), # <-- in the market sl_trigger_price=ETHUSDT_PERP_BINANCE.make_price(3150.0), tp_price=ETHUSDT_PERP_BINANCE.make_price(3000.0), - post_only_entry=True, # <-- will reject placed into the market + entry_post_only=True, # <-- will reject placed into the market entry_order_type=OrderType.LIMIT, ) diff --git a/tests/unit_tests/backtest/test_backtest_exchange_l2_mbp.py b/tests/unit_tests/backtest/test_backtest_exchange_l2_mbp.py index 93b643cf5d61..5b9fc62e0d41 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_l2_mbp.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_l2_mbp.py @@ -62,6 +62,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_exchange_stop_limits.py b/tests/unit_tests/backtest/test_backtest_exchange_stop_limits.py index b29593d4727a..28c4c80630d2 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_stop_limits.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_stop_limits.py @@ -58,6 +58,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_exchange_trailing_stops.py b/tests/unit_tests/backtest/test_backtest_exchange_trailing_stops.py index 714d1e91c60e..2c0564282838 100644 --- a/tests/unit_tests/backtest/test_backtest_exchange_trailing_stops.py +++ b/tests/unit_tests/backtest/test_backtest_exchange_trailing_stops.py @@ -63,6 +63,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_matching_engine.py b/tests/unit_tests/backtest/test_backtest_matching_engine.py index 62c53381f0d2..c25b044645ac 100644 --- a/tests/unit_tests/backtest/test_backtest_matching_engine.py +++ b/tests/unit_tests/backtest/test_backtest_matching_engine.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------------------------- + from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.backtest.matching_engine import OrderMatchingEngine from nautilus_trader.backtest.models import FillModel @@ -42,6 +43,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/backtest/test_backtest_modules.py b/tests/unit_tests/backtest/test_backtest_modules.py index e31fb585a183..5e6a3fbfbd27 100644 --- a/tests/unit_tests/backtest/test_backtest_modules.py +++ b/tests/unit_tests/backtest/test_backtest_modules.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ------------------------------------------------------------------------------------------------- + import pandas as pd from nautilus_trader.backtest.data.providers import TestDataProvider @@ -23,6 +24,7 @@ from nautilus_trader.backtest.modules import SimulationModule from nautilus_trader.backtest.modules import SimulationModuleConfig from nautilus_trader.common.logging import LoggerAdapter +from nautilus_trader.config.backtest import BacktestEngineConfig from nautilus_trader.model.currencies import USD from nautilus_trader.model.enums import AccountType from nautilus_trader.model.enums import OmsType @@ -35,7 +37,7 @@ class TestSimulationModules: def create_engine(self, modules: list): - engine = BacktestEngine() + engine = BacktestEngine(BacktestEngineConfig(bypass_logging=True)) engine.add_venue( venue=Venue("SIM"), oms_type=OmsType.HEDGING, diff --git a/tests/unit_tests/backtest/test_backtest_node.py b/tests/unit_tests/backtest/test_backtest_node.py index 3741a7c4ff85..3384ec07a408 100644 --- a/tests/unit_tests/backtest/test_backtest_node.py +++ b/tests/unit_tests/backtest/test_backtest_node.py @@ -64,7 +64,7 @@ def setup(self): ] self.backtest_configs = [ BacktestRunConfig( - engine=BacktestEngineConfig(strategies=self.strategies), + engine=BacktestEngineConfig(strategies=self.strategies, bypass_logging=True), venues=[self.venue_config], data=[self.data_config], ), @@ -85,7 +85,7 @@ def test_run(self): # Assert assert len(results) == 1 - def test_backtest_run_streaming_sync(self): + def test_backtest_run_batch_sync(self): # Arrange config = BacktestRunConfig( engine=BacktestEngineConfig(strategies=self.strategies), diff --git a/tests/unit_tests/cache/test_cache_execution.py b/tests/unit_tests/cache/test_cache_execution.py index 5b08e1b8a63e..8504e8a84d4a 100644 --- a/tests/unit_tests/cache/test_cache_execution.py +++ b/tests/unit_tests/cache/test_cache_execution.py @@ -72,7 +72,7 @@ class TestCache: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() self.account_id = TestIdStubs.account_id() diff --git a/tests/unit_tests/common/test_common_actor.py b/tests/unit_tests/common/test_common_actor.py index 1a58a3b1f90c..9a051cfee9e2 100644 --- a/tests/unit_tests/common/test_common_actor.py +++ b/tests/unit_tests/common/test_common_actor.py @@ -43,7 +43,7 @@ from nautilus_trader.model.identifiers import Symbol from nautilus_trader.model.identifiers import Venue from nautilus_trader.msgbus.bus import MessageBus -from nautilus_trader.persistence.streaming import StreamingFeatherWriter +from nautilus_trader.persistence.streaming.writer import StreamingFeatherWriter from nautilus_trader.test_kit.mocks.actors import KaboomActor from nautilus_trader.test_kit.mocks.actors import MockActor from nautilus_trader.test_kit.mocks.data import data_catalog_setup @@ -68,6 +68,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() @@ -150,7 +151,6 @@ def test_initialization(self): # Arrange actor = Actor(config=ActorConfig(component_id=self.component_id)) actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -165,7 +165,6 @@ def test_register_warning_event(self): # Arrange actor = Actor(config=ActorConfig(component_id=self.component_id)) actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, 
clock=self.clock, @@ -182,7 +181,6 @@ def test_deregister_warning_event(self): # Arrange actor = Actor(config=ActorConfig(component_id=self.component_id)) actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -488,7 +486,6 @@ def test_start_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -504,7 +501,6 @@ def test_stop_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -523,7 +519,6 @@ def test_resume_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -544,7 +539,6 @@ def test_reset_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -560,7 +554,6 @@ def test_dispose_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -576,7 +569,6 @@ def test_degrade_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -595,7 +587,6 @@ def test_fault_when_user_code_raises_error_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -614,7 +605,6 @@ def test_handle_quote_tick_when_user_code_raises_exception_logs_and_reraises(sel # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -634,7 +624,6 @@ def test_handle_trade_tick_when_user_code_raises_exception_logs_and_reraises(sel # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -654,7 +643,6 @@ def test_handle_bar_when_user_code_raises_exception_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -674,7 +662,6 @@ def test_handle_data_when_user_code_raises_exception_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -700,7 +687,6 @@ def test_handle_event_when_user_code_raises_exception_logs_and_reraises(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -720,7 +706,6 @@ def test_start(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -738,7 +723,6 @@ def test_stop(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -757,7 +741,6 @@ def test_resume(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ 
-778,7 +761,6 @@ def test_reset(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -796,7 +778,6 @@ def test_dispose(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -816,7 +797,6 @@ def test_degrade(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -836,7 +816,6 @@ def test_fault(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -856,7 +835,6 @@ def test_handle_instrument_with_blow_up_logs_exception(self): # Arrange actor = KaboomActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -874,7 +852,6 @@ def test_handle_instrument_when_not_running_does_not_send_to_on_instrument(self) # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -892,7 +869,6 @@ def test_handle_instrument_when_running_sends_to_on_instrument(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -912,7 +888,6 @@ def test_handle_instruments_when_running_sends_to_on_instruments(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -932,7 +907,6 @@ def test_handle_instruments_when_not_running_does_not_send_to_on_instrument(self # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -950,7 +924,6 @@ def test_handle_ticker_when_not_running_does_not_send_to_on_quote_tick(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -970,7 +943,6 @@ def test_handle_ticker_when_running_sends_to_on_quote_tick(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -992,7 +964,6 @@ def test_handle_quote_tick_when_not_running_does_not_send_to_on_quote_tick(self) # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1012,7 +983,6 @@ def test_handle_quote_tick_when_running_sends_to_on_quote_tick(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1034,7 +1004,6 @@ def test_handle_trade_tick_when_not_running_does_not_send_to_on_trade_tick(self) # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1054,7 +1023,6 @@ def test_handle_trade_tick_when_running_sends_to_on_trade_tick(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1076,7 +1044,6 @@ def test_handle_bar_when_not_running_does_not_send_to_on_bar(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1096,7 +1063,6 @@ def test_handle_bar_when_running_sends_to_on_bar(self): # Arrange actor = 
MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1118,7 +1084,6 @@ def test_handle_data_when_not_running_does_not_send_to_on_data(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1144,7 +1109,6 @@ def test_handle_data_when_running_sends_to_on_data(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1172,7 +1136,6 @@ def test_subscribe_custom_data(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1195,7 +1158,6 @@ def test_subscribe_custom_data_with_client_id(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1218,7 +1180,6 @@ def test_unsubscribe_custom_data(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1239,7 +1200,6 @@ def test_unsubscribe_custom_data_with_client_id(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1260,7 +1220,6 @@ def test_subscribe_order_book(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1277,7 +1236,6 @@ def test_unsubscribe_order_book(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1296,7 +1254,6 @@ def test_subscribe_order_book_data(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1313,7 +1270,6 @@ def test_unsubscribe_order_book_deltas(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1332,7 +1288,6 @@ def test_subscribe_instruments(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1354,7 +1309,6 @@ def test_unsubscribe_instruments(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1372,7 +1326,6 @@ def test_subscribe_instrument(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1391,7 +1344,6 @@ def test_unsubscribe_instrument(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1411,7 +1363,6 @@ def test_subscribe_ticker(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1430,7 +1381,6 @@ def test_unsubscribe_ticker(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1450,7 +1400,6 @@ def test_subscribe_quote_ticks(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1469,7 +1418,6 @@ def 
test_unsubscribe_quote_ticks(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1489,7 +1437,6 @@ def test_subscribe_trade_ticks(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1508,7 +1455,6 @@ def test_unsubscribe_trade_ticks(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1528,7 +1474,6 @@ def test_publish_data_sends_to_subscriber(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1555,7 +1500,6 @@ def test_publish_signal_warns_invalid_type(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1570,7 +1514,6 @@ def test_publish_signal_sends_to_subscriber(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1599,7 +1542,6 @@ def test_publish_data_persist(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1628,7 +1570,6 @@ def test_subscribe_bars(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1648,7 +1589,6 @@ def test_unsubscribe_bars(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1670,7 +1610,6 @@ def test_subscribe_venue_status_updates(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1686,7 +1625,6 @@ def test_request_data_sends_request_to_data_engine(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1705,7 +1643,6 @@ def test_request_quote_ticks_sends_request_to_data_engine(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1722,7 +1659,6 @@ def test_request_trade_ticks_sends_request_to_data_engine(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1739,7 +1675,6 @@ def test_request_bars_sends_request_to_data_engine(self): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, @@ -1765,7 +1700,6 @@ def test_request_bars_with_invalid_params_raises_value_error(self, start, stop): # Arrange actor = MockActor() actor.register_base( - trader_id=self.trader_id, msgbus=self.msgbus, cache=self.cache, clock=self.clock, diff --git a/tests/unit_tests/common/test_common_config.py b/tests/unit_tests/common/test_common_config.py index fccf04c8b77f..d4a18a04ff66 100644 --- a/tests/unit_tests/common/test_common_config.py +++ b/tests/unit_tests/common/test_common_config.py @@ -13,38 +13,12 @@ # limitations under the License. 
# ------------------------------------------------------------------------------------------------- -import pkgutil - -import pytest - -from nautilus_trader.config import ActorConfig from nautilus_trader.config import ActorFactory from nautilus_trader.config import ImportableActorConfig from nautilus_trader.test_kit.mocks.actors import MockActor class TestActorFactory: - @pytest.mark.skip(reason="Not implemented anymore") - def test_create_from_source(self): - # Arrange - config = ActorConfig( - component_id="MyActor", - ) - - source = pkgutil.get_data("tests.test_kit", "mocks.py") - importable = ImportableActorConfig( - module="MockActor", - source=source, - config=config, - ) - - # Act - strategy = ActorFactory.create(importable) - - # Assert - assert isinstance(strategy, MockActor) - assert repr(config) == "ActorConfig()" - def test_create_from_path(self): # Arrange config = dict( diff --git a/tests/unit_tests/common/test_common_events.py b/tests/unit_tests/common/test_common_events.py index e75a43cd8e0a..a86989ab400d 100644 --- a/tests/unit_tests/common/test_common_events.py +++ b/tests/unit_tests/common/test_common_events.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------------------------- -from dataclasses import dataclass + import pytest @@ -57,7 +57,7 @@ def test_serializing_component_state_changed_with_unserializable_config_raises_h ): # Arrange - class MyType(ActorConfig): + class MyType(ActorConfig, frozen=True): values: list[int] config = {"key": MyType(values=[1, 2, 3])} @@ -103,32 +103,3 @@ def test_trading_state_changed(self): repr(event) == f"TradingStateChanged(trader_id=TESTER-000, state=HALTED, config={{'max_order_submit_rate': '100/00:00:01'}}, event_id={uuid}, ts_init=0)" # noqa ) - - @pytest.mark.skip(reason="msgspec no longer raises an exception") - def test_serializing_trading_state_changed_with_unserializable_config_raises_helpful_exception( - self, - ): - # Arrange - - @dataclass - class MyType: - values: list[int] - - config = {"key": MyType(values=[1, 2, 3])} - event = TradingStateChanged( - trader_id=TestIdStubs.trader_id(), - state=TradingState.HALTED, - config=config, - event_id=UUID4(), - ts_event=0, - ts_init=0, - ) - - # Act - with pytest.raises(TypeError) as e: - TradingStateChanged.to_dict(event) - - # Assert - expected = "Serialization failed: `Encoding objects of type MyType is unsupported`. You can register a new serializer for `MyType` through `nautilus_trader.config.backtest.register_json_encoding`." # noqa - msg = e.value.args[0] - assert msg == expected diff --git a/tests/unit_tests/common/test_common_factories.py b/tests/unit_tests/common/test_common_factories.py new file mode 100644 index 000000000000..55d81b157f71 --- /dev/null +++ b/tests/unit_tests/common/test_common_factories.py @@ -0,0 +1,98 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +from nautilus_trader.backtest.data.providers import TestInstrumentProvider +from nautilus_trader.common.clock import TestClock +from nautilus_trader.common.factories import OrderFactory +from nautilus_trader.model.enums import OrderSide +from nautilus_trader.model.identifiers import ClientOrderId +from nautilus_trader.model.identifiers import OrderListId +from nautilus_trader.model.objects import Quantity +from nautilus_trader.test_kit.stubs.identifiers import TestIdStubs + + +ETHUSDT_PERP_BINANCE = TestInstrumentProvider.ethusdt_perp_binance() + + +class TestOrderFactory: + def setup(self): + # Fixture Setup + self.trader_id = TestIdStubs.trader_id() + self.strategy_id = TestIdStubs.strategy_id() + self.account_id = TestIdStubs.account_id() + + self.order_factory = OrderFactory( + trader_id=self.trader_id, + strategy_id=self.strategy_id, + clock=TestClock(), + ) + + def test_generate_client_order_id(self): + # Arrange, Act + result = self.order_factory.generate_client_order_id() + + # Assert + assert result == ClientOrderId("O-19700101-000-001-1") + + def test_generate_order_list_id(self): + # Arrange, Act + result = self.order_factory.generate_order_list_id() + + # Assert + assert result == OrderListId("OL-19700101-000-001-1") + + def test_set_client_order_id_count(self): + # Arrange, Act + self.order_factory.set_client_order_id_count(1) + + result = self.order_factory.generate_client_order_id() + + # Assert + assert result == ClientOrderId("O-19700101-000-001-2") + + def test_set_order_list_id_count(self): + # Arrange, Act + self.order_factory.set_order_list_id_count(1) + + result = self.order_factory.generate_order_list_id() + + # Assert + assert result == OrderListId("OL-19700101-000-001-2") + + def test_create_list(self): + # Arrange + order1 = self.order_factory.market( + ETHUSDT_PERP_BINANCE.id, + OrderSide.BUY, + Quantity.from_str("1.5"), + ) + + order2 = self.order_factory.market( + ETHUSDT_PERP_BINANCE.id, + OrderSide.BUY, + Quantity.from_str("1.5"), + ) + + order3 = self.order_factory.market( + ETHUSDT_PERP_BINANCE.id, + OrderSide.BUY, + Quantity.from_str("1.5"), + ) + + # Act + order_list = self.order_factory.create_list([order1, order2, order3]) + + # Assert + assert len(order_list) == 3 diff --git a/tests/unit_tests/common/test_common_logging.py b/tests/unit_tests/common/test_common_logging.py index da4da9b93031..0a1b4e8b2d99 100644 --- a/tests/unit_tests/common/test_common_logging.py +++ b/tests/unit_tests/common/test_common_logging.py @@ -13,18 +13,15 @@ # limitations under the License. 
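# --- Annotation (hedged sketch) ---
# The new factory tests pin the generated ID scheme, which appears to be
# O-{YYYYMMDD}-{trader_tag}-{strategy_tag}-{count} (and OL-... for order lists).
# Under that assumption the counter behavior composes as follows:
from nautilus_trader.common.clock import TestClock
from nautilus_trader.common.factories import OrderFactory
from nautilus_trader.test_kit.stubs.identifiers import TestIdStubs

factory = OrderFactory(
    trader_id=TestIdStubs.trader_id(),      # TESTER-000 -> tag "000"
    strategy_id=TestIdStubs.strategy_id(),  # S-001 -> tag "001"
    clock=TestClock(),                      # epoch start -> date 19700101
)
assert factory.generate_client_order_id().value == "O-19700101-000-001-1"
factory.set_client_order_id_count(41)  # e.g. resuming a persisted sequence
assert factory.generate_client_order_id().value == "O-19700101-000-001-42"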
# ------------------------------------------------------------------------------------------------- -import asyncio import socket import pytest -from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.clock import TestClock from nautilus_trader.common.enums import LogColor from nautilus_trader.common.enums import LogLevel from nautilus_trader.common.enums import log_level_from_str from nautilus_trader.common.enums import log_level_to_str -from nautilus_trader.common.logging import LiveLogger from nautilus_trader.common.logging import Logger from nautilus_trader.common.logging import LoggerAdapter @@ -73,7 +70,11 @@ def test_log_level_from_str(self, string, expected): class TestLoggerTests: def test_log_debug_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.DEBUG) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.DEBUG, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -84,7 +85,11 @@ def test_log_debug_messages_to_console(self): def test_log_info_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.INFO, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -95,7 +100,11 @@ def test_log_info_messages_to_console(self): def test_log_info_with_annotation_sends_to_stdout(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.INFO, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) annotations = {"my_tag": "something"} @@ -108,7 +117,11 @@ def test_log_info_with_annotation_sends_to_stdout(self): def test_log_info_messages_to_console_with_blue_colour(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.INFO, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -119,7 +132,11 @@ def test_log_info_messages_to_console_with_blue_colour(self): def test_log_info_messages_to_console_with_green_colour(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.INFO, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -130,7 +147,11 @@ def test_log_info_messages_to_console_with_green_colour(self): def test_log_warning_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.WARNING) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.WARNING, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -141,7 +162,11 @@ def test_log_warning_messages_to_console(self): def test_log_error_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.ERROR) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.ERROR, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -152,7 +177,11 @@ def test_log_error_messages_to_console(self): def test_log_critical_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL) + logger = Logger( + clock=TestClock(), + 
level_stdout=LogLevel.CRITICAL, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -163,7 +192,11 @@ def test_log_critical_messages_to_console(self): def test_log_exception_messages_to_console(self): # Arrange - logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.CRITICAL, + bypass=True, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -175,7 +208,10 @@ def test_log_exception_messages_to_console(self): def test_register_sink_sends_records_to_sink(self): # Arrange sink = [] - logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL) + logger = Logger( + clock=TestClock(), + level_stdout=LogLevel.CRITICAL, + ) logger_adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger) # Act @@ -193,79 +229,3 @@ def test_register_sink_sends_records_to_sink(self): "timestamp": 0, "trader_id": "TRADER-000", } - - -class TestLiveLogger: - def setup(self): - # Fixture Setup - self.loop = asyncio.get_event_loop() - self.loop.set_debug(True) - - self.logger = LiveLogger( - loop=self.loop, - clock=LiveClock(), - level_stdout=LogLevel.DEBUG, - ) - - self.logger_adapter = LoggerAdapter(component_name="LIVER_LOGGER", logger=self.logger) - - def test_log_when_not_running_on_event_loop_successfully_logs(self): - # Arrange, Act - self.logger_adapter.info("test message") - - # Assert - assert True # No exceptions raised - - @pytest.mark.asyncio - async def test_start_runs_on_event_loop(self): - # Arrange - self.logger.start() - - self.logger_adapter.info("A log message.") - await asyncio.sleep(0) - - # Act, Assert - assert self.logger.is_running - self.logger.stop() - - @pytest.mark.asyncio - async def test_stop_when_running_stops_logger(self): - # Arrange - self.logger.start() - - self.logger_adapter.info("A log message.") - await asyncio.sleep(0) - - # Act - self.logger.stop() - self.logger_adapter.info("A log message.") - - # Assert - assert not self.logger.is_running - - @pytest.mark.asyncio - async def test_log_when_queue_over_maxsize_blocks(self): - # Arrange - logger = LiveLogger( - loop=self.loop, - clock=LiveClock(), - maxsize=5, - ) - - logger_adapter = LoggerAdapter(component_name="LIVE_LOGGER", logger=logger) - logger.start() - - # Act - logger_adapter.info("A log message.") - logger_adapter.info("A log message.") # <-- blocks - logger_adapter.info("A different log message.") # <-- blocks - logger_adapter.info("A log message.") # <-- blocks - logger_adapter.info("A different log message.") # <-- blocks - logger_adapter.info("A log message.") # <-- blocks - - await asyncio.sleep(0.3) # <-- processes all log messages - logger.stop() - await asyncio.sleep(0.3) - - # Assert - assert not logger.is_running diff --git a/tests/unit_tests/common/test_common_providers.py b/tests/unit_tests/common/test_common_providers.py index c134f85b83b3..ade4cfbfbe9e 100644 --- a/tests/unit_tests/common/test_common_providers.py +++ b/tests/unit_tests/common/test_common_providers.py @@ -30,7 +30,7 @@ def setup(self): clock = TestClock() self.provider = InstrumentProvider( venue=BITMEX, - logger=Logger(clock), + logger=Logger(clock, bypass=True), ) def test_get_all_when_no_instruments_returns_empty_dict(self): diff --git a/tests/unit_tests/common/test_common_throttler.py b/tests/unit_tests/common/test_common_throttler.py index f0bbd9e77178..595f5d9a20c1 100644 --- a/tests/unit_tests/common/test_common_throttler.py +++ 
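# --- Annotation (hedged sketch) ---
# The `bypass=True` flag added to nearly every test Logger (together with the
# removal of TestLiveLogger above) suggests bypass mode skips the log dispatch
# machinery so unit tests stay quiet and fast. Illustrative use, assuming the
# flag simply suppresses emission:
from nautilus_trader.common.clock import TestClock
from nautilus_trader.common.enums import LogLevel
from nautilus_trader.common.logging import Logger
from nautilus_trader.common.logging import LoggerAdapter

logger = Logger(clock=TestClock(), level_stdout=LogLevel.DEBUG, bypass=True)
adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger)
adapter.debug("not emitted in bypass mode")  # assumed behavior of the flag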
b/tests/unit_tests/common/test_common_throttler.py @@ -24,7 +24,7 @@ class TestBufferingThrottler: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.handler = [] self.throttler = Throttler( @@ -165,7 +165,7 @@ class TestDroppingThrottler: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.handler = [] self.dropped = [] diff --git a/tests/unit_tests/core/test_core_inspect.py b/tests/unit_tests/core/test_core_inspect.py index 0ed3860079ac..8e3a27bd098a 100644 --- a/tests/unit_tests/core/test_core_inspect.py +++ b/tests/unit_tests/core/test_core_inspect.py @@ -41,6 +41,7 @@ def test_is_nautilus_class(cls, is_nautilus): assert is_nautilus_class(cls=cls) is is_nautilus +@pytest.mark.skip(reason="Flaky and probably being removed") def test_get_size_of(): # Arrange, Act result1 = get_size_of(0) diff --git a/tests/unit_tests/data/test_data_aggregation.py b/tests/unit_tests/data/test_data_aggregation.py index 15062b3ccdc1..d3bf91838699 100644 --- a/tests/unit_tests/data/test_data_aggregation.py +++ b/tests/unit_tests/data/test_data_aggregation.py @@ -144,7 +144,7 @@ def test_set_partial_when_already_set_does_not_update(self): builder.set_partial(partial_bar1) builder.set_partial(partial_bar2) - bar = builder.build(4_000_000_000) + bar = builder.build(4_000_000_000, 4_000_000_000) # Assert assert bar.open == Price.from_str("1.00001") @@ -1307,7 +1307,7 @@ def test_aggregation_for_same_sec_and_minute_intervals(self, step, aggregation): assert handler[0].ts_event == 1610064002000000000 assert handler[0].ts_init == 1610064002000000000 - def test_do_not_build_bars_with_no_updates(self): + def test_do_not_build_with_no_updates(self): # Arrange path = os.path.join(TEST_DATA_DIR, "binance-btcusdt-quotes.parquet") df_ticks = ParquetTickDataLoader.load(path) @@ -1328,7 +1328,7 @@ def test_do_not_build_bars_with_no_updates(self): bar_store.append, clock, Logger(clock), - build_bars_with_no_updates=False, # <-- set this True and test will fail + build_with_no_updates=False, # <-- set this True and test will fail ) aggregator.handle_quote_tick(ticks[0]) @@ -1338,3 +1338,39 @@ def test_do_not_build_bars_with_no_updates(self): # Assert assert len(bar_store) == 1 # <-- only 1 bar even after 5 minutes + + def test_timestamp_on_close_false_timestamps_ts_event_as_open(self): + # Arrange + path = os.path.join(TEST_DATA_DIR, "binance-btcusdt-quotes.parquet") + df_ticks = ParquetTickDataLoader.load(path) + + wrangler = QuoteTickDataWrangler(BTCUSDT_BINANCE) + ticks = wrangler.process(df_ticks) + + clock = TestClock() + bar_store = [] + instrument_id = TestIdStubs.audusd_id() + bar_spec = BarSpecification(1, BarAggregation.MINUTE, PriceType.MID) + bar_type = BarType(instrument_id, bar_spec) + + # Act + aggregator = TimeBarAggregator( + AUDUSD_SIM, + bar_type, + bar_store.append, + clock, + Logger(clock), + timestamp_on_close=False, # <-- set this True and test will fail + ) + aggregator.handle_quote_tick(ticks[0]) + + events = clock.advance_time(dt_to_unix_nanos(UNIX_EPOCH + timedelta(minutes=2))) + for event in events: + event.handle() + + # Assert + assert len(bar_store) == 2 + assert bar_store[0].ts_event == 0 # <-- bar open + assert bar_store[0].ts_init == 60_000_000_000 # <-- bar close + assert bar_store[1].ts_event == 60_000_000_000 # <-- bar open + assert bar_store[1].ts_init == 120_000_000_000 # <-- bar close diff --git 
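# --- Annotation ---
# The new aggregation test fixes the bar timestamping contract: with
# timestamp_on_close=False, ts_event is the bar OPEN while ts_init remains the
# bar CLOSE. A worked check of the asserted values for two 1-minute bars
# starting at the epoch:
NANOS_PER_MINUTE = 60_000_000_000
expected = [(i * NANOS_PER_MINUTE, (i + 1) * NANOS_PER_MINUTE) for i in range(2)]
assert expected == [(0, 60_000_000_000), (60_000_000_000, 120_000_000_000)]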
a/tests/unit_tests/data/test_data_client.py b/tests/unit_tests/data/test_data_client.py index 6c6a732b9bd3..abc9296fb2c0 100644 --- a/tests/unit_tests/data/test_data_client.py +++ b/tests/unit_tests/data/test_data_client.py @@ -168,7 +168,7 @@ class TestMarketDataClient: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/data/test_data_engine.py b/tests/unit_tests/data/test_data_engine.py index b15ac7b015aa..48cb9852bb41 100644 --- a/tests/unit_tests/data/test_data_engine.py +++ b/tests/unit_tests/data/test_data_engine.py @@ -75,6 +75,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/execution/test_execution_client.py b/tests/unit_tests/execution/test_execution_client.py index 49a561c4eb33..4a06a801da16 100644 --- a/tests/unit_tests/execution/test_execution_client.py +++ b/tests/unit_tests/execution/test_execution_client.py @@ -40,7 +40,7 @@ class TestExecutionClient: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/execution/test_execution_emulator.py b/tests/unit_tests/execution/test_execution_emulator.py index 894674e744f5..b8e8c39e4bd2 100644 --- a/tests/unit_tests/execution/test_execution_emulator.py +++ b/tests/unit_tests/execution/test_execution_emulator.py @@ -76,6 +76,7 @@ def setup(self): self.logger = Logger( clock=TestClock(), level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/execution/test_execution_emulator_list.py b/tests/unit_tests/execution/test_execution_emulator_list.py index ef709919f9a2..e9e907820adc 100644 --- a/tests/unit_tests/execution/test_execution_emulator_list.py +++ b/tests/unit_tests/execution/test_execution_emulator_list.py @@ -64,6 +64,7 @@ def setup(self): self.logger = Logger( clock=TestClock(), level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/execution/test_execution_engine.py b/tests/unit_tests/execution/test_execution_engine.py index c498eec785c5..0df7d135eb1a 100644 --- a/tests/unit_tests/execution/test_execution_engine.py +++ b/tests/unit_tests/execution/test_execution_engine.py @@ -74,6 +74,7 @@ def setup(self): self.logger = Logger( clock=TestClock(), level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/live/test_live_data_client.py b/tests/unit_tests/live/test_live_data_client.py index 27e460c77839..e27c0a06fca0 100644 --- a/tests/unit_tests/live/test_live_data_client.py +++ b/tests/unit_tests/live/test_live_data_client.py @@ -44,7 +44,7 @@ def setup(self): self.loop.set_debug(True) self.clock = LiveClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/live/test_live_data_engine.py b/tests/unit_tests/live/test_live_data_engine.py index 3772e876c7b1..76a551d58fa0 100644 --- a/tests/unit_tests/live/test_live_data_engine.py +++ b/tests/unit_tests/live/test_live_data_engine.py @@ -54,7 +54,7 @@ def setup(self): self.loop.set_debug(True) self.clock = LiveClock() - self.logger = Logger(self.clock) + 
self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/live/test_live_execution_engine.py b/tests/unit_tests/live/test_live_execution_engine.py index 2f01415264a6..b8219253b500 100644 --- a/tests/unit_tests/live/test_live_execution_engine.py +++ b/tests/unit_tests/live/test_live_execution_engine.py @@ -83,6 +83,7 @@ def setup(self): self.logger = Logger( clock=self.clock, level_stdout=LogLevel.DEBUG, + bypass=True, ) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/live/test_live_execution_recon.py b/tests/unit_tests/live/test_live_execution_recon.py index 2acfbf978cd3..978a4bb7ab71 100644 --- a/tests/unit_tests/live/test_live_execution_recon.py +++ b/tests/unit_tests/live/test_live_execution_recon.py @@ -22,7 +22,7 @@ from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.common.clock import LiveClock from nautilus_trader.common.factories import OrderFactory -from nautilus_trader.common.logging import LiveLogger +from nautilus_trader.common.logging import Logger from nautilus_trader.common.providers import InstrumentProvider from nautilus_trader.core.uuid import UUID4 from nautilus_trader.execution.reports import OrderStatusReport @@ -68,7 +68,7 @@ def setup(self): self.loop.set_debug(True) self.clock = LiveClock() - self.logger = LiveLogger(self.loop, self.clock) + self.logger = Logger(self.clock) self.account_id = TestIdStubs.account_id() self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/live/test_live_risk_engine.py b/tests/unit_tests/live/test_live_risk_engine.py index 478f31ec3dd1..2270757fc347 100644 --- a/tests/unit_tests/live/test_live_risk_engine.py +++ b/tests/unit_tests/live/test_live_risk_engine.py @@ -56,7 +56,7 @@ def setup(self): self.loop.set_debug(True) self.clock = LiveClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() self.account_id = TestIdStubs.account_id() diff --git a/tests/unit_tests/model/test_model_objects_money.py b/tests/unit_tests/model/test_model_objects_money.py index 266b15bd8578..a8c5ae2d761c 100644 --- a/tests/unit_tests/model/test_model_objects_money.py +++ b/tests/unit_tests/model/test_model_objects_money.py @@ -46,7 +46,6 @@ def test_instantiate_with_value_exceeding_negative_limit_raises_value_error(self Money(-9_223_372_036 - 1, currency=USD) def test_instantiate_with_none_value_returns_money_with_zero_amount(self) -> None: - # Arrange, Act money_zero = Money(None, currency=USD) diff --git a/tests/unit_tests/model/test_model_orders.py b/tests/unit_tests/model/test_model_orders.py index d95352e643c5..ccc44ef946d3 100644 --- a/tests/unit_tests/model/test_model_orders.py +++ b/tests/unit_tests/model/test_model_orders.py @@ -1263,6 +1263,7 @@ def test_bracket_market_entry_order_list(self): # Assert assert bracket.id == OrderListId("OL-19700101-000-001-1") assert bracket.instrument_id == AUDUSD_SIM.id + assert len(bracket) == 3 assert len(bracket.orders) == 3 assert bracket.orders[0].order_type == OrderType.MARKET assert bracket.orders[1].order_type == OrderType.STOP_MARKET @@ -1307,17 +1308,21 @@ def test_bracket_limit_entry_order_list(self): entry_price=Price.from_str("1.00000"), sl_trigger_price=Price.from_str("0.99990"), tp_price=Price.from_str("1.00010"), + tp_trigger_price=Price.from_str("1.00010"), time_in_force=TimeInForce.GTC, entry_order_type=OrderType.LIMIT, + tp_order_type=OrderType.LIMIT_IF_TOUCHED, + 
tp_post_only=False, ) # Assert assert bracket.id == OrderListId("OL-19700101-000-001-1") assert bracket.instrument_id == AUDUSD_SIM.id + assert len(bracket) == 3 assert len(bracket.orders) == 3 assert bracket.orders[0].order_type == OrderType.LIMIT assert bracket.orders[1].order_type == OrderType.STOP_MARKET - assert bracket.orders[2].order_type == OrderType.LIMIT + assert bracket.orders[2].order_type == OrderType.LIMIT_IF_TOUCHED assert bracket.orders[0].instrument_id == AUDUSD_SIM.id assert bracket.orders[1].instrument_id == AUDUSD_SIM.id assert bracket.orders[2].instrument_id == AUDUSD_SIM.id @@ -1336,6 +1341,9 @@ def test_bracket_limit_entry_order_list(self): assert bracket.orders[2].time_in_force == TimeInForce.GTC assert bracket.orders[1].expire_time is None assert bracket.orders[2].expire_time is None + assert bracket.orders[0].is_post_only is False + assert bracket.orders[1].is_post_only is False + assert bracket.orders[2].is_post_only is False assert bracket.orders[0].contingency_type == ContingencyType.OTO assert bracket.orders[1].contingency_type == ContingencyType.OUO assert bracket.orders[2].contingency_type == ContingencyType.OUO @@ -1368,6 +1376,7 @@ def test_bracket_stop_limit_entry_stop_limit_tp_order_list(self): # Assert assert bracket.id == OrderListId("OL-19700101-000-001-1") assert bracket.instrument_id == AUDUSD_SIM.id + assert len(bracket) == 3 assert len(bracket.orders) == 3 assert bracket.orders[0].order_type == OrderType.LIMIT_IF_TOUCHED assert bracket.orders[1].order_type == OrderType.STOP_MARKET diff --git a/tests/unit_tests/msgbus/test_msgbus_bus.py b/tests/unit_tests/msgbus/test_msgbus_bus.py index 7ee9f926efe7..9a768a29dedb 100644 --- a/tests/unit_tests/msgbus/test_msgbus_bus.py +++ b/tests/unit_tests/msgbus/test_msgbus_bus.py @@ -29,7 +29,7 @@ class TestMessageBus: def setup(self): # Fixture Setup self.clock = TestClock() - self.logger = Logger(self.clock) + self.logger = Logger(self.clock, bypass=True) self.trader_id = TestIdStubs.trader_id() diff --git a/tests/unit_tests/persistence/external/test_core.py b/tests/unit_tests/persistence/external/test_core.py index 822f92cb492f..f56fccfbf37b 100644 --- a/tests/unit_tests/persistence/external/test_core.py +++ b/tests/unit_tests/persistence/external/test_core.py @@ -26,7 +26,11 @@ from nautilus_trader.adapters.betfair.providers import BetfairInstrumentProvider from nautilus_trader.adapters.betfair.util import make_betfair_reader from nautilus_trader.backtest.data.providers import TestInstrumentProvider +from nautilus_trader.backtest.data.wranglers import QuoteTickDataWrangler from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.tick import TradeTick +from nautilus_trader.model.enums import AggressorSide +from nautilus_trader.model.identifiers import TradeId from nautilus_trader.model.objects import Price from nautilus_trader.model.objects import Quantity from nautilus_trader.persistence.external.core import RawFile @@ -39,6 +43,7 @@ from nautilus_trader.persistence.external.core import validate_data_catalog from nautilus_trader.persistence.external.core import write_objects from nautilus_trader.persistence.external.core import write_parquet +from nautilus_trader.persistence.external.core import write_parquet_rust from nautilus_trader.persistence.external.core import write_tables from nautilus_trader.persistence.external.readers import CSVReader from nautilus_trader.test_kit.mocks.data import NewsEventData @@ -130,10 +135,10 @@ def test_raw_file_pickleable(self): 
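# --- Annotation (hedged sketch) ---
# The updated bracket test exercises new take-profit parameters. The hunk elides
# the leading arguments, so instrument/side/quantity below are assumptions; the
# remaining keywords are taken directly from the diff:
bracket = self.order_factory.bracket(
    instrument_id=AUDUSD_SIM.id,                 # assumed
    order_side=OrderSide.BUY,                    # assumed
    quantity=Quantity.from_int(100_000),         # assumed
    entry_price=Price.from_str("1.00000"),
    sl_trigger_price=Price.from_str("0.99990"),
    tp_price=Price.from_str("1.00010"),
    tp_trigger_price=Price.from_str("1.00010"),  # new: trigger for the LIT take-profit
    time_in_force=TimeInForce.GTC,
    entry_order_type=OrderType.LIMIT,
    tp_order_type=OrderType.LIMIT_IF_TOUCHED,    # new: TP order type is configurable
    tp_post_only=False,                          # new: post-only is configurable
)
assert len(bracket) == 3  # OrderList now supports len() directly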
@pytest.mark.parametrize( "glob, num_files", [ - ("**.json", 4), - ("**.txt", 3), - ("**.parquet", 4), - ("**.csv", 16), + # ("**.json", 4), + # ("**.txt", 3), + ("**.parquet", 7), + # ("**.csv", 16), ], ) def test_scan_paths(self, glob, num_files): @@ -196,7 +201,6 @@ def test_write_parquet_determine_partitions_writes_instrument_id(self): assert expected in files def test_data_catalog_instruments_no_partition(self): - # Arrange, Act self._load_data_into_catalog() path = f"{self.catalog.path}/data/betting_instrument.parquet" @@ -204,6 +208,8 @@ def test_data_catalog_instruments_no_partition(self): path_or_paths=path, filesystem=self.fs, ) + + # TODO deprecation warning partitions = dataset.partitions # Assert @@ -468,7 +474,6 @@ def test_write_parquet_no_partitions(self): assert result.equals(df) def test_write_parquet_partitions(self): - self._load_data_into_catalog() # Arrange fs = self.catalog.fs @@ -498,3 +503,87 @@ def test_write_parquet_partitions(self): assert dataset.files[1].startswith( f"{self.catalog.path}/sample.parquet/instrument_id=b/", ) + + def test_process_files_use_rust_writes_expected(self): + # Arrange + instrument = TestInstrumentProvider.default_fx_ccy("USD/JPY") + + def block_parser(df): + df = df.set_index("timestamp") + df.index = pd.to_datetime(df.index) + yield from QuoteTickDataWrangler(instrument=instrument).process(df) + + # Act + process_files( + glob_path=TEST_DATA_DIR + "/truefx-usdjpy-ticks.csv", + reader=CSVReader(block_parser=block_parser), + use_rust=True, + catalog=self.catalog, + instrument=instrument, + ) + + path = f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=USD-JPY.SIM/1357077600295000064-1357079713493999872-0.parquet" + assert self.fs.exists(path) + + def test_write_parquet_rust_quote_ticks_writes_expected(self): + # Arrange + instrument = TestInstrumentProvider.default_fx_ccy("EUR/USD") + + objs = [ + QuoteTick( + instrument_id=instrument.id, + bid=Price.from_str("4507.24000000"), + ask=Price.from_str("4507.25000000"), + bid_size=Quantity.from_str("2.35950000"), + ask_size=Quantity.from_str("2.84570000"), + ts_event=1, + ts_init=1, + ), + QuoteTick( + instrument_id=instrument.id, + bid=Price.from_str("4507.24000000"), + ask=Price.from_str("4507.25000000"), + bid_size=Quantity.from_str("2.35950000"), + ask_size=Quantity.from_str("2.84570000"), + ts_event=10, + ts_init=10, + ), + ] + # Act + write_parquet_rust(self.catalog, objs, instrument) + + path = f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=EUR-USD.SIM/0000000000000000001-0000000000000000010-0.parquet" + + assert self.fs.exists(path) + assert len(pd.read_parquet(path)) == 2 + + def test_write_parquet_rust_trade_ticks_writes_expected(self): + # Arrange + instrument = TestInstrumentProvider.default_fx_ccy("EUR/USD") + + objs = [ + TradeTick( + instrument_id=instrument.id, + price=Price.from_str("2.0"), + size=Quantity.from_int(10), + aggressor_side=AggressorSide.NO_AGGRESSOR, + trade_id=TradeId("1"), + ts_event=1, + ts_init=1, + ), + TradeTick( + instrument_id=instrument.id, + price=Price.from_str("2.0"), + size=Quantity.from_int(10), + aggressor_side=AggressorSide.NO_AGGRESSOR, + trade_id=TradeId("1"), + ts_event=10, + ts_init=10, + ), + ] + # Act + write_parquet_rust(self.catalog, objs, instrument) + + path = f"{self.catalog.path}/data/trade_tick.parquet/instrument_id=EUR-USD.SIM/0000000000000000001-0000000000000000010-0.parquet" + + assert self.fs.exists(path) diff --git a/tests/unit_tests/persistence/external/test_util.py 
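# --- Annotation ---
# The asserted paths pin down the Rust writer's partition file naming:
# 19-digit zero-padded {min_ts}-{max_ts}-{version}.parquet. A reference
# formatter consistent with every path asserted in these tests:
def partition_filename(min_ts: int, max_ts: int, version: int = 0) -> str:
    return f"{min_ts:019d}-{max_ts:019d}-{version}.parquet"

assert partition_filename(1, 10) == "0000000000000000001-0000000000000000010-0.parquet"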
b/tests/unit_tests/persistence/external/test_util.py index ecadb1112c49..4d760c5143a7 100644 --- a/tests/unit_tests/persistence/external/test_util.py +++ b/tests/unit_tests/persistence/external/test_util.py @@ -13,9 +13,16 @@ # limitations under the License. # ------------------------------------------------------------------------------------------------- -from nautilus_trader.persistence.base import Singleton -from nautilus_trader.persistence.base import clear_singleton_instances -from nautilus_trader.persistence.base import resolve_kwargs + +import pandas as pd +import pytest + +from nautilus_trader.persistence.external.util import Singleton +from nautilus_trader.persistence.external.util import clear_singleton_instances +from nautilus_trader.persistence.external.util import is_filename_in_time_range +from nautilus_trader.persistence.external.util import parse_filename +from nautilus_trader.persistence.external.util import parse_filename_start +from nautilus_trader.persistence.external.util import resolve_kwargs def test_resolve_kwargs(): @@ -92,3 +99,108 @@ def __init__(self, a, b): assert test1.b == {"hello": "world"} instances = {(("a", 1), ("b", (("hello", "world"),))): test1} assert Test._instances == instances + + +@pytest.mark.parametrize( + "filename, expected", + [ + [ + "1577836800000000000-1578182400000000000-0.parquet", + (1577836800000000000, 1578182400000000000), + ], + [ + "/data/test/sample.parquet/instrument_id=a/1577836800000000000-1578182400000000000-0.parquet", + (None, None), + ], + ], +) +def test_parse_filename(filename, expected): + assert parse_filename(filename) == expected + + +@pytest.mark.parametrize( + "filename, start, end, expected", + [ + [ + "1546383600000000000-1577826000000000000-SIM-1-HOUR-BID-EXTERNAL-0.parquet", + 0, + 9223372036854775807, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 4, + 7, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 6, + 9, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 6, + 7, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 4, + 9, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 7, + 10, + True, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 9, + 10, + False, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 2, + 4, + False, + ], + [ + "0000000000000000005-0000000000000000008-0.parquet", + 0, + 9223372036854775807, + True, + ], + ], +) +def test_is_filename_in_time_range(filename, start, end, expected): + assert is_filename_in_time_range(filename, start, end) is expected + + +@pytest.mark.parametrize( + "filename, expected", + [ + [ + "/data/test/sample.parquet/instrument_id=a/1577836800000000000-1578182400000000000-0.parquet", + ("a", pd.Timestamp("2020-01-01 00:00:00")), + ], + [ + "1546383600000000000-1577826000000000000-SIM-1-HOUR-BID-EXTERNAL-0.parquet", + (None, pd.Timestamp("2019-01-01 23:00:00")), + ], + [ + "/data/test/sample.parquet/instrument_id=a/0648140b1fd7491a97983c0c6ece8d57.parquet", + None, + ], + [ + "0648140b1fd7491a97983c0c6ece8d57.parquet", + None, + ], + ], +) +def test_parse_filename_start(filename, expected): + assert parse_filename_start(filename) == expected diff --git a/tests/unit_tests/persistence/test_batching.py b/tests/unit_tests/persistence/test_batching.py deleted file mode 100644 index ee8465b187ad..000000000000 --- a/tests/unit_tests/persistence/test_batching.py +++ /dev/null @@ -1,127 +0,0 @@ -# 
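# --- Annotation ---
# The parametrized cases above imply inclusive interval-overlap semantics for
# is_filename_in_time_range: a file spanning [5, 8] matches any query
# [start, end] that touches it. A minimal reference consistent with every case:
def overlaps_inclusive(file_start: int, file_end: int, start: int, end: int) -> bool:
    return not (file_end < start or file_start > end)

assert overlaps_inclusive(5, 8, 7, 10) is True   # touching at the file end
assert overlaps_inclusive(5, 8, 4, 7) is True    # partial overlap
assert overlaps_inclusive(5, 8, 9, 10) is False  # strictly after the file
assert overlaps_inclusive(5, 8, 2, 4) is False   # strictly before the file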
------------------------------------------------------------------------------------------------- -# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. -# https://nautechsystems.io -# -# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------------------------- - -import fsspec - -from nautilus_trader.adapters.betfair.providers import BetfairInstrumentProvider -from nautilus_trader.backtest.node import BacktestNode -from nautilus_trader.config import BacktestDataConfig -from nautilus_trader.config import BacktestEngineConfig -from nautilus_trader.config import BacktestRunConfig -from nautilus_trader.model.data.venue import InstrumentStatusUpdate -from nautilus_trader.model.orderbook.data import OrderBookData -from nautilus_trader.persistence.batching import batch_files -from nautilus_trader.persistence.external.core import process_files -from nautilus_trader.persistence.external.readers import CSVReader -from nautilus_trader.persistence.funcs import parse_bytes -from nautilus_trader.test_kit.mocks.data import NewsEventData -from nautilus_trader.test_kit.mocks.data import data_catalog_setup -from nautilus_trader.test_kit.stubs.persistence import TestPersistenceStubs -from tests import TEST_DATA_DIR -from tests.integration_tests.adapters.betfair.test_kit import BetfairTestStubs - - -class TestPersistenceBatching: - def setup(self): - self.catalog = data_catalog_setup(protocol="memory") - - self.fs: fsspec.AbstractFileSystem = self.catalog.fs - - self._load_data_into_catalog() - - def teardown(self): - # Cleanup - path = self.catalog.path - fs = self.catalog.fs - if fs.exists(path): - fs.rm(path, recursive=True) - - def _load_data_into_catalog(self): - self.instrument_provider = BetfairInstrumentProvider.from_instruments([]) - process_files( - glob_path=TEST_DATA_DIR + "/1.166564490.bz2", - reader=BetfairTestStubs.betfair_reader(instrument_provider=self.instrument_provider), - instrument_provider=self.instrument_provider, - catalog=self.catalog, - ) - - def test_batch_files_single(self): - # Arrange - instrument_ids = self.catalog.instruments()["id"].unique().tolist() - shared_kw = dict( - catalog_path=str(self.catalog.path), - catalog_fs_protocol=self.catalog.fs.protocol, - data_cls=OrderBookData, - ) - iter_batches = batch_files( - catalog=self.catalog, - data_configs=[ - BacktestDataConfig(**shared_kw, instrument_id=instrument_ids[0]), - BacktestDataConfig(**shared_kw, instrument_id=instrument_ids[1]), - ], - target_batch_size_bytes=parse_bytes("10kib"), - read_num_rows=300, - ) - - # Act - timestamp_chunks = [] - for batch in iter_batches: - timestamp_chunks.append([b.ts_init for b in batch]) - - # Assert - latest_timestamp = 0 - for timestamps in timestamp_chunks: - assert max(timestamps) > latest_timestamp - latest_timestamp = max(timestamps) - assert timestamps == sorted(timestamps) - - def test_batch_generic_data(self): - # Arrange - TestPersistenceStubs.setup_news_event_persistence() - 
process_files( - glob_path=f"{TEST_DATA_DIR}/news_events.csv", - reader=CSVReader(block_parser=TestPersistenceStubs.news_event_parser), - catalog=self.catalog, - ) - data_config = BacktestDataConfig( - catalog_path=self.catalog.path, - catalog_fs_protocol="memory", - data_cls=NewsEventData, - client_id="NewsClient", - ) - # Add some arbitrary instrument data to appease BacktestEngine - instrument_data_config = BacktestDataConfig( - catalog_path=self.catalog.path, - catalog_fs_protocol="memory", - instrument_id=self.catalog.instruments(as_nautilus=True)[0].id.value, - data_cls=InstrumentStatusUpdate, - ) - streaming = BetfairTestStubs.streaming_config( - catalog_path=self.catalog.path, - ) - engine = BacktestEngineConfig(streaming=streaming) - run_config = BacktestRunConfig( - engine=engine, - data=[data_config, instrument_data_config], - venues=[BetfairTestStubs.betfair_venue_config()], - batch_size_bytes=parse_bytes("1mib"), - ) - - # Act - node = BacktestNode(configs=[run_config]) - node.run() - - # Assert - assert node diff --git a/tests/unit_tests/persistence/test_catalog.py b/tests/unit_tests/persistence/test_catalog.py index 738051fd1cc8..63caf00282df 100644 --- a/tests/unit_tests/persistence/test_catalog.py +++ b/tests/unit_tests/persistence/test_catalog.py @@ -14,7 +14,9 @@ # ------------------------------------------------------------------------------------------------- import datetime +import itertools import os +import sys import tempfile from decimal import Decimal @@ -26,6 +28,8 @@ from nautilus_trader.adapters.betfair.providers import BetfairInstrumentProvider from nautilus_trader.backtest.data.providers import TestInstrumentProvider from nautilus_trader.backtest.data.wranglers import BarDataWrangler +from nautilus_trader.backtest.data.wranglers import QuoteTickDataWrangler +from nautilus_trader.core.datetime import unix_nanos_to_dt from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReader from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReaderType from nautilus_trader.core.nautilus_pyo3.persistence import ParquetType @@ -50,6 +54,7 @@ from nautilus_trader.persistence.external.core import write_objects from nautilus_trader.persistence.external.core import write_tables from nautilus_trader.persistence.external.readers import CSVReader +from nautilus_trader.persistence.external.readers import ParquetReader as ParquetByteReader from nautilus_trader.test_kit.mocks.data import NewsEventData from nautilus_trader.test_kit.mocks.data import data_catalog_setup from nautilus_trader.test_kit.stubs.data import TestDataStubs @@ -63,6 +68,7 @@ class TestPersistenceCatalogRust: def setup(self): self.catalog = data_catalog_setup(protocol="file") self.fs: fsspec.AbstractFileSystem = self.catalog.fs + self.instrument = TestInstrumentProvider.default_fx_ccy("EUR/USD", Venue("SIM")) def teardown(self): # Cleanup @@ -71,59 +77,84 @@ def teardown(self): if fs.exists(path): fs.rm(path, recursive=True) - def _load_quote_ticks_into_catalog_rust(self) -> None: + def _load_quote_ticks_into_catalog_rust(self) -> list[QuoteTick]: parquet_data_path = os.path.join(TEST_DATA_DIR, "quote_tick_data.parquet") assert os.path.exists(parquet_data_path) + reader = ParquetReader( parquet_data_path, 1000, ParquetType.QuoteTick, ParquetReaderType.File, ) - # data = map(QuoteTick.list_from_capsule, reader) - # ticks = list(itertools.chain(*data)) - # print(ticks) - # Use rust writer - metadata = { - "instrument_id": "USD/JPY.SIM", - "price_precision": "5", - "size_precision": "0", - } 
- writer = ParquetWriter( - ParquetType.QuoteTick, - metadata, + mapped_chunk = map(QuoteTick.list_from_capsule, reader) + quotes = list(itertools.chain(*mapped_chunk)) + + min_timestamp = str(quotes[0].ts_init).rjust(19, "0") + max_timestamp = str(quotes[-1].ts_init).rjust(19, "0") + + # Write EUR/USD and USD/JPY rust quotes + for instrument_id in ("EUR/USD.SIM", "USD/JPY.SIM"): + # Reset reader + reader = ParquetReader( + parquet_data_path, + 1000, + ParquetType.QuoteTick, + ParquetReaderType.File, + ) + + metadata = { + "instrument_id": instrument_id, + "price_precision": "5", + "size_precision": "0", + } + writer = ParquetWriter( + ParquetType.QuoteTick, + metadata, + ) + + file_path = os.path.join( + self.catalog.path, + "data", + "quote_tick.parquet", + f"instrument_id={instrument_id.replace('/', '-')}", # EUR-USD.SIM, USD-JPY.SIM + f"{min_timestamp}-{max_timestamp}-0.parquet", + ) + + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, "wb") as f: + for chunk in reader: + writer.write(chunk) + data: bytes = writer.flush_bytes() + f.write(data) + + return quotes + + def _load_trade_ticks_into_catalog_rust(self) -> list[TradeTick]: + parquet_data_path = os.path.join(TEST_DATA_DIR, "trade_tick_data.parquet") + assert os.path.exists(parquet_data_path) + reader = ParquetReader( + parquet_data_path, + 100, + ParquetType.TradeTick, + ParquetReaderType.File, ) - file_path = os.path.join( - self.catalog.path, - "data", - "quote_tick.parquet", - "instrument_id=USD-JPY.SIM", - "0-0-0.parquet", - ) + mapped_chunk = map(TradeTick.list_from_capsule, reader) + trades = list(itertools.chain(*mapped_chunk)) - os.makedirs(os.path.dirname(file_path), exist_ok=True) - with open(file_path, "wb") as f: - for chunk in reader: - writer.write(chunk) - data: bytes = writer.flush_bytes() - f.write(data) + min_timestamp = str(trades[0].ts_init).rjust(19, "0") + max_timestamp = str(trades[-1].ts_init).rjust(19, "0") - def _load_trade_ticks_into_catalog_rust(self) -> None: - parquet_data_path = os.path.join(TEST_DATA_DIR, "trade_tick_data.parquet") - assert os.path.exists(parquet_data_path) + # Reset reader reader = ParquetReader( parquet_data_path, 100, ParquetType.TradeTick, ParquetReaderType.File, ) - # data = map(TradeTick.list_from_capsule, reader) - # ticks = list(itertools.chain(*data)) - # print(ticks) - # Use rust writer metadata = { "instrument_id": "EUR/USD.SIM", "price_precision": "5", @@ -139,7 +170,7 @@ def _load_trade_ticks_into_catalog_rust(self) -> None: "data", "trade_tick.parquet", "instrument_id=EUR-USD.SIM", - "0-0-0.parquet", + f"{min_timestamp}-{max_timestamp}-0.parquet", ) os.makedirs(os.path.dirname(file_path), exist_ok=True) @@ -149,38 +180,198 @@ def _load_trade_ticks_into_catalog_rust(self) -> None: data: bytes = writer.flush_bytes() f.write(data) + return trades + + def test_get_files_for_expected_instrument_id(self): + # Arrange + self._load_quote_ticks_into_catalog_rust() + + # Act + files1 = self.catalog.get_files(cls=QuoteTick, instrument_id="USD/JPY.SIM") + files2 = self.catalog.get_files(cls=QuoteTick, instrument_id="EUR/USD.SIM") + files3 = self.catalog.get_files(cls=QuoteTick, instrument_id="USD/CHF.SIM") + + # Assert + assert files1 == [ + f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=USD-JPY.SIM/1577898000000000065-1577919652000000125-0.parquet", + ] + assert files2 == [ + f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=EUR-USD.SIM/1577898000000000065-1577919652000000125-0.parquet", + ] + assert files3 == [] + + def 
test_get_files_for_no_instrument_id(self): + # Arrange + self._load_quote_ticks_into_catalog_rust() + + # Act + files = self.catalog.get_files(cls=QuoteTick) + + # Assert + assert files == [ + f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=EUR-USD.SIM/1577898000000000065-1577919652000000125-0.parquet", + f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=USD-JPY.SIM/1577898000000000065-1577919652000000125-0.parquet", + ] + + def test_get_files_for_timestamp_range(self): + # Arrange + self._load_quote_ticks_into_catalog_rust() + start = 1577898000000000065 + end = 1577919652000000125 + + # Act + files1 = self.catalog.get_files( + cls=QuoteTick, + instrument_id="EUR/USD.SIM", + start_nanos=start, + end_nanos=start, + ) + + files2 = self.catalog.get_files( + cls=QuoteTick, + instrument_id="EUR/USD.SIM", + start_nanos=0, + end_nanos=start - 1, + ) + + files3 = self.catalog.get_files( + cls=QuoteTick, + instrument_id="EUR/USD.SIM", + start_nanos=end + 1, + end_nanos=sys.maxsize, + ) + + # Assert + assert files1 == [ + f"{self.catalog.path}/data/quote_tick.parquet/instrument_id=EUR-USD.SIM/1577898000000000065-1577919652000000125-0.parquet", + ] + assert files2 == [] + assert files3 == [] + def test_data_catalog_quote_ticks_as_nautilus_use_rust(self): # Arrange self._load_quote_ticks_into_catalog_rust() # Act - quote_ticks = self.catalog.quote_ticks(as_nautilus=True, use_rust=True) + quote_ticks = self.catalog.quote_ticks( + as_nautilus=True, + use_rust=True, + instrument_ids=["EUR/USD.SIM"], + ) # Assert assert all(isinstance(tick, QuoteTick) for tick in quote_ticks) assert len(quote_ticks) == 9500 - def test_data_catalog_quote_ticks_use_rust(self): + def test_data_catalog_quote_ticks_as_nautilus_use_rust_with_date_range(self): + # Arrange + self._load_quote_ticks_into_catalog_rust() + + start_timestamp = 1577898181000000440 # index 44 + end_timestamp = 1577898572000000953 # index 99 + + # Act + quote_ticks = self.catalog.quote_ticks( + as_nautilus=True, + use_rust=True, + instrument_ids=["EUR/USD.SIM"], + start=start_timestamp, + end=end_timestamp, + ) + + # Assert + assert all(isinstance(tick, QuoteTick) for tick in quote_ticks) + assert len(quote_ticks) == 54 + assert quote_ticks[0].ts_init == start_timestamp + assert quote_ticks[-1].ts_init == end_timestamp + + def test_data_catalog_quote_ticks_as_nautilus_use_rust_with_date_range_with_multiple_instrument_ids( + self, + ): # Arrange self._load_quote_ticks_into_catalog_rust() + start_timestamp = 1577898181000000440 # EUR/USD.SIM index 44 + end_timestamp = 1577898572000000953 # EUR/USD.SIM index 99 + + # Act + quote_ticks = self.catalog.quote_ticks( + as_nautilus=True, + use_rust=True, + instrument_ids=["EUR/USD.SIM", "USD/JPY.SIM"], + start=start_timestamp, + end=end_timestamp, + ) + + # Assert + assert all(isinstance(tick, QuoteTick) for tick in quote_ticks) + + instrument1_quote_ticks = [t for t in quote_ticks if str(t.instrument_id) == "EUR/USD.SIM"] + assert len(instrument1_quote_ticks) == 54 + + instrument2_quote_ticks = [t for t in quote_ticks if str(t.instrument_id) == "USD/JPY.SIM"] + assert len(instrument2_quote_ticks) == 54 + + assert quote_ticks[0].ts_init == start_timestamp + assert quote_ticks[-1].ts_init == end_timestamp + + def test_data_catalog_use_rust_quote_ticks_round_trip(self): + # Arrange + instrument = TestInstrumentProvider.default_fx_ccy("EUR/USD") + + parquet_data_glob_path = TEST_DATA_DIR + "/quote_tick_data.parquet" + assert os.path.exists(parquet_data_glob_path) + + def block_parser(df): + df 
= df.set_index("ts_event") + df.index = df.ts_init.apply(unix_nanos_to_dt) + objs = QuoteTickDataWrangler(instrument=instrument).process(df) + yield from objs + + # Act + process_files( + glob_path=parquet_data_glob_path, + reader=ParquetByteReader(parser=block_parser), + use_rust=True, + catalog=self.catalog, + instrument=instrument, + ) + + quote_ticks = self.catalog.quote_ticks( + as_nautilus=True, + use_rust=True, + instrument_ids=["EUR/USD.SIM"], + ) + + assert all(isinstance(tick, QuoteTick) for tick in quote_ticks) + assert len(quote_ticks) == 9500 + + def test_data_catalog_quote_ticks_use_rust(self): + # Arrange + quotes = self._load_quote_ticks_into_catalog_rust() + # Act - qdf = self.catalog.quote_ticks(use_rust=True) + qdf = self.catalog.quote_ticks(use_rust=True, instrument_ids=["EUR/USD.SIM"]) # Assert assert isinstance(qdf, pd.DataFrame) assert len(qdf) == 9500 - # assert qdf.bid.equals(pd.Series([float(q.bid) for q in quotes])) - # assert qdf.ask.equals(pd.Series([float(q.ask) for q in quotes])) - # assert qdf.bid_size.equals(pd.Series([float(q.bid_size) for q in quotes])) - # assert qdf.ask_size.equals(pd.Series([float(q.ask_size) for q in quotes])) + assert qdf.bid.equals(pd.Series([float(q.bid) for q in quotes])) + assert qdf.ask.equals(pd.Series([float(q.ask) for q in quotes])) + assert qdf.bid_size.equals(pd.Series([float(q.bid_size) for q in quotes])) + assert qdf.ask_size.equals(pd.Series([float(q.ask_size) for q in quotes])) + assert (qdf.instrument_id == "EUR/USD.SIM").all def test_data_catalog_trade_ticks_as_nautilus_use_rust(self): # Arrange self._load_trade_ticks_into_catalog_rust() # Act - trade_ticks = self.catalog.trade_ticks(as_nautilus=True, use_rust=True) + trade_ticks = self.catalog.trade_ticks( + as_nautilus=True, + use_rust=True, + instrument_ids=["EUR/USD.SIM"], + ) # Assert assert all(isinstance(tick, TradeTick) for tick in trade_ticks) @@ -367,20 +558,26 @@ def test_data_catalog_filter(self): assert len(filtered_deltas) == 351 def test_data_catalog_generic_data(self): - + # Arrange TestPersistenceStubs.setup_news_event_persistence() process_files( glob_path=f"{TEST_DATA_DIR}/news_events.csv", reader=CSVReader(block_parser=TestPersistenceStubs.news_event_parser), catalog=self.catalog, ) + + # Act df = self.catalog.generic_data(cls=NewsEventData, filter_expr=ds.field("currency") == "USD") - assert len(df) == 22925 data = self.catalog.generic_data( cls=NewsEventData, filter_expr=ds.field("currency") == "CHF", as_nautilus=True, ) + + # Assert + assert df is not None + assert data is not None + assert len(df) == 22925 assert len(data) == 2745 and isinstance(data[0], GenericData) def test_data_catalog_bars(self): @@ -436,7 +633,6 @@ def test_catalog_bar_query_instrument_id(self): assert "instrument_id" in data.columns def test_catalog_projections(self): - projections = {"tid": ds.field("trade_id")} trades = self.catalog.trade_ticks(projections=projections) assert "tid" in trades.columns diff --git a/tests/unit_tests/persistence/test_catalog_rust.py b/tests/unit_tests/persistence/test_catalog_rust.py index fa207b710c0d..d43e558d1306 100644 --- a/tests/unit_tests/persistence/test_catalog_rust.py +++ b/tests/unit_tests/persistence/test_catalog_rust.py @@ -15,6 +15,7 @@ import itertools import os +import tempfile import pandas as pd @@ -22,7 +23,9 @@ from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReader from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReaderType from nautilus_trader.core.nautilus_pyo3.persistence import 
ParquetType +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetWriter from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.tick import TradeTick from tests import TEST_DATA_DIR @@ -35,8 +38,8 @@ def test_file_parquet_reader_quote_ticks(): ParquetReaderType.File, ) - data = map(QuoteTick.list_from_capsule, reader) - ticks = list(itertools.chain(*data)) + mapped_chunk = map(QuoteTick.list_from_capsule, reader) + ticks = list(itertools.chain(*mapped_chunk)) csv_data_path = os.path.join(TEST_DATA_DIR, "quote_tick_data.csv") df = pd.read_csv(csv_data_path, header=None, names="dates bid ask bid_size".split()) @@ -88,6 +91,100 @@ def test_buffer_parquet_reader_quote_ticks(): # ) + +def test_file_parquet_writer_quote_ticks(): + parquet_data_path = os.path.join(PACKAGE_ROOT, "tests/test_data/quote_tick_data.parquet") + + # Write quotes + reader = ParquetReader( + parquet_data_path, + 1000, + ParquetType.QuoteTick, + ParquetReaderType.File, + ) + + metadata = { + "instrument_id": "EUR/USD.SIM", + "price_precision": "5", + "size_precision": "0", + } + writer = ParquetWriter( + ParquetType.QuoteTick, + metadata, + ) + + file_path = tempfile.mktemp() + + for chunk in reader: + writer.write(chunk) + + with open(file_path, "wb") as f: + data: bytes = writer.flush_bytes() + f.write(data) + + # Read quotes again + reader = ParquetReader( + file_path, + 1000, + ParquetType.QuoteTick, + ParquetReaderType.File, + ) + + # Cleanup + os.remove(file_path) + + mapped_chunk = map(QuoteTick.list_from_capsule, reader) + quotes = list(itertools.chain(*mapped_chunk)) + + assert len(quotes) == 9500 + + +def test_file_parquet_writer_trade_ticks(): + # Read trades + parquet_data_path = os.path.join(TEST_DATA_DIR, "trade_tick_data.parquet") + assert os.path.exists(parquet_data_path) + + reader = ParquetReader( + parquet_data_path, + 100, + ParquetType.TradeTick, + ParquetReaderType.File, + ) + + # Write trades + metadata = { + "instrument_id": "EUR/USD.SIM", + "price_precision": "5", + "size_precision": "0", + } + writer = ParquetWriter( + ParquetType.TradeTick, + metadata, + ) + + file_path = tempfile.mktemp() + with open(file_path, "wb") as f: + for chunk in reader: + writer.write(chunk) + data: bytes = writer.flush_bytes() + f.write(data) + + # Read trades back from the temp file to verify the write + reader = ParquetReader( + file_path, + 100, + ParquetType.TradeTick, + ParquetReaderType.File, + ) + + # Cleanup + os.remove(file_path) + + mapped_chunk = map(TradeTick.list_from_capsule, reader) + trades = list(itertools.chain(*mapped_chunk)) + + assert len(trades) == 100 + + def get_peak_memory_usage_gb(): import platform diff --git a/tests/unit_tests/persistence/test_streaming.py b/tests/unit_tests/persistence/test_streaming.py index 559435db2747..fa18fb1206cd 100644 --- a/tests/unit_tests/persistence/test_streaming.py +++ b/tests/unit_tests/persistence/test_streaming.py @@ -32,7 +32,7 @@ from nautilus_trader.model.data.venue import InstrumentStatusUpdate from nautilus_trader.persistence.external.core import process_files from nautilus_trader.persistence.external.readers import CSVReader -from nautilus_trader.persistence.streaming import generate_signal_class +from nautilus_trader.persistence.streaming.writer import generate_signal_class from nautilus_trader.test_kit.mocks.data import NewsEventData from nautilus_trader.test_kit.mocks.data import data_catalog_setup from nautilus_trader.test_kit.stubs.persistence import TestPersistenceStubs @@ -71,12 +71,16 @@ def
test_feather_writer(self): # Arrange instrument = self.catalog.instruments(as_nautilus=True)[0] + + catalog_path = "/.nautilus/catalog" + run_config = BetfairTestStubs.betfair_backtest_run_config( - catalog_path="/.nautilus/catalog", + catalog_path=catalog_path, catalog_fs_protocol="memory", instrument_id=instrument.id.value, + flush_interval_ms=5000, ) - run_config.engine.streaming.flush_interval_ms = 5000 + node = BacktestNode(configs=[run_config]) # Act @@ -109,7 +113,6 @@ def test_feather_writer(self): assert result == expected def test_feather_writer_generic_data(self): - # Arrange TestPersistenceStubs.setup_news_event_persistence() @@ -157,7 +160,6 @@ def test_feather_writer_generic_data(self): @pytest.mark.skip(reason="fix after merge") def test_feather_writer_signal_data(self): - # Arrange instrument_id = self.catalog.instruments(as_nautilus=True)[0].id.value data_config = BacktestDataConfig( diff --git a/tests/unit_tests/persistence/test_streaming_batching.py b/tests/unit_tests/persistence/test_streaming_batching.py new file mode 100644 index 000000000000..ed2cc3aa5c6f --- /dev/null +++ b/tests/unit_tests/persistence/test_streaming_batching.py @@ -0,0 +1,351 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
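# --- Annotation (hedged sketch; class and field names are assumptions) ---
# The feather-writer test now passes flush_interval_ms through the run-config
# helper instead of mutating run_config.engine.streaming after construction.
# The equivalent direct configuration would look roughly like:
from nautilus_trader.config import StreamingConfig  # assumption

streaming = StreamingConfig(
    catalog_path="/.nautilus/catalog",
    fs_protocol="memory",    # assumption: field name
    flush_interval_ms=5000,  # flush feather writers every 5 seconds
)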
+# ------------------------------------------------------------------------------------------------- + +import itertools +import os + +import pandas as pd + +from nautilus_trader.backtest.data.providers import TestInstrumentProvider +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReader +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetReaderType +from nautilus_trader.core.nautilus_pyo3.persistence import ParquetType +from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.identifiers import Venue +from nautilus_trader.persistence.streaming.batching import generate_batches_rust +from tests import TEST_DATA_DIR + + +class TestBatchingData: + test_parquet_files = [ + os.path.join(TEST_DATA_DIR, "quote_tick_eurusd_2019_sim_rust.parquet"), + os.path.join(TEST_DATA_DIR, "quote_tick_usdjpy_2019_sim_rust.parquet"), + os.path.join(TEST_DATA_DIR, "bars_eurusd_2019_sim.parquet"), + ] + + test_instruments = [ + TestInstrumentProvider.default_fx_ccy("EUR/USD", venue=Venue("SIM")), + TestInstrumentProvider.default_fx_ccy("USD/JPY", venue=Venue("SIM")), + TestInstrumentProvider.default_fx_ccy("EUR/USD", venue=Venue("SIM")), + ] + test_instrument_ids = [x.id for x in test_instruments] + + +class TestGenerateBatches(TestBatchingData): + def test_generate_batches_returns_empty_list_before_start_timestamp_with_end_timestamp(self): + start_timestamp = 1546389021944999936 + batch_gen = generate_batches_rust( + files=[self.test_parquet_files[1]], + cls=QuoteTick, + batch_size=1000, + start_nanos=start_timestamp, + end_nanos=1546394394948999936, + ) + batches = list(batch_gen) + assert [len(x) for x in batches] == [0, 0, 0, 0, 172, 1000, 1000, 1000, 1000, 887] + assert batches[4][0].ts_init == start_timestamp + + ################################# + batch_gen = generate_batches_rust( + files=[self.test_parquet_files[1]], + cls=QuoteTick, + batch_size=1000, + start_nanos=start_timestamp - 1, + end_nanos=1546394394948999936, + ) + batches = list(batch_gen) + assert [len(x) for x in batches] == [0, 0, 0, 0, 172, 1000, 1000, 1000, 1000, 887] + assert batches[4][0].ts_init == start_timestamp + + def test_generate_batches_returns_batches_of_expected_size(self): + batch_gen = generate_batches_rust( + files=[self.test_parquet_files[1]], + cls=QuoteTick, + batch_size=1000, + ) + batches = list(batch_gen) + assert all([len(x) == 1000 for x in batches]) + + def test_generate_batches_returns_empty_list_before_start_timestamp(self): + # Arrange + parquet_data_path = self.test_parquet_files[0] + start_timestamp = 1546383601403000064 # index 10 (1st item in batch) + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=10, + start_nanos=start_timestamp, + ) + + # Act + batch = next(batch_gen, None) + + # Assert + assert batch == [] + + ############################################# + # Arrange + parquet_data_path = self.test_parquet_files[0] + start_timestamp = 1546383601862999808 # index 18 (last item in batch) + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=10, + start_nanos=start_timestamp, + ) + # Act + batch = next(batch_gen, None) + + # Assert + assert batch == [] + + ################################################### + # Arrange + parquet_data_path = self.test_parquet_files[0] + start_timestamp = 1546383601352000000 # index 9 + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=10, + start_nanos=start_timestamp, + ) + + # Act + 
batch = next(batch_gen, None) + + # Assert + assert batch != [] + + def test_generate_batches_trims_first_batch_by_start_timestamp(self): + def create_test_batch_gen(start_timestamp): + parquet_data_path = self.test_parquet_files[0] + return generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=10, + start_nanos=start_timestamp, + ) + + start_timestamp = 1546383605776999936 + batches = list( + generate_batches_rust( + files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=300, + start_nanos=start_timestamp, + ), + ) + + first_timestamp = batches[0][0].ts_init + assert first_timestamp == start_timestamp + + ############################################################### + # Timestamp, index -1, exists + start_timestamp = 1546383601301000192 # index 8 + batch_gen = create_test_batch_gen(start_timestamp) + + # Act + batches = list(batch_gen) + + # Assert + first_timestamp = batches[0][0].ts_init + assert first_timestamp == start_timestamp + + ############################################################### + # Timestamp, index 0, exists + start_timestamp = 1546383600078000128 # index 0 + batch_gen = create_test_batch_gen(start_timestamp) + + # Act + batches = list(batch_gen) + + # Assert + first_timestamp = batches[0][0].ts_init + assert first_timestamp == start_timestamp + + ############################################################### + # Timestamp, index 0, NOT exists + start_timestamp = 1546383600078000128 # index 0 + batch_gen = create_test_batch_gen(start_timestamp - 1) + + # Act + batches = list(batch_gen) + + # Assert + first_timestamp = batches[0][0].ts_init + assert first_timestamp == start_timestamp + + ############################################################### + # Timestamp, index -1, NOT exists + start_timestamp = 1546383601301000192 # index 8 + batch_gen = create_test_batch_gen(start_timestamp - 1) + + # Act + batches = list(batch_gen) + + # Assert + first_timestamp = batches[0][0].ts_init + assert first_timestamp == start_timestamp + ############################################################### + # Arrange + + start_timestamp = 1546383600691000064 + batch_gen = create_test_batch_gen(start_timestamp) + + # Act + batches = list(batch_gen) + + # Assert + first_batch = batches[0] + print(len(first_batch)) + assert len(first_batch) == 5 + + first_timestamp = first_batch[0].ts_init + assert first_timestamp == start_timestamp + ############################################################### + # Starts on next timestamp if start_timestamp NOT exists + # Arrange + start_timestamp = 1546383600078000128 # index 0 + next_timestamp = 1546383600180000000 # index 1 + batch_gen = create_test_batch_gen(start_timestamp + 1) + + # Act + batches = list(batch_gen) + + # Assert + first_timestamp = batches[0][0].ts_init + assert first_timestamp == next_timestamp + + def test_generate_batches_trims_end_batch_returns_no_empty_batch(self): + parquet_data_path = self.test_parquet_files[0] + + # Timestamp, index -1, NOT exists + # Arrange + end_timestamp = 1546383601914000128 # index 19 + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=10, + end_nanos=end_timestamp, + ) + + # Act + batches = list(batch_gen) + + # Assert + last_batch = batches[-1] + assert last_batch != [] + + def test_generate_batches_trims_end_batch_by_end_timestamp(self): + def create_test_batch_gen(end_timestamp): + parquet_data_path = self.test_parquet_files[0] + return generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + 
batch_size=10, + end_nanos=end_timestamp, + ) + + ############################################################### + # Timestamp, index 0 + end_timestamp = 1546383601403000064 # index 10 + batches = list(create_test_batch_gen(end_timestamp)) + last_timestamp = batches[-1][-1].ts_init + assert last_timestamp == end_timestamp + + batches = list(create_test_batch_gen(end_timestamp + 1)) + last_timestamp = batches[-1][-1].ts_init + assert last_timestamp == end_timestamp + + ############################################################### + # Timestamp index -1 + end_timestamp = 1546383601914000128 # index 19 + + batches = list(create_test_batch_gen(end_timestamp)) + last_timestamp = batches[-1][-1].ts_init + assert last_timestamp == end_timestamp + + batches = list(create_test_batch_gen(end_timestamp + 1)) + last_timestamp = batches[-1][-1].ts_init + assert last_timestamp == end_timestamp + + ############################################################### + # Ends on prev timestamp + + end_timestamp = 1546383601301000192 # index 8 + prev_timestamp = 1546383601197999872 # index 7 + batches = list(create_test_batch_gen(end_timestamp - 1)) + last_timestamp = batches[-1][-1].ts_init + assert last_timestamp == prev_timestamp + + def test_generate_batches_returns_valid_data(self): + # Arrange + parquet_data_path = self.test_parquet_files[0] + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=300, + ) + reader = ParquetReader( + parquet_data_path, + 1000, + ParquetType.QuoteTick, + ParquetReaderType.File, + ) + mapped_chunk = map(QuoteTick.list_from_capsule, reader) + expected = list(itertools.chain(*mapped_chunk)) + + # Act + results = [] + for batch in batch_gen: + results.extend(batch) + + # Assert + assert len(results) == len(expected) + assert pd.Series([x.ts_init for x in results]).equals( + pd.Series([x.ts_init for x in expected]), + ) + + def test_generate_batches_returns_has_inclusive_start_and_end(self): + # Arrange + parquet_data_path = self.test_parquet_files[0] + + reader = ParquetReader( + parquet_data_path, + 1000, + ParquetType.QuoteTick, + ParquetReaderType.File, + ) + mapped_chunk = map(QuoteTick.list_from_capsule, reader) + expected = list(itertools.chain(*mapped_chunk)) + + batch_gen = generate_batches_rust( + files=[parquet_data_path], + cls=QuoteTick, + batch_size=500, + start_nanos=expected[0].ts_init, + end_nanos=expected[-1].ts_init, + ) + + # Act + results = [] + for batch in batch_gen: + results.extend(batch) + + # Assert + assert len(results) == len(expected) + assert pd.Series([x.ts_init for x in results]).equals( + pd.Series([x.ts_init for x in expected]), + ) diff --git a/tests/unit_tests/persistence/test_streaming_engine.py b/tests/unit_tests/persistence/test_streaming_engine.py new file mode 100644 index 000000000000..02e17419f786 --- /dev/null +++ b/tests/unit_tests/persistence/test_streaming_engine.py @@ -0,0 +1,636 @@ +# ------------------------------------------------------------------------------------------------- +# Copyright (C) 2015-2023 Nautech Systems Pty Ltd. All rights reserved. +# https://nautechsystems.io +# +# Licensed under the GNU Lesser General Public License Version 3.0 (the "License"); +# You may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at https://www.gnu.org/licenses/lgpl-3.0.en.html +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------------------------- + +import os + +import fsspec +import pandas as pd +import pytest + +from nautilus_trader.adapters.betfair.providers import BetfairInstrumentProvider +from nautilus_trader.backtest.data.providers import TestInstrumentProvider +from nautilus_trader.backtest.data.wranglers import BarDataWrangler +from nautilus_trader.backtest.data.wranglers import QuoteTickDataWrangler +from nautilus_trader.backtest.node import BacktestNode +from nautilus_trader.config import BacktestDataConfig +from nautilus_trader.config import BacktestEngineConfig +from nautilus_trader.config import BacktestRunConfig +from nautilus_trader.core.datetime import unix_nanos_to_dt +from nautilus_trader.model.data.bar import Bar +from nautilus_trader.model.data.bar import BarType +from nautilus_trader.model.data.tick import QuoteTick +from nautilus_trader.model.data.venue import InstrumentStatusUpdate +from nautilus_trader.model.identifiers import Venue +from nautilus_trader.model.orderbook.data import OrderBookData +from nautilus_trader.persistence.external.core import process_files +from nautilus_trader.persistence.external.readers import CSVReader +from nautilus_trader.persistence.external.readers import ParquetReader as ParquetByteReader +from nautilus_trader.persistence.funcs import parse_bytes +from nautilus_trader.persistence.streaming.batching import generate_batches +from nautilus_trader.persistence.streaming.batching import generate_batches_rust +from nautilus_trader.persistence.streaming.engine import StreamingEngine +from nautilus_trader.persistence.streaming.engine import _BufferIterator +from nautilus_trader.persistence.streaming.engine import _StreamingBuffer +from nautilus_trader.test_kit.mocks.data import NewsEventData +from nautilus_trader.test_kit.mocks.data import data_catalog_setup +from nautilus_trader.test_kit.stubs.persistence import TestPersistenceStubs +from tests import TEST_DATA_DIR +from tests.integration_tests.adapters.betfair.test_kit import BetfairTestStubs + + +class TestBatchingData: + test_parquet_files = [ + os.path.join(TEST_DATA_DIR, "quote_tick_eurusd_2019_sim_rust.parquet"), + os.path.join(TEST_DATA_DIR, "quote_tick_usdjpy_2019_sim_rust.parquet"), + os.path.join(TEST_DATA_DIR, "bars_eurusd_2019_sim.parquet"), + ] + + test_instruments = [ + TestInstrumentProvider.default_fx_ccy("EUR/USD", venue=Venue("SIM")), + TestInstrumentProvider.default_fx_ccy("USD/JPY", venue=Venue("SIM")), + TestInstrumentProvider.default_fx_ccy("EUR/USD", venue=Venue("SIM")), + ] + test_instrument_ids = [x.id for x in test_instruments] + + +class TestBuffer(TestBatchingData): + @pytest.mark.parametrize( + "trim_timestamp,expected", + [ + [1546383600588999936, 1546383600588999936], # 4, 4 + [1546383600588999936 + 1, 1546383600588999936], # 4, 4 + [1546383600588999936 - 1, 1546383600487000064], # 4, 3 + ], + ) + def test_removed_chunk_has_correct_last_timestamp( + self, + trim_timestamp: int, + expected: int, + ): + # Arrange + buffer = _StreamingBuffer( + generate_batches_rust( + 
files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=10, + ), + ) + + # Act + buffer.add_data() + removed = buffer.remove_front(trim_timestamp) # timestamp exists + + # Assert + assert removed[-1].ts_init == expected + + @pytest.mark.parametrize( + "trim_timestamp,expected", + [ + [1546383600588999936, 1546383600691000064], # 4, 5 + [1546383600588999936 + 1, 1546383600691000064], # 4, 5 + [1546383600588999936 - 1, 1546383600588999936], # 4, 4 + ], + ) + def test_streaming_buffer_remove_front_has_correct_next_timestamp( + self, + trim_timestamp: int, + expected: int, + ): + # Arrange + buffer = _StreamingBuffer( + generate_batches_rust( + files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=10, + ), + ) + + # Act + buffer.add_data() + buffer.remove_front(trim_timestamp) # timestamp exists + + # Assert + next_timestamp = buffer._data[0].ts_init + assert next_timestamp == expected + + +class TestBufferIterator(TestBatchingData): + def test_iterate_returns_expected_timestamps_single(self): + # Arrange + batches = generate_batches_rust( + files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=1000, + ) + + buffer = _StreamingBuffer(batches=batches) + + iterator = _BufferIterator(buffers=[buffer]) + + expected = list(pd.read_parquet(self.test_parquet_files[0]).ts_event) + + # Act + timestamps = [] + for batch in iterator: + timestamps.extend([x.ts_init for x in batch]) + + # Assert + assert len(timestamps) == len(expected) + assert timestamps == expected + + def test_iterate_returns_expected_timestamps(self): + # Arrange + expected = sorted( + list(pd.read_parquet(self.test_parquet_files[0]).ts_event) + + list(pd.read_parquet(self.test_parquet_files[1]).ts_event), + ) + + buffers = [ + _StreamingBuffer( + generate_batches_rust( + files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=1000, + ), + ), + _StreamingBuffer( + generate_batches_rust( + files=[self.test_parquet_files[1]], + cls=QuoteTick, + batch_size=1000, + ), + ), + ] + + iterator = _BufferIterator(buffers=buffers) + + # Act + timestamps = [] + for batch in iterator: + timestamps.extend([x.ts_init for x in batch]) + + # Assert + assert len(timestamps) == len(expected) + assert timestamps == expected + + def test_iterate_returns_expected_timestamps_with_start_end_range_rust(self): + # Arrange + start_timestamps = (1546383605776999936, 1546389021944999936) + end_timestamps = (1546390125908000000, 1546394394948999936) + buffers = [ + _StreamingBuffer( + generate_batches_rust( + files=[self.test_parquet_files[0]], + cls=QuoteTick, + batch_size=1000, + start_nanos=start_timestamps[0], + end_nanos=end_timestamps[0], + ), + ), + _StreamingBuffer( + generate_batches_rust( + files=[self.test_parquet_files[1]], + cls=QuoteTick, + batch_size=1000, + start_nanos=start_timestamps[1], + end_nanos=end_timestamps[1], + ), + ), + ] + + buffer_iterator = _BufferIterator(buffers=buffers) + + # Act + objs = [] + for batch in buffer_iterator: + objs.extend(batch) + + # Assert + instrument_1_timestamps = [ + x.ts_init for x in objs if x.instrument_id == self.test_instrument_ids[0] + ] + instrument_2_timestamps = [ + x.ts_init for x in objs if x.instrument_id == self.test_instrument_ids[1] + ] + assert instrument_1_timestamps[0] == start_timestamps[0] + assert instrument_1_timestamps[-1] == end_timestamps[0] + + assert instrument_2_timestamps[0] == start_timestamps[1] + assert instrument_2_timestamps[-1] == end_timestamps[1] + + timestamps = [x.ts_init for x in objs] + assert timestamps == sorted(timestamps) 
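+
+    # The behaviour pinned down above and below is effectively a k-way merge:
+    # _BufferIterator interleaves the batches from each _StreamingBuffer so the
+    # combined stream stays globally sorted by ts_init while each buffer's own
+    # start/end bounds are respected. The next test extends this to three
+    # buffers, mixing Rust-decoded QuoteTicks with Bars read via the
+    # pure-Python generate_batches path.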
+
+    def test_iterate_returns_expected_timestamps_with_start_end_range_and_bars(self):
+        # Arrange
+        start_timestamps = (1546383605776999936, 1546389021944999936, 1559224800000000000)
+        end_timestamps = (1546390125908000000, 1546394394948999936, 1577710800000000000)
+
+        buffers = [
+            _StreamingBuffer(
+                generate_batches_rust(
+                    files=[self.test_parquet_files[0]],
+                    cls=QuoteTick,
+                    batch_size=1000,
+                    start_nanos=start_timestamps[0],
+                    end_nanos=end_timestamps[0],
+                ),
+            ),
+            _StreamingBuffer(
+                generate_batches_rust(
+                    files=[self.test_parquet_files[1]],
+                    cls=QuoteTick,
+                    batch_size=1000,
+                    start_nanos=start_timestamps[1],
+                    end_nanos=end_timestamps[1],
+                ),
+            ),
+            _StreamingBuffer(
+                generate_batches(
+                    files=[self.test_parquet_files[2]],
+                    cls=Bar,
+                    instrument_id=self.test_instrument_ids[2],
+                    batch_size=1000,
+                    fs=fsspec.filesystem("file"),
+                    start_nanos=start_timestamps[2],
+                    end_nanos=end_timestamps[2],
+                ),
+            ),
+        ]
+
+        # Act
+        results = []
+        buffer_iterator = _BufferIterator(buffers=buffers)
+
+        for batch in buffer_iterator:
+            results.extend(batch)
+
+        # Assert
+        bars = [x for x in results if isinstance(x, Bar)]
+
+        quote_ticks = [x for x in results if isinstance(x, QuoteTick)]
+
+        instrument_1_timestamps = [
+            x.ts_init for x in quote_ticks if x.instrument_id == self.test_instrument_ids[0]
+        ]
+        instrument_2_timestamps = [
+            x.ts_init for x in quote_ticks if x.instrument_id == self.test_instrument_ids[1]
+        ]
+        instrument_3_timestamps = [
+            x.ts_init for x in bars if x.bar_type.instrument_id == self.test_instrument_ids[2]
+        ]
+
+        assert instrument_1_timestamps[0] == start_timestamps[0]
+        assert instrument_1_timestamps[-1] == end_timestamps[0]
+
+        assert instrument_2_timestamps[0] == start_timestamps[1]
+        assert instrument_2_timestamps[-1] == end_timestamps[1]
+
+        assert instrument_3_timestamps[0] == start_timestamps[2]
+        assert instrument_3_timestamps[-1] == end_timestamps[2]
+
+        timestamps = [x.ts_init for x in results]
+        assert timestamps == sorted(timestamps)
+
+
+class TestStreamingEngine(TestBatchingData):
+    def setup(self):
+        self.catalog = data_catalog_setup(protocol="file")
+        self._load_bars_into_catalog()
+        self._load_quote_ticks_into_catalog_rust()
+
+    def _load_bars_into_catalog(self):
+        instrument = self.test_instruments[2]
+        parquet_data_path = self.test_parquet_files[2]
+
+        def parser(df):
+            df.index = df["ts_init"].apply(unix_nanos_to_dt)
+            df = df["open high low close".split()]
+            for col in df:
+                df[col] = df[col].astype(float)
+            objs = BarDataWrangler(
+                bar_type=BarType.from_str("EUR/USD.SIM-1-HOUR-BID-EXTERNAL"),
+                instrument=instrument,
+            ).process(df)
+            yield from objs
+
+        process_files(
+            glob_path=parquet_data_path,
+            reader=ParquetByteReader(parser=parser),
+            catalog=self.catalog,
+            use_rust=False,
+        )
+
+    def _load_quote_ticks_into_catalog_rust(self):
+        for instrument, parquet_data_path in zip(
+            self.test_instruments[:2],
+            self.test_parquet_files[:2],
+        ):
+
+            def parser(df):
+                df.index = df["ts_init"].apply(unix_nanos_to_dt)
+                df = df["bid ask bid_size ask_size".split()]
+                for col in df:
+                    df[col] = df[col].astype(float)
+                objs = QuoteTickDataWrangler(instrument=instrument).process(df)
+                yield from objs
+
+            process_files(
+                glob_path=parquet_data_path,
+                reader=ParquetByteReader(parser=parser),  # noqa: B023
+                catalog=self.catalog,
+                use_rust=True,
+                instrument=instrument,
+            )
+
+    def test_iterate_returns_expected_timestamps_single(self):
+        # Arrange
+        config = BacktestDataConfig(
+            catalog_path=str(self.catalog.path),
+            instrument_id=str(self.test_instrument_ids[0]),
+            data_cls=QuoteTick,
+            use_rust=True,
+        )
+
+        expected = list(pd.read_parquet(self.test_parquet_files[0]).ts_event)
+
+        iterator = StreamingEngine(
+            data_configs=[config],
+            target_batch_size_bytes=parse_bytes("10kib"),
+        )
+
+        # Act
+        timestamps = []
+        for batch in iterator:
+            timestamps.extend([x.ts_init for x in batch])
+
+        # Assert
+        assert len(timestamps) == len(expected)
+        assert timestamps == expected
+
+    def test_iterate_returns_expected_timestamps(self):
+        # Arrange
+        configs = [
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[0]),
+                data_cls=QuoteTick,
+                use_rust=True,
+            ),
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[1]),
+                data_cls=QuoteTick,
+                use_rust=True,
+            ),
+        ]
+
+        expected = sorted(
+            list(pd.read_parquet(self.test_parquet_files[0]).ts_event)
+            + list(pd.read_parquet(self.test_parquet_files[1]).ts_event),
+        )
+
+        iterator = StreamingEngine(
+            data_configs=configs,
+            target_batch_size_bytes=parse_bytes("10kib"),
+        )
+
+        # Act
+        timestamps = []
+        for batch in iterator:
+            timestamps.extend([x.ts_init for x in batch])
+
+        # Assert
+        assert len(timestamps) == len(expected)
+        assert timestamps == expected
+
+    def test_iterate_returns_expected_timestamps_with_start_end_range_rust(
+        self,
+    ):
+        # Arrange
+        start_timestamps = (1546383605776999936, 1546389021944999936)
+        end_timestamps = (1546390125908000000, 1546394394948999936)
+
+        configs = [
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[0]),
+                data_cls=QuoteTick,
+                use_rust=True,
+                start_time=unix_nanos_to_dt(start_timestamps[0]),
+                end_time=unix_nanos_to_dt(end_timestamps[0]),
+            ),
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[1]),
+                data_cls=QuoteTick,
+                use_rust=True,
+                start_time=unix_nanos_to_dt(start_timestamps[1]),
+                end_time=unix_nanos_to_dt(end_timestamps[1]),
+            ),
+        ]
+
+        iterator = StreamingEngine(
+            data_configs=configs,
+            target_batch_size_bytes=parse_bytes("10kib"),
+        )
+
+        # Act
+        objs = []
+        for batch in iterator:
+            objs.extend(batch)
+
+        # Assert
+        instrument_1_timestamps = [
+            x.ts_init for x in objs if x.instrument_id == self.test_instrument_ids[0]
+        ]
+        instrument_2_timestamps = [
+            x.ts_init for x in objs if x.instrument_id == self.test_instrument_ids[1]
+        ]
+        assert instrument_1_timestamps[0] == start_timestamps[0]
+        assert instrument_1_timestamps[-1] == end_timestamps[0]
+
+        assert instrument_2_timestamps[0] == start_timestamps[1]
+        assert instrument_2_timestamps[-1] == end_timestamps[1]
+
+        timestamps = [x.ts_init for x in objs]
+        assert timestamps == sorted(timestamps)
+
+    def test_iterate_returns_expected_timestamps_with_start_end_range_and_bars(
+        self,
+    ):
+        # Arrange
+        start_timestamps = (1546383605776999936, 1546389021944999936, 1577725200000000000)
+        end_timestamps = (1546390125908000000, 1546394394948999936, 1577826000000000000)
+
+        configs = [
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[0]),
+                data_cls=QuoteTick,
+                start_time=unix_nanos_to_dt(start_timestamps[0]),
+                end_time=unix_nanos_to_dt(end_timestamps[0]),
+                use_rust=True,
+            ),
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[1]),
+                data_cls=QuoteTick,
+                start_time=unix_nanos_to_dt(start_timestamps[1]),
+                end_time=unix_nanos_to_dt(end_timestamps[1]),
+                use_rust=True,
+            ),
+            BacktestDataConfig(
+                catalog_path=str(self.catalog.path),
+                instrument_id=str(self.test_instrument_ids[2]),
+                data_cls=Bar,
+                start_time=unix_nanos_to_dt(start_timestamps[2]),
+                end_time=unix_nanos_to_dt(end_timestamps[2]),
+                bar_spec="1-HOUR-BID",
+                use_rust=False,
+            ),
+        ]
+
+        iterator = StreamingEngine(
+            data_configs=configs,
+            target_batch_size_bytes=parse_bytes("10kib"),
+        )
+
+        # Act
+        objs = []
+        for batch in iterator:
+            objs.extend(batch)
+
+        # Assert
+        bars = [x for x in objs if isinstance(x, Bar)]
+
+        quote_ticks = [x for x in objs if isinstance(x, QuoteTick)]
+
+        instrument_1_timestamps = [
+            x.ts_init for x in quote_ticks if x.instrument_id == self.test_instrument_ids[0]
+        ]
+        instrument_2_timestamps = [
+            x.ts_init for x in quote_ticks if x.instrument_id == self.test_instrument_ids[1]
+        ]
+        instrument_3_timestamps = [
+            x.ts_init for x in bars if x.bar_type.instrument_id == self.test_instrument_ids[2]
+        ]
+
+        assert instrument_1_timestamps[0] == start_timestamps[0]
+        assert instrument_1_timestamps[-1] == end_timestamps[0]
+
+        assert instrument_2_timestamps[0] == start_timestamps[1]
+        assert instrument_2_timestamps[-1] == end_timestamps[1]
+
+        assert instrument_3_timestamps[0] == start_timestamps[2]
+        assert instrument_3_timestamps[-1] == end_timestamps[2]
+
+        timestamps = [x.ts_init for x in objs]
+        assert timestamps == sorted(timestamps)
+
+
+class TestPersistenceBatching:
+    def setup(self):
+        self.catalog = data_catalog_setup(protocol="memory")
+        self.fs: fsspec.AbstractFileSystem = self.catalog.fs
+        self._load_data_into_catalog()
+
+    def teardown(self):
+        # Cleanup
+        path = self.catalog.path
+        fs = self.catalog.fs
+        if fs.exists(path):
+            fs.rm(path, recursive=True)
+
+    def _load_data_into_catalog(self):
+        self.instrument_provider = BetfairInstrumentProvider.from_instruments([])
+        process_files(
+            glob_path=TEST_DATA_DIR + "/1.166564490.bz2",
+            reader=BetfairTestStubs.betfair_reader(instrument_provider=self.instrument_provider),
+            instrument_provider=self.instrument_provider,
+            catalog=self.catalog,
+        )
+
+    def test_batch_files_single(self):
+        # Arrange
+        instrument_ids = self.catalog.instruments()["id"].unique().tolist()
+
+        shared_kw = dict(
+            catalog_path=str(self.catalog.path),
+            catalog_fs_protocol=self.catalog.fs.protocol,
+            data_cls=OrderBookData,
+        )
+
+        engine = StreamingEngine(
+            data_configs=[
+                BacktestDataConfig(**shared_kw, instrument_id=instrument_ids[0]),
+                BacktestDataConfig(**shared_kw, instrument_id=instrument_ids[1]),
+            ],
+            target_batch_size_bytes=parse_bytes("10kib"),
+        )
+
+        # Act
+        timestamp_chunks = []
+        for batch in engine:
+            timestamp_chunks.append([b.ts_init for b in batch])
+
+        # Assert
+        latest_timestamp = 0
+        for timestamps in timestamp_chunks:
+            assert max(timestamps) > latest_timestamp
+            latest_timestamp = max(timestamps)
+            assert timestamps == sorted(timestamps)
+
+    @pytest.mark.skip(reason="deserialization error")
+    def test_batch_generic_data(self):
+        # Arrange
+        TestPersistenceStubs.setup_news_event_persistence()
+        process_files(
+            glob_path=f"{TEST_DATA_DIR}/news_events.csv",
+            reader=CSVReader(block_parser=TestPersistenceStubs.news_event_parser),
+            catalog=self.catalog,
+        )
+        data_config = BacktestDataConfig(
+            catalog_path=self.catalog.path,
+            catalog_fs_protocol="memory",
+            data_cls=NewsEventData,
+            client_id="NewsClient",
+        )
+        # Add some arbitrary instrument data to appease BacktestEngine
+        instrument_data_config = BacktestDataConfig(
+            catalog_path=self.catalog.path,
+            catalog_fs_protocol="memory",
+            instrument_id=self.catalog.instruments(as_nautilus=True)[0].id.value,
+            data_cls=InstrumentStatusUpdate,
+        )
+        streaming = BetfairTestStubs.streaming_config(
+            catalog_path=self.catalog.path,
+        )
+        engine = BacktestEngineConfig(streaming=streaming)
+        run_config = BacktestRunConfig(
+            engine=engine,
+            data=[data_config, instrument_data_config],
+            venues=[BetfairTestStubs.betfair_venue_config()],
+            batch_size_bytes=parse_bytes("1mib"),
+        )
+
+        # Act
+        node = BacktestNode(configs=[run_config])
+        node.run()
+
+        # Assert
+        assert node
diff --git a/tests/unit_tests/portfolio/test_portfolio.py b/tests/unit_tests/portfolio/test_portfolio.py
index 0703eb057bc0..f5e5649b4fce 100644
--- a/tests/unit_tests/portfolio/test_portfolio.py
+++ b/tests/unit_tests/portfolio/test_portfolio.py
@@ -71,7 +71,7 @@ class TestPortfolio:
     def setup(self):
         # Fixture Setup
         self.clock = TestClock()
-        self.logger = Logger(self.clock)
+        self.logger = Logger(self.clock, bypass=True)

         self.trader_id = TestIdStubs.trader_id()
diff --git a/tests/unit_tests/risk/test_risk_engine.py b/tests/unit_tests/risk/test_risk_engine.py
index 9b43b361813e..7a8bc6d2ecb8 100644
--- a/tests/unit_tests/risk/test_risk_engine.py
+++ b/tests/unit_tests/risk/test_risk_engine.py
@@ -72,6 +72,7 @@ def setup(self):
         self.logger = Logger(
             clock=self.clock,
             level_stdout=LogLevel.DEBUG,
+            bypass=True,
         )

         self.trader_id = TestIdStubs.trader_id()
diff --git a/tests/unit_tests/trading/test_trading_strategy.py b/tests/unit_tests/trading/test_trading_strategy.py
index b473bd52ca7a..cb79beb6dda6 100644
--- a/tests/unit_tests/trading/test_trading_strategy.py
+++ b/tests/unit_tests/trading/test_trading_strategy.py
@@ -76,6 +76,7 @@ def setup(self):
         self.logger = Logger(
             clock=self.clock,
             level_stdout=LogLevel.DEBUG,
+            bypass=True,
         )

         self.trader_id = TestIdStubs.trader_id()
diff --git a/tests/unit_tests/trading/test_trading_trader.py b/tests/unit_tests/trading/test_trading_trader.py
index 79d76672f17e..f9edb994be14 100644
--- a/tests/unit_tests/trading/test_trading_trader.py
+++ b/tests/unit_tests/trading/test_trading_trader.py
@@ -56,7 +56,7 @@ class TestTrader:
     def setup(self):
         # Fixture Setup
         self.clock = TestClock()
-        self.logger = Logger(self.clock)
+        self.logger = Logger(self.clock, bypass=True)

         self.trader_id = TestIdStubs.trader_id()
diff --git a/version.json b/version.json
index f1967d421c89..2c63319bf177 100644
--- a/version.json
+++ b/version.json
@@ -1,6 +1,6 @@
 {
   "schemaVersion": 1,
   "label": "",
-  "message": "v1.168.0",
+  "message": "v1.169.0",
   "color": "orange"
 }
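
Note on the batching semantics the new tests pin down: generate_batches_rust treats both start_nanos and end_nanos as inclusive bounds, yields batches that fall wholly before the start bound as empty lists, trims the first and last overlapping batches, and stops reading at the end bound so no trailing empty batch is emitted. A minimal pure-Python sketch of just these semantics (illustrative only: batches_with_bounds and _trim_front are hypothetical names, and the real generator streams objects decoded by the Rust ParquetReader rather than iterating Python objects):

from types import SimpleNamespace
from typing import Iterable, Iterator, Optional


def batches_with_bounds(
    items: Iterable[SimpleNamespace],  # assumed already sorted by ts_init
    batch_size: int,
    start_nanos: Optional[int] = None,
    end_nanos: Optional[int] = None,
) -> Iterator[list]:
    batch: list = []
    for item in items:
        if end_nanos is not None and item.ts_init > end_nanos:
            break  # stop at the inclusive end bound -> no trailing empty batch
        batch.append(item)
        if len(batch) == batch_size:
            # Full batches are always yielded, so batches wholly before the
            # start bound come out as empty lists (the leading [0, 0, ...]
            # lengths seen in the tests).
            yield _trim_front(batch, start_nanos)
            batch = []
    if batch:
        final = _trim_front(batch, start_nanos)
        if final:  # suppress a trailing empty batch
            yield final


def _trim_front(batch: list, start_nanos: Optional[int]) -> list:
    # The start bound is inclusive: keep items with ts_init >= start_nanos.
    if start_nanos is None:
        return batch
    return [x for x in batch if x.ts_init >= start_nanos]


ticks = [SimpleNamespace(ts_init=i) for i in range(25)]
sizes = [len(b) for b in batches_with_bounds(ticks, 10, start_nanos=12, end_nanos=21)]
assert sizes == [0, 8, 2]  # leading empty batch, trimmed first and last batches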