diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c5784b994..df224ace7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,3 @@ repos: - --aggressive - --experimental - --ignore=W503,E501,E722,E402 -- repo: https://github.com/asottile/add-trailing-comma - rev: v2.4.0 - hooks: - - id: add-trailing-comma diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..56e6e605a --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,35 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +formats: all + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + # You can also specify other tool versions: + # nodejs: "19" + # rust: "1.64" + # golang: "1.19" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python requirements required to build your docs +python: + install: + - method: pip + path: . + extra_requirements: + - docs + system_packages: true diff --git a/.readthedocs.yml b/.readthedocs.yml deleted file mode 100644 index fffb2f7ce..000000000 --- a/.readthedocs.yml +++ /dev/null @@ -1,10 +0,0 @@ -python: - version: 3 - pip_install: true - extra_requirements: - - docs - -formats: - - htmlzip - - pdf - - epub diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7adce34b5..3ff5b942f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -161,8 +161,11 @@ After you've tested your changes locally, you'll want to add more permanent test 6. Check [Codebeat](https://codebeat.co/a/evhub/projects) and [LGTM](https://lgtm.com/dashboard) for `coconut` and `compiled-cocotest` 7. Make sure [`coconut-develop`](https://pypi.python.org/pypi/coconut-develop) package looks good 8. Run `make docs` and ensure local documentation looks good - 9. Make sure [develop documentation](http://coconut.readthedocs.io/en/develop/) looks good - 10. Make sure [Github Actions](https://github.com/evhub/coconut/actions) and [AppVeyor](https://ci.appveyor.com/project/evhub/coconut) are passing + 9. Make sure all of the following are passing: + 1. [Github Actions](https://github.com/evhub/coconut/actions) + 2. [AppVeyor](https://ci.appveyor.com/project/evhub/coconut) + 3. [readthedocs](https://readthedocs.org/projects/coconut/builds/) + 10. Make sure [develop documentation](http://coconut.readthedocs.io/en/develop/) looks good 11. Turn off `develop` in `root.py` 12. Set `root.py` to new version number 13. If major release, set `root.py` to new version name diff --git a/DOCS.md b/DOCS.md index 34cd5eac3..9b4d61a9a 100644 --- a/DOCS.md +++ b/DOCS.md @@ -90,10 +90,11 @@ The full list of optional dependencies is: - `watch`: enables use of the `--watch` flag. - `mypy`: enables use of the `--mypy` flag. - `backports`: installs libraries that backport newer Python features to older versions, which Coconut will automatically use instead of the standard library if the standard library is not available. Specifically: - - Installs [`typing`](https://pypi.org/project/typing/) and [`typing_extensions`](https://pypi.org/project/typing-extensions/) to backport [`typing`](https://docs.python.org/3/library/typing.html). - - Installs [`aenum`](https://pypi.org/project/aenum) to backport [`enum`](https://docs.python.org/3/library/enum.html). 
- - Installs [`trollius`](https://pypi.python.org/pypi/trollius) to backport [`asyncio`](https://docs.python.org/3/library/asyncio.html). - Installs [`dataclasses`](https://pypi.org/project/dataclasses/) to backport [`dataclasses`](https://docs.python.org/3/library/dataclasses.html). + - Installs [`typing`](https://pypi.org/project/typing/) to backport [`typing`](https://docs.python.org/3/library/typing.html) ([`typing_extensions`](https://pypi.org/project/typing-extensions/) is always installed for backporting individual `typing` objects). + - Installs [`aenum`](https://pypi.org/project/aenum) to backport [`enum`](https://docs.python.org/3/library/enum.html). + - Installs [`async_generator`](https://github.com/python-trio/async_generator) to backport [`async` generators](https://peps.python.org/pep-0525/) and [`asynccontextmanager`](https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager). + - Installs [`trollius`](https://pypi.python.org/pypi/trollius) to backport [`async`/`await`](https://docs.python.org/3/library/asyncio-task.html) and [`asyncio`](https://docs.python.org/3/library/asyncio.html). - `xonsh`: enables use of Coconut's [`xonsh` support](#xonsh-support). - `kernel`: lightweight subset of `jupyter` that only includes the dependencies that are strictly necessary for Coconut's [Jupyter kernel](#kernel). - `tests`: everything necessary to test the Coconut language itself. @@ -204,7 +205,7 @@ dest destination directory for compiled files (defaults to run the compiler in a separate thread with the given stack size in kilobytes --site-install, --siteinstall - set up coconut.convenience to be imported on Python start + set up coconut.api to be imported on Python start --site-uninstall, --siteuninstall revert the effects of --site-install --verbose print verbose debug output @@ -220,7 +221,7 @@ coconut-run ``` as an alias for ``` -coconut --run --quiet --target sys --line-numbers --argv +coconut --quiet --target sys --line-numbers --keep-lines --run --argv ``` which will quietly compile and run ``, passing any additional arguments to the script, mimicking how the `python` command works. 
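For example, under the expansion above, a hypothetical invocation like
```
coconut-run my_script.coco --some-arg
```
would be equivalent to running
```
coconut --quiet --target sys --line-numbers --keep-lines --run my_script.coco --argv --some-arg
```
(here `my_script.coco` and `--some-arg` are just placeholder values for the source file and its arguments).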
@@ -281,7 +282,7 @@ Finally, while Coconut will try to compile Python-3-specific syntax to its unive - the `nonlocal` keyword, - keyword-only function parameters (use [pattern-matching function definition](#pattern-matching-functions) for universal code), -- `async` and `await` statements (requires `--target 3.5`), +- `async` and `await` statements (requires a specific target; Coconut will attempt different backports based on the targeted version), - `:=` assignment expressions (requires `--target 3.8`), - positional-only function parameters (use [pattern-matching function definition](#pattern-matching-functions) for universal code) (requires `--target 3.8`), - `a[x, *y]` variadic generic syntax (use [type parameter syntax](#type-parameter-syntax) for universal code) (requires `--target 3.11`), and @@ -323,16 +324,17 @@ If the `--strict` (`-s` for short) flag is enabled, Coconut will perform additio The style issues which will cause `--strict` to throw an error are: -- mixing of tabs and spaces (without `--strict` will show a warning), -- use of `from __future__` imports (Coconut does these automatically) (without `--strict` will show a warning), -- inheriting from `object` in classes (Coconut does this automatically) (without `--strict` will show a warning), -- semicolons at end of lines (without `--strict` will show a warning), -- use of `u` to denote Unicode strings (all Coconut strings are Unicode strings) (without `--strict` will show a warning), -- missing new line at end of file, -- trailing whitespace at end of lines, -- use of the Python-style `lambda` statement (use [Coconut's lambda syntax](#lambdas) instead), -- use of backslash continuation (use [parenthetical continuation](#enhanced-parenthetical-continuation) instead), -- Python-3.10/PEP-634-style dotted names in pattern-matching (Coconut style is to preface these with `==`), and +- mixing of tabs and spaces (without `--strict` will show a warning). +- use of `from __future__` imports (Coconut does these automatically) (without `--strict` will show a warning). +- inheriting from `object` in classes (Coconut does this automatically) (without `--strict` will show a warning). +- semicolons at end of lines (without `--strict` will show a warning). +- use of `u` to denote Unicode strings (all Coconut strings are Unicode strings) (without `--strict` will show a warning). +- commas after [statement lambdas](#statement-lambdas) (not recommended as it can be unclear whether the comma is inside or outside the lambda) (without `--strict` will show a warning). +- missing new line at end of file. +- trailing whitespace at end of lines. +- use of the Python-style `lambda` statement (use [Coconut's lambda syntax](#lambdas) instead). +- use of backslash continuation (use [parenthetical continuation](#enhanced-parenthetical-continuation) instead). +- Python-3.10/PEP-634-style dotted names in pattern-matching (Coconut style is to preface these with `==`). - use of `:` instead of `<:` to specify upper bounds in [Coconut's type parameter syntax](#type-parameter-syntax). ## Integrations @@ -391,7 +393,7 @@ Simply installing Coconut should add a `Coconut` kernel to your Jupyter/IPython The Coconut kernel will always compile using the parameters: `--target sys --line-numbers --keep-lines --no-wrap-types`. -Coconut also provides the following convenience commands: +Coconut also provides the following api commands: - `coconut --jupyter notebook` will ensure that the Coconut kernel is available and launch a Jupyter/IPython notebook. 
- `coconut --jupyter console` will launch a Jupyter/IPython console using the Coconut kernel. @@ -422,14 +424,14 @@ To distribute your code with checkable type annotations, you'll need to include ##### Syntax -To explicitly annotate your code with types to be checked, Coconut supports: +To explicitly annotate your code with types to be checked, Coconut supports (on all Python versions): * [Python 3 function type annotations](https://www.python.org/dev/peps/pep-0484/), * [Python 3.6 variable type annotations](https://www.python.org/dev/peps/pep-0526/), -* [PEP 695 type parameter syntax](#type-parameter-syntax) for easily adding type parameters to classes, functions, [`data` types](#data), and type aliases, +* [Python 3.12 type parameter syntax](#type-parameter-syntax) for easily adding type parameters to classes, functions, [`data` types](#data), and type aliases, * Coconut's own [enhanced type annotation syntax](#enhanced-type-annotation), and * Coconut's [protocol intersection operator](#protocol-intersection). -By default, all type annotations are compiled to Python-2-compatible type comments, which means it all works on any Python version. +By default, all type annotations are compiled to Python-2-compatible type comments, which means they should all work on any Python version. Sometimes, MyPy will not know how to handle certain Coconut constructs, such as `addpattern`. For the `addpattern` case, it is recommended to pass `--allow-redefinition` to MyPy (i.e. run `coconut --mypy --allow-redefinition`), though in some cases `--allow-redefinition` may not be sufficient. In that case, either hide the offending code using [`TYPE_CHECKING`](#type_checking) or put a `# type: ignore` comment on the Coconut line which is generating the line MyPy is complaining about and the comment will be added to every generated line. @@ -1052,9 +1054,8 @@ _Note: these are only the default, built-in unicode operators. Coconut supports ≥ (\u2265) or ⊇ (\u2287) => ">=" ⊊ (\u228a) => "<" ⊋ (\u228b) => ">" -∧ (\u2227) or ∩ (\u2229) => "&" -∨ (\u2228) or ∪ (\u222a) => "|" -⊻ (\u22bb) => "^" +∩ (\u2229) => "&" +∪ (\u222a) => "|" « (\xab) => "<<" » (\xbb) => ">>" … (\u2026) => "..." @@ -1547,6 +1548,60 @@ b = 2 c = a + b ``` +### `async with for` + +In modern Python `async` code, such as when using [`contextlib.aclosing`](https://docs.python.org/3/library/contextlib.html#contextlib.aclosing), it is often recommended to use a pattern like +```coconut_python +async with aclosing(my_generator()) as values: + async for value in values: + ... +``` +since it is substantially safer than the more syntactically straightforward +```coconut_python +async for value in my_generator(): + ... +``` + +This is especially true when using [`trio`](https://github.com/python-trio/trio), which [completely disallows iterating over `async` generators with `async for`](https://discuss.python.org/t/preventing-yield-inside-certain-context-managers/1091), instead requiring the above `async with ... async for` pattern using utilities such as [`trio_util.trio_async_generator`](https://trio-util.readthedocs.io/en/latest/#trio_util.trio_async_generator). + +Since this pattern can often be quite syntactically cumbersome, Coconut provides the shortcut syntax +``` +async with for aclosing(my_generator()) as values: + ... +``` +which compiles to exactly the pattern above. + +`async with for` also [supports pattern-matching, just like normal Coconut `for` loops](#match-for). 
+ +##### Example + +**Coconut:** +```coconut +from trio_util import trio_async_generator + +@trio_async_generator +async def my_generator(): + # yield values, possibly from a nursery or cancel scope + # ... + +async with for value in my_generator(): + print(value) +``` + +**Python:** +```coconut_python +from trio_util import trio_async_generator + +@trio_async_generator +async def my_generator(): + # yield values, possibly from a nursery or cancel scope + # ... + +async with my_generator() as agen: + async for value in agen: + print(value) +``` + ### Handling Keyword/Variable Name Overlap In Coconut, the following keywords are also valid variable names: @@ -1614,7 +1669,7 @@ If the last `statement` (not followed by a semicolon) in a statement lambda is a Statement lambdas also support implicit lambda syntax such that `def -> _` is equivalent to `def (_=None) -> _` as well as explicitly marking them as pattern-matching such that `match def (x) -> x` will be a pattern-matching function. -Note that statement lambdas have a lower precedence than normal lambdas and thus capture things like trailing commas. +Note that statement lambdas have a lower precedence than normal lambdas and thus capture things like trailing commas. To avoid confusion, statement lambdas should always be wrapped in their own set of parentheses. ##### Example @@ -1780,7 +1835,7 @@ mod(5, 3) Since Coconut syntax is a superset of Python 3 syntax, it supports [Python 3 function type annotation syntax](https://www.python.org/dev/peps/pep-0484/) and [Python 3.6 variable type annotation syntax](https://www.python.org/dev/peps/pep-0526/). By default, Coconut compiles all type annotations into Python-2-compatible type comments. If you want to keep the type annotations instead, simply pass a `--target` that supports them. -Since not all supported Python versions support the [`typing`](https://docs.python.org/3/library/typing.html) module, Coconut provides the [`TYPE_CHECKING`](#type_checking) built-in for hiding your `typing` imports and `TypeVar` definitions from being executed at runtime. Coconut will also automatically use [`typing_extensions`](https://pypi.org/project/typing-extensions/) over `typing` when importing objects not available in `typing` on the current Python version. +Since not all supported Python versions support the [`typing`](https://docs.python.org/3/library/typing.html) module, Coconut provides the [`TYPE_CHECKING`](#type_checking) built-in for hiding your `typing` imports and `TypeVar` definitions from being executed at runtime. Coconut will also automatically use [`typing_extensions`](https://pypi.org/project/typing-extensions/) over `typing` objects at runtime when importing them from `typing`, even when they aren't natively supported on the current Python version (this works even if you just do `import typing` and then `typing.`). Furthermore, when compiling type annotations to Python 3 versions without [PEP 563](https://www.python.org/dev/peps/pep-0563/) support, Coconut wraps annotation in strings to prevent them from being evaluated at runtime (note that `--no-wrap-types` disables all wrapping, including via PEP 563 support). @@ -2524,7 +2579,7 @@ _Can't be done without a long series of checks in place of the destructuring ass ### Type Parameter Syntax -Coconut fully supports [PEP 695](https://peps.python.org/pep-0695/) type parameter syntax (with the caveat that all type variables are invariant rather than inferred). 
+Coconut fully supports [Python 3.12 PEP 695](https://peps.python.org/pep-0695/) type parameter syntax on all Python versions. That includes type parameters for classes, [`data` types](#data), and [all types of function definition](#function-definition). For different types of function definition, the type parameters always come in brackets right after the function name. Coconut's [enhanced type annotation syntax](#enhanced-type-annotation) is supported for all type parameter bounds. @@ -2532,6 +2587,8 @@ _Warning: until `mypy` adds support for `infer_variance=True` in `TypeVar`, `Typ Additionally, Coconut supports the alternative bounds syntax of `type NewType[T <: bound] = ...` rather than `type NewType[T: bound] = ...`, to make it more clear that it is an upper bound rather than a type. In `--strict` mode, `<:` is required over `:` for all type parameter bounds. _DEPRECATED: `<=` can also be used as an alternative to `<:`._ +Note that the `<:` syntax should only be used for [type bounds](https://peps.python.org/pep-0695/#upper-bound-specification), not [type constraints](https://peps.python.org/pep-0695/#constrained-type-specification)—for type constraints, Coconut style prefers the vanilla Python `:` syntax, which helps to disambiguate between the two cases, as they are functionally different but otherwise hard to tell apart at a glance. This is enforced in `--strict` mode. + _Note that, by default, all type declarations are wrapped in strings to enable forward references and improve runtime performance. If you don't want that—e.g. because you want to use type annotations at runtime—simply pass the `--no-wrap-types` flag._ ##### PEP 695 Docs @@ -3208,7 +3265,9 @@ _Can't be done without a series of method definitions for each data type. See th In Haskell, `fmap(func, obj)` takes a data type `obj` and returns a new data type with `func` mapped over the contents. Coconut's `fmap` function does the exact same thing for Coconut's [data types](#data). -`fmap` can also be used on built-ins such as `str`, `list`, `set`, and `dict` as a variant of `map` that returns back an object of the same type. The behavior of `fmap` for a given object can be overridden by defining an `__fmap__(self, func)` magic method that will be called whenever `fmap` is invoked on that object. Note that `__fmap__` implementations should always satisfy the [Functor Laws](https://wiki.haskell.org/Functor). +`fmap` can also be used on the built-in objects `str`, `dict`, `list`, `tuple`, `set`, `frozenset`, and `dict` as a variant of `map` that returns back an object of the same type. + +The behavior of `fmap` for a given object can be overridden by defining an `__fmap__(self, func)` magic method that will be called whenever `fmap` is invoked on that object. Note that `__fmap__` implementations should always satisfy the [Functor Laws](https://wiki.haskell.org/Functor). For `dict`, or any other `collections.abc.Mapping`, `fmap` will map over the mapping's `.items()` instead of the default iteration through its `.keys()`, with the new mapping reconstructed from the mapped over items. _DEPRECATED: `fmap$(starmap_over_mappings=True)` will `starmap` over the `.items()` instead of `map` over them._ @@ -3909,7 +3968,7 @@ if group: **windowsof**(_size_, _iterable_, _fillvalue_=`...`, _step_=`1`) -`windowsof` produces an iterable that effectively mimics a sliding window over _iterable_ of size _size_. _step_ determines the spacing between windowsof. 
+`windowsof` produces an iterable that effectively mimics a sliding window over _iterable_ of size _size_. _step_ determines the spacing between windows. If _size_ is larger than _iterable_, `windowsof` will produce an empty iterable. If that is not the desired behavior, _fillvalue_ can be passed and will be used in place of missing values. Also, if _fillvalue_ is passed and the length of the _iterable_ is not divisible by _step_, _fillvalue_ will be used in that case to pad the last window as well. Note that _fillvalue_ will only ever appear in the last window. @@ -4266,7 +4325,7 @@ Recommended usage is as a debugging tool, where the code `from coconut import em ### Automatic Compilation -If you don't care about the exact compilation parameters you want to use, automatic compilation lets Coconut take care of everything for you. Automatic compilation can be enabled either by importing [`coconut.convenience`](#coconut-convenience) before you import anything else, or by running `coconut --site-install`. Once automatic compilation is enabled, Coconut will check each of your imports to see if you are attempting to import a `.coco` file and, if so, automatically compile it for you. Note that, for Coconut to know what file you are trying to import, it will need to be accessible via `sys.path`, just like a normal import. +If you don't care about the exact compilation parameters you want to use, automatic compilation lets Coconut take care of everything for you. Automatic compilation can be enabled either by importing [`coconut.api`](#coconut-api) before you import anything else, or by running `coconut --site-install`. Once automatic compilation is enabled, Coconut will check each of your imports to see if you are attempting to import a `.coco` file and, if so, automatically compile it for you. Note that, for Coconut to know what file you are trying to import, it will need to be accessible via `sys.path`, just like a normal import. Automatic compilation always compiles modules and packages in-place, and always uses `--target sys`. Automatic compilation is always available in the Coconut interpreter, and, if using the Coconut interpreter, a `reload` built-in is provided to easily reload imported modules. Additionally, the interpreter always allows importing from the current working directory, letting you easily compile and play around with a `.coco` file simply by running the Coconut interpreter and importing it. @@ -4276,15 +4335,17 @@ While automatic compilation is the preferred method for dynamically compiling Co ```coconut # coding: coconut ``` -declaration which can be added to `.py` files to have them treated as Coconut files instead. To use such a coding declaration, you'll need to either run `coconut --site-install` or `import coconut.convenience` at some point before you first attempt to import a file with a `# coding: coconut` declaration. Like automatic compilation, compilation is always done with `--target sys` and is always available from the Coconut interpreter. +declaration which can be added to `.py` files to have them treated as Coconut files instead. To use such a coding declaration, you'll need to either run `coconut --site-install` or `import coconut.api` at some point before you first attempt to import a file with a `# coding: coconut` declaration. Like automatic compilation, compilation is always done with `--target sys` and is always available from the Coconut interpreter. 
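As a rough sketch of what enabling automatic compilation (described above) looks like in practice—where the module name `my_module` is a hypothetical placeholder for a `my_module.coco` file reachable via `sys.path`:
```coconut_python
import coconut.api  # importing this first enables automatic compilation

# Hypothetical: since my_module.coco is on sys.path, this import compiles it
# in-place with --target sys and then imports the compiled result as usual.
import my_module
```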
+ +### `coconut.api` -### `coconut.convenience` +In addition to enabling automatic compilation, `coconut.api` can also be used to call the Coconut compiler from code instead of from the command line. See below for specifications of the different api functions. -In addition to enabling automatic compilation, `coconut.convenience` can also be used to call the Coconut compiler from code instead of from the command line. See below for specifications of the different convenience functions. +_DEPRECATED: `coconut.convenience` is a deprecated alias for `coconut.api`._ #### `get_state` -**coconut.convenience.get\_state**(_state_=`None`) +**coconut.api.get\_state**(_state_=`None`) Gets a state object which stores the current compilation parameters. State objects can be configured with [**setup**](#setup) or [**cmd**](#cmd) and then used in [**parse**](#parse) or [**coconut\_eval**](#coconut_eval). @@ -4292,9 +4353,9 @@ If _state_ is `None`, gets a new state object, whereas if _state_ is `False`, th #### `parse` -**coconut.convenience.parse**(_code_=`""`, _mode_=`"sys"`, _state_=`False`, _keep\_internal\_state_=`None`) +**coconut.api.parse**(_code_=`""`, _mode_=`"sys"`, _state_=`False`, _keep\_internal\_state_=`None`) -Likely the most useful of the convenience functions, `parse` takes Coconut code as input and outputs the equivalent compiled Python code. _mode_ is used to indicate the context for the parsing and _state_ is the state object storing the compilation parameters to use as obtained from [**get_state**](#get_state) (if `False`, uses the global state object). _keep\_internal\_state_ determines whether the state object will keep internal state (such as what [custom operators](#custom-operators) have been declared)—if `None`, internal state will be kept iff you are not using the global _state_. +Likely the most useful of the api functions, `parse` takes Coconut code as input and outputs the equivalent compiled Python code. _mode_ is used to indicate the context for the parsing and _state_ is the state object storing the compilation parameters to use as obtained from [**get_state**](#get_state) (if `False`, uses the global state object). _keep\_internal\_state_ determines whether the state object will keep internal state (such as what [custom operators](#custom-operators) have been declared)—if `None`, internal state will be kept iff you are not using the global _state_. If _code_ is not passed, `parse` will output just the given _mode_'s header, which can be executed to set up an execution environment in which future code can be parsed and executed without a header. @@ -4341,7 +4402,7 @@ Each _mode_ has two components: what parser it uses, and what header it prepends ##### Example ```coconut_python -from coconut.convenience import parse +from coconut.api import parse exec(parse()) while True: exec(parse(input(), mode="block")) @@ -4349,7 +4410,7 @@ while True: #### `setup` -**coconut.convenience.setup**(_target_=`None`, _strict_=`False`, _minify_=`False`, _line\_numbers_=`False`, _keep\_lines_=`False`, _no\_tco_=`False`, _no\_wrap_=`False`, *, _state_=`False`) +**coconut.api.setup**(_target_=`None`, _strict_=`False`, _minify_=`False`, _line\_numbers_=`False`, _keep\_lines_=`False`, _no\_tco_=`False`, _no\_wrap_=`False`, *, _state_=`False`) `setup` can be used to set up the given state object with the given command-line flags. If _state_ is `False`, the global state object is used. 
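For instance, here is a minimal sketch of configuring a dedicated (non-global) state object with `setup` and compiling against it (the Coconut snippet being compiled is purely illustrative):
```coconut_python
from coconut.api import get_state, setup, parse

state = get_state()  # passing nothing gets a fresh state object
setup(target="sys", state=state)  # apply compilation parameters to that state
compiled = parse("[1, 2, 3] |> sum |> print", mode="block", state=state)
print(compiled)  # the compiled Python for the snippet above
```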
@@ -4365,7 +4426,7 @@ The possible values for each flag argument are: #### `cmd` -**coconut.convenience.cmd**(_args_=`None`, *, _argv_=`None`, _interact_=`False`, _default\_target_=`None`, _state_=`False`) +**coconut.api.cmd**(_args_=`None`, *, _argv_=`None`, _interact_=`False`, _default\_target_=`None`, _state_=`False`) Executes the given _args_ as if they were fed to `coconut` on the command-line, with the exception that unless _interact_ is true or `-i` is passed, the interpreter will not be started. Additionally, _argv_ can be used to pass in arguments as in `--argv` and _default\_target_ can be used to set the default `--target`. @@ -4373,13 +4434,13 @@ Has the same effect of setting the command-line flags on the given _state_ objec #### `coconut_eval` -**coconut.convenience.coconut_eval**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`, _keep\_internal\_state_=`None`) +**coconut.api.coconut_eval**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`, _keep\_internal\_state_=`None`) Version of [`eval`](https://docs.python.org/3/library/functions.html#eval) which can evaluate Coconut code. #### `version` -**coconut.convenience.version**(**[**_which_**]**) +**coconut.api.version**(**[**_which_**]**) Retrieves a string containing information about the Coconut version. The optional argument _which_ is the type of version information desired. Possible values of _which_ are: @@ -4391,19 +4452,19 @@ Retrieves a string containing information about the Coconut version. The optiona #### `auto_compilation` -**coconut.convenience.auto_compilation**(_on_=`True`) +**coconut.api.auto_compilation**(_on_=`True`) -Turns [automatic compilation](#automatic-compilation) on or off. This function is called automatically when `coconut.convenience` is imported. +Turns [automatic compilation](#automatic-compilation) on or off. This function is called automatically when `coconut.api` is imported. #### `use_coconut_breakpoint` -**coconut.convenience.use_coconut_breakpoint**(_on_=`True`) +**coconut.api.use_coconut_breakpoint**(_on_=`True`) -Switches the [`breakpoint` built-in](https://www.python.org/dev/peps/pep-0553/) which Coconut makes universally available to use [`coconut.embed`](#coconut-embed) instead of [`pdb.set_trace`](https://docs.python.org/3/library/pdb.html#pdb.set_trace) (or undoes that switch if `on=False`). This function is called automatically when `coconut.convenience` is imported. +Switches the [`breakpoint` built-in](https://www.python.org/dev/peps/pep-0553/) which Coconut makes universally available to use [`coconut.embed`](#coconut-embed) instead of [`pdb.set_trace`](https://docs.python.org/3/library/pdb.html#pdb.set_trace) (or undoes that switch if `on=False`). This function is called automatically when `coconut.api` is imported. #### `CoconutException` -If an error is encountered in a convenience function, a `CoconutException` instance may be raised. `coconut.convenience.CoconutException` is provided to allow catching such errors. +If an error is encountered in a api function, a `CoconutException` instance may be raised. `coconut.api.CoconutException` is provided to allow catching such errors. ### `coconut.__coconut__` diff --git a/FAQ.md b/FAQ.md index 755cdbbb2..201885b2e 100644 --- a/FAQ.md +++ b/FAQ.md @@ -94,4 +94,4 @@ If you don't get the reference, the image above is from [Monty Python and the Ho ### Who developed Coconut? 
-[Evan Hubinger](https://github.com/evhub) is a [full-time AGI safety researcher](https://www.alignmentforum.org/users/evhub) at the [Machine Intelligence Research Institute](https://intelligence.org/). He can be reached by asking a question on [Coconut's Gitter chat room](https://gitter.im/evhub/coconut), through email at , or on [LinkedIn](https://www.linkedin.com/in/ehubinger). +[Evan Hubinger](https://github.com/evhub) is an [AI safety research scientist](https://www.alignmentforum.org/users/evhub) at [Anthropic](https://www.anthropic.com/). He can be reached by asking a question on [Coconut's Gitter chat room](https://gitter.im/evhub/coconut), through email at , or on [LinkedIn](https://www.linkedin.com/in/ehubinger). diff --git a/HELP.md b/HELP.md index e016cb271..99b1a5c4b 100644 --- a/HELP.md +++ b/HELP.md @@ -133,7 +133,7 @@ Compiling single files is not the only way to use the Coconut command-line utili The Coconut compiler supports a large variety of different compilation options, the help for which can always be accessed by entering `coconut -h` into the command line. One of the most useful of these is `--line-numbers` (or `-l` for short). Using `--line-numbers` will add the line numbers of your source code as comments in the compiled code, allowing you to see what line in your source code corresponds to a line in the compiled code where an error occurred, for ease of debugging. -_Note: If you don't need the full control of the Coconut compiler, you can also [access your Coconut code just by importing it](./DOCS.md#automatic-compilation), either from the Coconut interpreter, or in any Python file where you import [`coconut.convenience`](./DOCS.md#coconut-convenience)._ +_Note: If you don't need the full control of the Coconut compiler, you can also [access your Coconut code just by importing it](./DOCS.md#automatic-compilation), either from the Coconut interpreter, or in any Python file where you import [`coconut.api`](./DOCS.md#coconut-api)._ ### Using IPython/Jupyter diff --git a/__coconut__/__init__.pyi b/__coconut__/__init__.pyi index 75c660612..b85237ebc 100644 --- a/__coconut__/__init__.pyi +++ b/__coconut__/__init__.pyi @@ -8,13 +8,13 @@ License: Apache 2.0 Description: MyPy stub file for __coconut__.py. """ -import sys -import typing as _t - # ----------------------------------------------------------------------------------------------------------------------- # TYPE VARS: # ----------------------------------------------------------------------------------------------------------------------- +import sys +import typing as _t + _Callable = _t.Callable[..., _t.Any] _Iterable = _t.Iterable[_t.Any] _Tuple = _t.Tuple[_t.Any, ...] @@ -55,21 +55,14 @@ _P = _t.ParamSpec("_P") class _SupportsIndex(_t.Protocol): def __index__(self) -> int: ... - # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- -if sys.version_info >= (3, 11): - from typing import dataclass_transform as _dataclass_transform +if sys.version_info >= (3,): + import builtins as _builtins else: - try: - from typing_extensions import dataclass_transform as _dataclass_transform - except ImportError: - dataclass_transform = ... 
- -import _coconut as __coconut # we mock _coconut as a package since mypy doesn't handle namespace classes very well -_coconut = __coconut + import __builtin__ as _builtins if sys.version_info >= (3, 2): from functools import lru_cache as _lru_cache @@ -81,13 +74,24 @@ if sys.version_info >= (3, 7): from dataclasses import dataclass as _dataclass else: @_dataclass_transform() - def _dataclass(cls: t_coype[_T], **kwargs: _t.Any) -> type[_T]: ... + def _dataclass(cls: type[_T], **kwargs: _t.Any) -> type[_T]: ... + +if sys.version_info >= (3, 11): + from typing import dataclass_transform as _dataclass_transform +else: + try: + from typing_extensions import dataclass_transform as _dataclass_transform + except ImportError: + dataclass_transform = ... try: from typing_extensions import deprecated as _deprecated # type: ignore except ImportError: def _deprecated(message: _t.Text) -> _t.Callable[[_T], _T]: ... # type: ignore +import _coconut as __coconut # we mock _coconut as a package since mypy doesn't handle namespace classes very well +_coconut = __coconut + # ----------------------------------------------------------------------------------------------------------------------- # STUB: @@ -153,18 +157,18 @@ py_repr = repr py_breakpoint = breakpoint # all py_ functions, but not py_ types, go here -chr = chr -hex = hex -input = input -map = map -oct = oct -open = open -print = print -range = range -zip = zip -filter = filter -reversed = reversed -enumerate = enumerate +chr = _builtins.chr +hex = _builtins.hex +input = _builtins.input +map = _builtins.map +oct = _builtins.oct +open = _builtins.open +print = _builtins.print +range = _builtins.range +zip = _builtins.zip +filter = _builtins.filter +reversed = _builtins.reversed +enumerate = _builtins.enumerate _coconut_py_str = py_str @@ -435,6 +439,9 @@ def recursive_iterator(func: _T_iter_func) -> _T_iter_func: return func +# if sys.version_info >= (3, 12): +# from typing import override +# else: try: from typing_extensions import override as _override # type: ignore override = _override @@ -442,6 +449,7 @@ except ImportError: def override(func: _Tfunc) -> _Tfunc: return func + def _coconut_call_set_names(cls: object) -> None: ... 
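The stub above leans on a backport-with-fallback idiom in several places; as a self-contained sketch (outside the stub) of how that idiom behaves at runtime:
```python
# Prefer the typing_extensions backport when it is installed; otherwise fall
# back to a no-op decorator so the name always exists at runtime.
try:
    from typing_extensions import override
except ImportError:
    def override(func):
        return func

class Base:
    def greet(self):
        return "base"

class Child(Base):
    @override  # lets supporting type checkers verify this really overrides Base.greet
    def greet(self):
        return "child"
```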
diff --git a/_coconut/__init__.pyi b/_coconut/__init__.pyi index ed242669c..38433b7ac 100644 --- a/_coconut/__init__.pyi +++ b/_coconut/__init__.pyi @@ -31,25 +31,33 @@ import multiprocessing as _multiprocessing import pickle as _pickle from multiprocessing import dummy as _multiprocessing_dummy +if sys.version_info >= (3,): + import builtins as _builtins +else: + import __builtin__ as _builtins + if sys.version_info >= (3,): import copyreg as _copyreg else: import copy_reg as _copyreg -if sys.version_info >= (3, 4): - import asyncio as _asyncio +if sys.version_info >= (3,): + from itertools import zip_longest as _zip_longest else: - import trollius as _asyncio # type: ignore + from itertools import izip_longest as _zip_longest if sys.version_info < (3, 3): _abc = _collections else: from collections import abc as _abc -if sys.version_info >= (3,): - from itertools import zip_longest as _zip_longest +if sys.version_info >= (3, 4): + import asyncio as _asyncio else: - from itertools import izip_longest as _zip_longest + import trollius as _asyncio # type: ignore + +if sys.version_info >= (3, 5): + import async_generator as _async_generator # type: ignore try: import numpy as _numpy # type: ignore @@ -65,41 +73,6 @@ else: # ----------------------------------------------------------------------------------------------------------------------- typing = _t - -from typing_extensions import TypeVar -typing.TypeVar = TypeVar # type: ignore - -if sys.version_info < (3, 8): - try: - from typing_extensions import Protocol - except ImportError: - Protocol = ... # type: ignore - typing.Protocol = Protocol # type: ignore - -if sys.version_info < (3, 10): - try: - from typing_extensions import TypeAlias, ParamSpec, Concatenate - except ImportError: - TypeAlias = ... # type: ignore - ParamSpec = ... # type: ignore - Concatenate = ... # type: ignore - typing.TypeAlias = TypeAlias # type: ignore - typing.ParamSpec = ParamSpec # type: ignore - typing.Concatenate = Concatenate # type: ignore - -if sys.version_info < (3, 11): - try: - from typing_extensions import TypeVarTuple, Unpack - except ImportError: - TypeVarTuple = ... # type: ignore - Unpack = ... # type: ignore - typing.TypeVarTuple = TypeVarTuple # type: ignore - typing.Unpack = Unpack # type: ignore - -# ----------------------------------------------------------------------------------------------------------------------- -# STUB: -# ----------------------------------------------------------------------------------------------------------------------- - collections = _collections copy = _copy functools = _functools @@ -117,6 +90,7 @@ multiprocessing_dummy = _multiprocessing_dummy copyreg = _copyreg asyncio = _asyncio +async_generator = _async_generator pickle = _pickle if sys.version_info >= (2, 7): OrderedDict = collections.OrderedDict @@ -137,62 +111,62 @@ tee_type: _t.Any = ... reiterables: _t.Any = ... fmappables: _t.Any = ... 
-Ellipsis = Ellipsis -NotImplemented = NotImplemented -NotImplementedError = NotImplementedError -Exception = Exception -AttributeError = AttributeError -ImportError = ImportError -IndexError = IndexError -KeyError = KeyError -NameError = NameError -TypeError = TypeError -ValueError = ValueError -StopIteration = StopIteration -RuntimeError = RuntimeError -callable = callable -classmethod = classmethod -complex = complex -all = all -any = any -bool = bool -bytes = bytes -dict = dict -enumerate = enumerate -filter = filter -float = float -frozenset = frozenset -getattr = getattr -hasattr = hasattr -hash = hash -id = id -int = int -isinstance = isinstance -issubclass = issubclass -iter = iter +Ellipsis = _builtins.Ellipsis +NotImplemented = _builtins.NotImplemented +NotImplementedError = _builtins.NotImplementedError +Exception = _builtins.Exception +AttributeError = _builtins.AttributeError +ImportError = _builtins.ImportError +IndexError = _builtins.IndexError +KeyError = _builtins.KeyError +NameError = _builtins.NameError +TypeError = _builtins.TypeError +ValueError = _builtins.ValueError +StopIteration = _builtins.StopIteration +RuntimeError = _builtins.RuntimeError +callable = _builtins.callable +classmethod = _builtins.classmethod +complex = _builtins.complex +all = _builtins.all +any = _builtins.any +bool = _builtins.bool +bytes = _builtins.bytes +dict = _builtins.dict +enumerate = _builtins.enumerate +filter = _builtins.filter +float = _builtins.float +frozenset = _builtins.frozenset +getattr = _builtins.getattr +hasattr = _builtins.hasattr +hash = _builtins.hash +id = _builtins.id +int = _builtins.int +isinstance = _builtins.isinstance +issubclass = _builtins.issubclass +iter = _builtins.iter len: _t.Callable[..., int] = ... # pattern-matching needs an untyped _coconut.len to avoid type errors -list = list -locals = locals -globals = globals -map = map -min = min -max = max -next = next -object = object -print = print -property = property -range = range -reversed = reversed -set = set -setattr = setattr -slice = slice -str = str -sum = sum -super = super -tuple = tuple -type = type -zip = zip -vars = vars -repr = repr +list = _builtins.list +locals = _builtins.locals +globals = _builtins.globals +map = _builtins.map +min = _builtins.min +max = _builtins.max +next = _builtins.next +object = _builtins.object +print = _builtins.print +property = _builtins.property +range = _builtins.range +reversed = _builtins.reversed +set = _builtins.set +setattr = _builtins.setattr +slice = _builtins.slice +str = _builtins.str +sum = _builtins.sum +super = _builtins.super +tuple = _builtins.tuple +type = _builtins.type +zip = _builtins.zip +vars = _builtins.vars +repr = _builtins.repr if sys.version_info >= (3,): - bytearray = bytearray + bytearray = _builtins.bytearray diff --git a/coconut/api.py b/coconut/api.py new file mode 100644 index 000000000..0e1d42d6e --- /dev/null +++ b/coconut/api.py @@ -0,0 +1,275 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# ----------------------------------------------------------------------------------------------------------------------- +# INFO: +# ----------------------------------------------------------------------------------------------------------------------- + +""" +Author: Evan Hubinger +License: Apache 2.0 +Description: Coconut's main external API. 
+""" + +# ----------------------------------------------------------------------------------------------------------------------- +# IMPORTS: +# ----------------------------------------------------------------------------------------------------------------------- + +from __future__ import print_function, absolute_import, unicode_literals, division + +from coconut.root import * # NOQA + +import sys +import os.path +import codecs +try: + from encodings import utf_8 +except ImportError: + utf_8 = None + +from coconut.integrations import embed +from coconut.exceptions import CoconutException +from coconut.command import Command +from coconut.command.cli import cli_version +from coconut.compiler import Compiler +from coconut.constants import ( + version_tag, + code_exts, + coconut_import_hook_args, + coconut_kernel_kwargs, +) + +# ----------------------------------------------------------------------------------------------------------------------- +# COMMAND: +# ----------------------------------------------------------------------------------------------------------------------- + +GLOBAL_STATE = None + + +def get_state(state=None): + """Get a Coconut state object; None gets a new state, False gets the global state.""" + global GLOBAL_STATE + if state is None: + return Command() + elif state is False: + if GLOBAL_STATE is None: + GLOBAL_STATE = Command() + return GLOBAL_STATE + else: + return state + + +def cmd(cmd_args, interact=False, state=False, **kwargs): + """Process command-line arguments.""" + if isinstance(cmd_args, (str, bytes)): + cmd_args = cmd_args.split() + return get_state(state).cmd(cmd_args, interact=interact, **kwargs) + + +VERSIONS = { + "num": VERSION, + "name": VERSION_NAME, + "spec": VERSION_STR, + "tag": version_tag, + "-v": cli_version, +} + + +def version(which="num"): + """Get the Coconut version.""" + if which in VERSIONS: + return VERSIONS[which] + else: + raise CoconutException( + "invalid version type " + repr(which), + extra="valid versions are " + ", ".join(VERSIONS), + ) + + +# ----------------------------------------------------------------------------------------------------------------------- +# COMPILER: +# ----------------------------------------------------------------------------------------------------------------------- + +def setup(*args, **kwargs): + """Set up the given state object.""" + state = kwargs.pop("state", False) + return get_state(state).setup(*args, **kwargs) + + +PARSERS = { + "sys": lambda comp: comp.parse_sys, + "exec": lambda comp: comp.parse_exec, + "file": lambda comp: comp.parse_file, + "package": lambda comp: comp.parse_package, + "block": lambda comp: comp.parse_block, + "single": lambda comp: comp.parse_single, + "eval": lambda comp: comp.parse_eval, + "lenient": lambda comp: comp.parse_lenient, + "xonsh": lambda comp: comp.parse_xonsh, +} + +# deprecated aliases +PARSERS["any"] = PARSERS["debug"] = PARSERS["lenient"] + + +def parse(code="", mode="sys", state=False, keep_internal_state=None): + """Compile Coconut code.""" + if keep_internal_state is None: + keep_internal_state = bool(state) + command = get_state(state) + if command.comp is None: + command.setup() + if mode not in PARSERS: + raise CoconutException( + "invalid parse mode " + repr(mode), + extra="valid modes are " + ", ".join(PARSERS), + ) + return PARSERS[mode](command.comp)(code, keep_state=keep_internal_state) + + +def coconut_eval(expression, globals=None, locals=None, state=False, **kwargs): + """Compile and evaluate Coconut code.""" + command = 
get_state(state) + if command.comp is None: + setup() + command.check_runner(set_sys_vars=False) + if globals is None: + globals = {} + command.runner.update_vars(globals) + compiled_python = parse(expression, "eval", state, **kwargs) + return eval(compiled_python, globals, locals) + + +# ----------------------------------------------------------------------------------------------------------------------- +# BREAKPOINT: +# ----------------------------------------------------------------------------------------------------------------------- + + +def _coconut_breakpoint(): + """Determine coconut.embed depth based on whether we're being + called by Coconut's breakpoint() or Python's breakpoint().""" + if sys.version_info >= (3, 7): + return embed(depth=1) + else: + return embed(depth=2) + + +def use_coconut_breakpoint(on=True): + """Switches the breakpoint() built-in (universally accessible via + coconut.__coconut__.breakpoint) to use coconut.embed.""" + if on: + sys.breakpointhook = _coconut_breakpoint + else: + sys.breakpointhook = sys.__breakpointhook__ + + +use_coconut_breakpoint() + + +# ----------------------------------------------------------------------------------------------------------------------- +# AUTOMATIC COMPILATION: +# ----------------------------------------------------------------------------------------------------------------------- + + +class CoconutImporter(object): + """Finder and loader for compiling Coconut files at import time.""" + ext = code_exts[0] + command = None + + def run_compiler(self, path): + """Run the Coconut compiler on the given path.""" + if self.command is None: + self.command = Command() + self.command.cmd([path] + list(coconut_import_hook_args)) + + def find_module(self, fullname, path=None): + """Searches for a Coconut file of the given name and compiles it.""" + basepaths = [""] + list(sys.path) + if fullname.startswith("."): + if path is None: + # we can't do a relative import if there's no package path + return + fullname = fullname[1:] + basepaths.insert(0, path) + fullpath = os.path.join(*fullname.split(".")) + for head in basepaths: + path = os.path.join(head, fullpath) + filepath = path + self.ext + dirpath = os.path.join(path, "__init__" + self.ext) + if os.path.exists(filepath): + self.run_compiler(filepath) + # Coconut file was found and compiled, now let Python import it + return + if os.path.exists(dirpath): + self.run_compiler(path) + # Coconut package was found and compiled, now let Python import it + return + + +coconut_importer = CoconutImporter() + + +def auto_compilation(on=True): + """Turn automatic compilation of Coconut files on or off.""" + if on: + if coconut_importer not in sys.meta_path: + sys.meta_path.insert(0, coconut_importer) + else: + try: + sys.meta_path.remove(coconut_importer) + except ValueError: + pass + + +auto_compilation() + + +# ----------------------------------------------------------------------------------------------------------------------- +# ENCODING: +# ----------------------------------------------------------------------------------------------------------------------- + + +if utf_8 is not None: + class CoconutStreamReader(utf_8.StreamReader, object): + """Compile Coconut code from a stream of UTF-8.""" + coconut_compiler = None + + @classmethod + def compile_coconut(cls, source): + """Compile the given Coconut source text.""" + if cls.coconut_compiler is None: + cls.coconut_compiler = Compiler(**coconut_kernel_kwargs) + return cls.coconut_compiler.parse_sys(source) + + @classmethod + def 
decode(cls, input_bytes, errors="strict"): + """Decode and compile the given Coconut source bytes.""" + input_str, len_consumed = super(CoconutStreamReader, cls).decode(input_bytes, errors) + return cls.compile_coconut(input_str), len_consumed + + class CoconutIncrementalDecoder(utf_8.IncrementalDecoder, object): + """Compile Coconut at the end of incrementally decoding UTF-8.""" + invertible = False + _buffer_decode = CoconutStreamReader.decode + + +def get_coconut_encoding(encoding="coconut"): + """Get a CodecInfo for the given Coconut encoding.""" + if not encoding.startswith("coconut"): + return None + if encoding != "coconut": + raise CoconutException("unknown Coconut encoding: " + repr(encoding)) + if utf_8 is None: + raise CoconutException("coconut encoding requires encodings.utf_8") + return codecs.CodecInfo( + name=encoding, + encode=utf_8.encode, + decode=CoconutStreamReader.decode, + incrementalencoder=utf_8.IncrementalEncoder, + incrementaldecoder=CoconutIncrementalDecoder, + streamreader=CoconutStreamReader, + streamwriter=utf_8.StreamWriter, + ) + + +codecs.register(get_coconut_encoding) diff --git a/coconut/api.pyi b/coconut/api.pyi new file mode 100644 index 000000000..b2845d394 --- /dev/null +++ b/coconut/api.pyi @@ -0,0 +1,108 @@ +#----------------------------------------------------------------------------------------------------------------------- +# INFO: +#----------------------------------------------------------------------------------------------------------------------- + +""" +Author: Evan Hubinger +License: Apache 2.0 +Description: MyPy stub file for api.py. +""" + +#----------------------------------------------------------------------------------------------------------------------- +# IMPORTS: +#----------------------------------------------------------------------------------------------------------------------- + +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Text, + Union, +) + +from coconut.command.command import Command + +class CoconutException(Exception): + ... + +#----------------------------------------------------------------------------------------------------------------------- +# COMMAND: +#----------------------------------------------------------------------------------------------------------------------- + +GLOBAL_STATE: Optional[Command] = None + + +def get_state(state: Optional[Command]=None) -> Command: ... + + +def cmd(args: Union[Text, bytes, Iterable], interact: bool=False) -> None: ... + + +VERSIONS: Dict[Text, Text] = ... + + +def version(which: Optional[Text]=None) -> Text: ... + + +#----------------------------------------------------------------------------------------------------------------------- +# COMPILER: +#----------------------------------------------------------------------------------------------------------------------- + + +def setup( + target: Optional[str]=None, + strict: bool=False, + minify: bool=False, + line_numbers: bool=False, + keep_lines: bool=False, + no_tco: bool=False, + no_wrap: bool=False, +) -> None: ... + + +PARSERS: Dict[Text, Callable] = ... + + +def parse( + code: Text, + mode: Text=..., + state: Optional[Command]=..., + keep_internal_state: Optional[bool]=None, +) -> Text: ... + + +def coconut_eval( + expression: Text, + globals: Optional[Dict[Text, Any]]=None, + locals: Optional[Dict[Text, Any]]=None, + state: Optional[Command]=..., + keep_internal_state: Optional[bool]=None, +) -> Any: ... 
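As a hedged usage sketch of the `coconut_eval` function stubbed above (assuming Coconut is installed; the expression is only illustrative):
```python
from coconut.api import coconut_eval

# Compiles the Coconut expression and evaluates the resulting Python.
result = coconut_eval("[1, 2, 3] |> map$(x -> x * 2) |> list")
assert result == [2, 4, 6]
```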
+ + +# ----------------------------------------------------------------------------------------------------------------------- +# ENABLERS: +# ----------------------------------------------------------------------------------------------------------------------- + + +def use_coconut_breakpoint(on: bool=True) -> None: ... + + +class CoconutImporter: + ext: str + + @staticmethod + def run_compiler(path: str) -> None: ... + + def find_module(self, fullname: str, path: Optional[str]=None) -> None: ... + + +coconut_importer = CoconutImporter() + + +def auto_compilation(on: bool=True) -> None: ... + + +def get_coconut_encoding(encoding: str=...) -> Any: ... diff --git a/coconut/command/cli.py b/coconut/command/cli.py index 73af5fde9..62e9b8050 100644 --- a/coconut/command/cli.py +++ b/coconut/command/cli.py @@ -269,7 +269,7 @@ arguments.add_argument( "--site-install", "--siteinstall", action="store_true", - help="set up coconut.convenience to be imported on Python start", + help="set up coconut.api to be imported on Python start", ) arguments.add_argument( diff --git a/coconut/command/resources/zcoconut.pth b/coconut/command/resources/zcoconut.pth index 8ca5c334e..56fab7383 100644 --- a/coconut/command/resources/zcoconut.pth +++ b/coconut/command/resources/zcoconut.pth @@ -1 +1 @@ -import coconut.convenience +import coconut.api diff --git a/coconut/command/util.py b/coconut/command/util.py index 85fdaa404..11ebce971 100644 --- a/coconut/command/util.py +++ b/coconut/command/util.py @@ -47,6 +47,7 @@ get_encoding, get_clock_time, memoize, + assert_remove_prefix, ) from coconut.constants import ( WINDOWS, @@ -173,7 +174,7 @@ def showpath(path): else: path = os.path.relpath(path) if path.startswith(os.curdir + os.sep): - path = path[len(os.curdir + os.sep):] + path = assert_remove_prefix(path, os.curdir + os.sep) return path @@ -423,13 +424,13 @@ def subpath(path, base_path): def invert_mypy_arg(arg): """Convert --arg into --no-arg or equivalent.""" if arg.startswith("--no-"): - return "--" + arg[len("--no-"):] + return "--" + assert_remove_prefix(arg, "--no-") elif arg.startswith("--allow-"): - return "--disallow-" + arg[len("--allow-"):] + return "--disallow-" + assert_remove_prefix(arg, "--allow-") elif arg.startswith("--disallow-"): - return "--allow-" + arg[len("--disallow-"):] + return "--allow-" + assert_remove_prefix(arg, "--disallow-") elif arg.startswith("--"): - return "--no-" + arg[len("--"):] + return "--no-" + assert_remove_prefix(arg, "--") else: return None @@ -552,7 +553,7 @@ class Runner(object): def __init__(self, comp=None, exit=sys.exit, store=False, path=None): """Create the executor.""" - from coconut.convenience import auto_compilation, use_coconut_breakpoint + from coconut.api import auto_compilation, use_coconut_breakpoint auto_compilation(on=interpreter_uses_auto_compilation) use_coconut_breakpoint(on=interpreter_uses_coconut_breakpoint) self.exit = exit diff --git a/coconut/compiler/compiler.py b/coconut/compiler/compiler.py index 9a7ba1bd6..bc5a800b8 100644 --- a/coconut/compiler/compiler.py +++ b/coconut/compiler/compiler.py @@ -87,6 +87,7 @@ all_builtins, in_place_op_funcs, match_first_arg_var, + import_existing, ) from coconut.util import ( pickleable_obj, @@ -97,6 +98,7 @@ get_target_info, get_clock_time, get_name, + assert_remove_prefix, ) from coconut.exceptions import ( CoconutException, @@ -195,8 +197,27 @@ def set_to_tuple(tokens): raise CoconutInternalException("invalid set maker item", tokens[0]) -def import_stmt(imp_from, imp, imp_as): +def 
import_stmt(imp_from, imp, imp_as, raw=False): """Generate an import statement.""" + if not raw: + module_path = (imp if imp_from is None else imp_from).split(".", 1) + existing_imp = import_existing.get(module_path[0]) + if existing_imp is not None: + return handle_indentation( + """ +if _coconut.typing.TYPE_CHECKING: + {raw_import} +else: + try: + {imp_name} = {imp_lookup} + except _coconut.AttributeError as _coconut_imp_err: + raise _coconut.ImportError(_coconut.str(_coconut_imp_err)) + """, + ).format( + raw_import=import_stmt(imp_from, imp, imp_as, raw=True), + imp_name=imp_as if imp_as is not None else imp, + imp_lookup=".".join([existing_imp] + module_path[1:] + ([imp] if imp_from is not None else [])), + ) return ( ("from " + imp_from + " " if imp_from is not None else "") + "import " + imp @@ -452,7 +473,7 @@ def __init__(self, *args, **kwargs): """Creates a new compiler with the given parsing parameters.""" self.setup(*args, **kwargs) - # changes here should be reflected in __reduce__ and in the stub for coconut.convenience.setup + # changes here should be reflected in __reduce__ and in the stub for coconut.api.setup def setup(self, target=None, strict=False, minify=False, line_numbers=False, keep_lines=False, no_tco=False, no_wrap=False): """Initializes parsing parameters.""" if target is None: @@ -726,6 +747,7 @@ def bind(cls): cls.new_testlist_star_expr <<= trace_attach(cls.new_testlist_star_expr_ref, cls.method("new_testlist_star_expr_handle")) cls.anon_namedtuple <<= trace_attach(cls.anon_namedtuple_ref, cls.method("anon_namedtuple_handle")) cls.base_match_for_stmt <<= trace_attach(cls.base_match_for_stmt_ref, cls.method("base_match_for_stmt_handle")) + cls.async_with_for_stmt <<= trace_attach(cls.async_with_for_stmt_ref, cls.method("async_with_for_stmt_handle")) cls.unsafe_typedef_tuple <<= trace_attach(cls.unsafe_typedef_tuple_ref, cls.method("unsafe_typedef_tuple_handle")) cls.funcname_typeparams <<= trace_attach(cls.funcname_typeparams_ref, cls.method("funcname_typeparams_handle")) cls.impl_call <<= trace_attach(cls.impl_call_ref, cls.method("impl_call_handle")) @@ -1794,6 +1816,8 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i and (not is_gen or self.target_info >= (3, 3)) # don't transform async returns if they're supported and (not is_async or self.target_info >= (3, 5)) + # don't transform async generators if they're supported + and (not (is_gen and is_async) or self.target_info >= (3, 6)) ): func_code = "".join(raw_lines) return func_code, tco, tre @@ -1832,9 +1856,18 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i # attempt tco/tre/async universalization if disabled_until_level is None: + # disallow yield from in async generators + if is_async and is_gen and self.yield_from_regex.search(base): + raise self.make_err( + CoconutSyntaxError, + "yield from not allowed in async generators", + original, + loc, + ) + # handle generator/async returns if not normal_func and self.return_regex.match(base): - to_return = base[len("return"):].strip() + to_return = assert_remove_prefix(base, "return").strip() if to_return: to_return = "(" + to_return + ")" # only use trollius Return when trollius is imported @@ -1853,6 +1886,20 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i ) line = indent + "raise " + ret_err + "(" + to_return + ")" + comment + dedent + # handle async generator yields + if is_async and is_gen and self.target_info < (3, 6): + if 
self.yield_regex.match(base): + to_yield = assert_remove_prefix(base, "yield").strip() + line = indent + "await _coconut.async_generator.yield_(" + to_yield + ")" + comment + dedent + elif self.yield_regex.search(base): + raise self.make_err( + CoconutTargetError, + "found Python 3.6 async generator yield in non-statement position (Coconut only backports async generator yields to 3.5 if they are at the start of the line)", + original, + loc, + target="36", + ) + # TRE tre_base = None if attempt_tre: @@ -1894,10 +1941,10 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, done = False while not done: if def_stmt.startswith("addpattern "): - def_stmt = def_stmt[len("addpattern "):] + def_stmt = assert_remove_prefix(def_stmt, "addpattern ") addpattern = True elif def_stmt.startswith("copyclosure "): - def_stmt = def_stmt[len("copyclosure "):] + def_stmt = assert_remove_prefix(def_stmt, "copyclosure ") copyclosure = True elif def_stmt.startswith("def"): done = True @@ -2004,15 +2051,17 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, original, loc, target="sys", ) - elif is_gen and self.target_info < (3, 6): + elif self.target_info >= (3, 5): + if is_gen and self.target_info < (3, 6): + decorators += "@_coconut.async_generator.async_generator\n" + def_stmt = "async " + def_stmt + elif is_gen: raise self.make_err( CoconutTargetError, - "found Python 3.6 async generator", + "found Python 3.6 async generator (Coconut can only backport async generators as far back as 3.5)", original, loc, - target="36", + target="35", ) - elif self.target_info >= (3, 5): - def_stmt = "async " + def_stmt else: decorators += "@_coconut.asyncio.coroutine\n" @@ -2235,7 +2284,7 @@ def deferred_code_proc(self, inputstring, add_code_at_start=False, ignore_names= # look for functions if line.startswith(funcwrapper): - func_id = int(line[len(funcwrapper):]) + func_id = int(assert_remove_prefix(line, funcwrapper)) original, loc, decorators, funcdef, is_async, in_method, is_stmt_lambda = self.get_ref("func", func_id) # process inner code @@ -3072,9 +3121,7 @@ def single_import(self, path, imp_as, type_ignore=False): imp_from += imp.rsplit("." + imp_as, 1)[0] imp, imp_as = imp_as, None - if imp_from is None and imp == "sys": - out.append((imp_as if imp_as is not None else imp) + " = _coconut_sys") - elif imp_as is not None and "." in imp_as: + if imp_as is not None and "." 
in imp_as: import_as_var = self.get_temp_var("import") out.append(import_stmt(imp_from, imp, import_as_var)) fake_mods = imp_as.split(".") @@ -3084,10 +3131,10 @@ def single_import(self, path, imp_as, type_ignore=False): "try:", openindent + mod_name, closeindent + "except:", - openindent + mod_name + ' = _coconut.types.ModuleType("' + mod_name + '")', + openindent + mod_name + ' = _coconut.types.ModuleType(_coconut_py_str("' + mod_name + '"))', closeindent + "else:", openindent + "if not _coconut.isinstance(" + mod_name + ", _coconut.types.ModuleType):", - openindent + mod_name + ' = _coconut.types.ModuleType("' + mod_name + '")' + closeindent * 2, + openindent + mod_name + ' = _coconut.types.ModuleType(_coconut_py_str("' + mod_name + '"))' + closeindent * 2, )) out.append(".".join(fake_mods) + " = " + import_as_var) else: @@ -3375,7 +3422,12 @@ def set_letter_literal_handle(self, tokens): def stmt_lambdef_handle(self, original, loc, tokens): """Process multi-line lambdef statements.""" - got_kwds, params, stmts_toks = tokens + got_kwds, params, stmts_toks, followed_by = tokens + + if followed_by == ",": + self.strict_err_or_warn("found statement lambda followed by comma; this isn't recommended as it can be unclear whether the comma is inside or outside the lambda (just wrap the lambda in parentheses)", original, loc) + else: + internal_assert(followed_by == "", "invalid stmt_lambdef followed_by", followed_by) is_async = False add_kwds = [] @@ -3557,32 +3609,22 @@ def funcname_typeparams_handle(self, tokens): def type_param_handle(self, original, loc, tokens): """Compile a type param into an assignment.""" - bounds = "" - kwargs = "" + args = "" + bound_op = None + bound_op_type = "" if "TypeVar" in tokens: TypeVarFunc = "TypeVar" + bound_op_type = "bound" if len(tokens) == 2: name_loc, name = tokens else: name_loc, name, bound_op, bound = tokens - if bound_op == "<=": - self.strict_err_or_warn( - "use of " + repr(bound_op) + " as a type parameter bound declaration operator is deprecated (Coconut style is to use '<:' operator)", - original, - loc, - ) - elif bound_op == ":": - self.strict_err( - "found use of " + repr(bound_op) + " as a type parameter bound declaration operator (Coconut style is to use '<:' operator)", - original, - loc, - ) - else: - self.internal_assert(bound_op == "<:", original, loc, "invalid type_param bound_op", bound_op) - bounds = ", bound=" + self.wrap_typedef(bound, for_py_typedef=False) - # uncomment this line whenever mypy adds support for infer_variance in TypeVar - # (and remove the warning about it in the DOCS) - # kwargs = ", infer_variance=True" + args = ", bound=" + self.wrap_typedef(bound, for_py_typedef=False) + elif "TypeVar constraint" in tokens: + TypeVarFunc = "TypeVar" + bound_op_type = "constraint" + name_loc, name, bound_op, constraints = tokens + args = ", " + ", ".join(self.wrap_typedef(c, for_py_typedef=False) for c in constraints) elif "TypeVarTuple" in tokens: TypeVarFunc = "TypeVarTuple" name_loc, name = tokens @@ -3592,6 +3634,27 @@ def type_param_handle(self, original, loc, tokens): else: raise CoconutInternalException("invalid type_param tokens", tokens) + kwargs = "" + if bound_op is not None: + self.internal_assert(bound_op_type in ("bound", "constraint"), original, loc, "invalid type_param bound_op", bound_op) + # # uncomment this line whenever mypy adds support for infer_variance in TypeVar + # # (and remove the warning about it in the DOCS) + # kwargs = ", infer_variance=True" + if bound_op == "<=": + self.strict_err_or_warn( + 
"use of " + repr(bound_op) + " as a type parameter " + bound_op_type + " declaration operator is deprecated (Coconut style is to use '<:' for bounds and ':' for constaints)", + original, + loc, + ) + else: + self.internal_assert(bound_op in (":", "<:"), original, loc, "invalid type_param bound_op", bound_op) + if bound_op_type == "bound" and bound_op != "<:" or bound_op_type == "constraint" and bound_op != ":": + self.strict_err( + "found use of " + repr(bound_op) + " as a type parameter " + bound_op_type + " declaration operator (Coconut style is to use '<:' for bounds and ':' for constaints)", + original, + loc, + ) + name_loc = int(name_loc) internal_assert(name_loc == loc if TypeVarFunc == "TypeVar" else name_loc >= loc, "invalid name location for " + TypeVarFunc, (name_loc, loc, tokens)) @@ -3609,10 +3672,10 @@ def type_param_handle(self, original, loc, tokens): typevar_info["typevar_locs"][name] = name_loc name = temp_name - return '{name} = _coconut.typing.{TypeVarFunc}("{name}"{bounds}{kwargs})\n'.format( + return '{name} = _coconut.typing.{TypeVarFunc}("{name}"{args}{kwargs})\n'.format( name=name, TypeVarFunc=TypeVarFunc, - bounds=bounds, + args=args, kwargs=kwargs, ) @@ -3655,11 +3718,14 @@ def type_alias_stmt_handle(self, tokens): paramdefs = () else: name, paramdefs, typedef = tokens - return "".join(paramdefs) + self.typed_assign_stmt_handle([ - name, - "_coconut.typing.TypeAlias", - self.wrap_typedef(typedef, for_py_typedef=False), - ]) + if self.target_info >= (3, 12): + return "type " + name + " = " + self.wrap_typedef(typedef, for_py_typedef=True) + else: + return "".join(paramdefs) + self.typed_assign_stmt_handle([ + name, + "_coconut.typing.TypeAlias", + self.wrap_typedef(typedef, for_py_typedef=False), + ]) def with_stmt_handle(self, tokens): """Process with statements.""" @@ -3979,6 +4045,51 @@ def base_match_for_stmt_handle(self, original, loc, tokens): body=body, ) + def async_with_for_stmt_handle(self, original, loc, tokens): + """Handle async with for loops.""" + if self.target_info < (3, 5): + raise self.make_err(CoconutTargetError, "async with for statements require Python 3.5+", original, loc, target="35") + + inner_toks, = tokens + + if "match" in inner_toks: + is_match = True + else: + internal_assert("normal" in inner_toks, "invalid async_with_for_stmt inner_toks", inner_toks) + is_match = False + + loop_vars, iter_item, body = inner_toks + temp_var = self.get_temp_var("async_with_for") + + if is_match: + loop = "async " + self.base_match_for_stmt_handle( + original, + loc, + [loop_vars, temp_var, body], + ) + else: + loop = handle_indentation( + """ +async for {loop_vars} in {temp_var}: +{body} + """, + ).format( + loop_vars=loop_vars, + temp_var=temp_var, + body=body, + ) + + return handle_indentation( + """ +async with {iter_item} as {temp_var}: + {loop} + """, + ).format( + iter_item=iter_item, + temp_var=temp_var, + loop=loop + ) + def string_atom_handle(self, tokens): """Handle concatenation of string literals.""" internal_assert(len(tokens) >= 1, "invalid string literal tokens", tokens) diff --git a/coconut/compiler/grammar.py b/coconut/compiler/grammar.py index 5099a2de5..e5943da66 100644 --- a/coconut/compiler/grammar.py +++ b/coconut/compiler/grammar.py @@ -55,6 +55,7 @@ memoize, get_clock_time, keydefaultdict, + assert_remove_prefix, ) from coconut.exceptions import ( CoconutInternalException, @@ -597,10 +598,10 @@ def typedef_op_item_handle(loc, tokens): op_name, = tokens op_name = op_name.strip("_") if op_name.startswith("coconut"): - op_name = 
op_name[len("coconut"):] + op_name = assert_remove_prefix(op_name, "coconut") op_name = op_name.lstrip("._") if op_name.startswith("operator."): - op_name = op_name[len("operator."):] + op_name = assert_remove_prefix(op_name, "operator.") proto = op_func_protocols.get(op_name) if proto is None: @@ -707,9 +708,9 @@ class Grammar(object): | invalid_syntax("") + ~Literal("|*") + Literal("|") | fixto(Literal("\u2228") | Literal("\u222a"), "|") + amp = ~amp_colon + Literal("&") | fixto(Literal("\u2229"), "&") + caret = Literal("^") + unsafe_bar = ~Literal("|>") + ~Literal("|*") + Literal("|") | fixto(Literal("\u222a"), "|") bar = ~rbanana + unsafe_bar | invalid_syntax("\xa6", "invalid broken bar character", greedy=True) percent = Literal("%") dollar = Literal("$") @@ -801,7 +802,7 @@ class Grammar(object): imag_j = caseless_literal("j") | fixto(caseless_literal("i", suppress=True), "j") basenum = combine( integer + dot + Optional(integer) - | Optional(integer) + dot + integer, + | Optional(integer) + dot + integer ) | integer sci_e = combine(caseless_literal("e") + Optional(plus | neg_minus)) numitem = ~(Literal("0") + Word(nums + "_", exact=1)) + combine(basenum + Optional(sci_e + integer)) @@ -965,13 +966,11 @@ class Grammar(object): ) + rbrace.suppress() dict_literal_ref = ( lbrace.suppress() - + Optional( - tokenlist( - Group(test + colon + test) - | dubstar_expr, - comma, - ), - ) + + Optional(tokenlist( + Group(test + colon + test) + | dubstar_expr, + comma, + )) + rbrace.suppress() ) test_expr = yield_expr | testlist_star_expr @@ -1054,7 +1053,7 @@ class Grammar(object): op_item = trace( typedef_op_item | partial_op_item - | base_op_item, + | base_op_item ) partial_op_atom_tokens = lparen.suppress() + partial_op_item_tokens + rparen.suppress() @@ -1093,10 +1092,10 @@ class Grammar(object): (star | dubstar) + tfpdef | star_sep_arg | slash_sep_arg - | tfpdef_default, - ), - ), - ), + | tfpdef_default + ) + ) + ) ) parameters = condense(lparen + args_list + rparen) set_args_list = trace( @@ -1108,10 +1107,10 @@ class Grammar(object): (star | dubstar) + setname + setarg_comma | star_sep_setarg | slash_sep_setarg - | setname + Optional(default) + setarg_comma, - ), - ), - ), + | setname + Optional(default) + setarg_comma + ) + ) + ) ) match_args_list = trace( Group( @@ -1121,12 +1120,12 @@ class Grammar(object): (star | dubstar) + match | star # not star_sep because pattern-matching can handle star separators on any Python version | slash # not slash_sep as above - | match + Optional(equals.suppress() + test), + | match + Optional(equals.suppress() + test) ), comma, - ), - ), - ), + ) + ) + ) ) call_item = ( @@ -1149,10 +1148,10 @@ class Grammar(object): Group( questionmark | unsafe_name + condense(equals + questionmark) - | call_item, + | call_item ), comma, - ), + ) ) methodcaller_args = ( itemlist(condense(call_item), comma) @@ -1165,7 +1164,7 @@ class Grammar(object): sliceop = condense(unsafe_colon + slicetest) subscript = condense( slicetest + sliceop + Optional(sliceop) - | Optional(subscript_star) + test, + | Optional(subscript_star) + test ) subscriptlist = itemlist(subscript, comma, suppress_trailing=False) | new_namedexpr_test @@ -1183,7 +1182,7 @@ class Grammar(object): anon_namedtuple_ref = tokenlist( Group( unsafe_name + maybe_typedef + equals.suppress() + test - | ellipsis_tokens + maybe_typedef + equals.suppress() + refname, + | ellipsis_tokens + maybe_typedef + equals.suppress() + refname ), comma, ) @@ -1205,7 +1204,7 @@ class Grammar(object): lparen.suppress() + 
typedef_tuple + rparen.suppress() - ), + ) ) list_expr = Forward() @@ -1215,7 +1214,7 @@ class Grammar(object): multisemicolon | attach(comprehension_expr, add_bracks_handle) | namedexpr_test + ~comma - | list_expr, + | list_expr ) + rbrack.suppress(), array_literal_handle, ) @@ -1244,7 +1243,7 @@ class Grammar(object): (new_namedexpr_test + FollowedBy(rbrace))("test") | (new_namedexpr_testlist_has_comma + FollowedBy(rbrace))("list") | addspace(new_namedexpr_test + comp_for + FollowedBy(rbrace))("comp") - | (testlist_star_namedexpr + FollowedBy(rbrace))("testlist_star_expr"), + | (testlist_star_namedexpr + FollowedBy(rbrace))("testlist_star_expr") ) set_literal_ref = lbrace.suppress() + setmaker + rbrace.suppress() set_letter_literal_ref = combine(set_letter + lbrace.suppress()) + Optional(setmaker) + rbrace.suppress() @@ -1263,7 +1262,7 @@ class Grammar(object): | set_letter_literal | lazy_list | typedef_ellipsis - | ellipsis, + | ellipsis ) atom = ( # known_atom must come before name to properly parse string prefixes @@ -1307,7 +1306,7 @@ class Grammar(object): trailer = simple_trailer | complex_trailer attrgetter_atom_tokens = dot.suppress() + unsafe_dotted_name + Optional( - lparen + Optional(methodcaller_args) + rparen.suppress(), + lparen + Optional(methodcaller_args) + rparen.suppress() ) attrgetter_atom = attach(attrgetter_atom_tokens, attrgetter_atom_handle) itemgetter_atom_tokens = dot.suppress() + OneOrMore(condense(Optional(dollar) + lbrack) + subscriptgrouplist + rbrack.suppress()) @@ -1344,7 +1343,7 @@ class Grammar(object): base_assign_item = condense( simple_assign | lparen + assignlist + rparen - | lbrack + assignlist + rbrack, + | lbrack + assignlist + rbrack ) star_assign_item_ref = condense(star + base_assign_item) assign_item = star_assign_item | base_assign_item @@ -1357,8 +1356,10 @@ class Grammar(object): type_param = Forward() type_param_bound_op = lt_colon | colon | le type_var_name = stores_loc_item + setname + type_param_constraint = lparen.suppress() + Group(tokenlist(typedef_test, comma, require_sep=True)) + rparen.suppress() type_param_ref = ( - (type_var_name + Optional(type_param_bound_op + typedef_test))("TypeVar") + (type_var_name + type_param_bound_op + type_param_constraint)("TypeVar constraint") + | (type_var_name + Optional(type_param_bound_op + typedef_test))("TypeVar") | (star.suppress() + type_var_name)("TypeVarTuple") | (dubstar.suppress() + type_var_name)("ParamSpec") ) @@ -1386,7 +1387,7 @@ class Grammar(object): disallow_keywords(reserved_vars) + ~any_string + atom_item - + Optional(power_in_impl_call), + + Optional(power_in_impl_call) ) impl_call = Forward() impl_call_ref = ( @@ -1397,7 +1398,7 @@ class Grammar(object): ZeroOrMore(unary) + ( impl_call | await_item + Optional(power) - ), + ) ) mulop = mul_star | div_slash | div_dubslash | percent | matrix_at @@ -1440,7 +1441,7 @@ class Grammar(object): infix_item = attach( Group(Optional(compose_expr)) + OneOrMore( - infix_op + Group(Optional(lambdef | compose_expr)), + infix_op + Group(Optional(lambdef | compose_expr)) ), infix_handle, ) @@ -1516,7 +1517,7 @@ class Grammar(object): partial_atom_tokens("partial"), partial_op_atom_tokens("op partial"), comp_pipe_expr("expr"), - ), + ) ) normal_pipe_expr = Forward() normal_pipe_expr_tokens = OneOrMore(pipe_item) + last_pipe_item @@ -1570,44 +1571,44 @@ class Grammar(object): | Group(ZeroOrMore(simple_stmt_item + semicolon.suppress())) + closing_stmt, ) general_stmt_lambdef = ( - Group( - any_len_perm( - keyword("async"), - keyword("copyclosure"), 
- ), - ) + keyword("def").suppress() + Group(any_len_perm( + keyword("async"), + keyword("copyclosure"), + )) + keyword("def").suppress() + stmt_lambdef_params + arrow.suppress() + stmt_lambdef_body ) match_stmt_lambdef = ( - Group( - any_len_perm( - keyword("match").suppress(), - keyword("async"), - keyword("copyclosure"), - ), - ) + keyword("def").suppress() + Group(any_len_perm( + keyword("match").suppress(), + keyword("async"), + keyword("copyclosure"), + )) + keyword("def").suppress() + stmt_lambdef_match_params + arrow.suppress() + stmt_lambdef_body ) - stmt_lambdef_ref = general_stmt_lambdef | match_stmt_lambdef + stmt_lambdef_ref = ( + general_stmt_lambdef + | match_stmt_lambdef + ) + ( + fixto(FollowedBy(comma), ",") + | fixto(always_match, "") + ) lambdef <<= addspace(lambdef_base + test) | stmt_lambdef lambdef_no_cond = trace(addspace(lambdef_base + test_no_cond)) typedef_callable_arg = Group( test("arg") - | (dubstar.suppress() + refname)("paramspec"), - ) - typedef_callable_params = Optional( - Group( - labeled_group(maybeparens(lparen, ellipsis_tokens, rparen), "ellipsis") - | lparen.suppress() + Optional(tokenlist(typedef_callable_arg, comma)) + rparen.suppress() - | labeled_group(negable_atom_item, "arg"), - ), + | (dubstar.suppress() + refname)("paramspec") ) + typedef_callable_params = Optional(Group( + labeled_group(maybeparens(lparen, ellipsis_tokens, rparen), "ellipsis") + | lparen.suppress() + Optional(tokenlist(typedef_callable_arg, comma)) + rparen.suppress() + | labeled_group(negable_atom_item, "arg") + )) unsafe_typedef_callable = attach( Optional(keyword("async"), default="") + typedef_callable_params @@ -1663,7 +1664,7 @@ class Grammar(object): setname + colon_eq + ( test + ~colon_eq | attach(namedexpr, add_parens_handle) - ), + ) ) namedexpr_test <<= ( test + ~colon_eq @@ -1744,26 +1745,26 @@ class Grammar(object): imp_as = keyword("as").suppress() - imp_name import_item = Group( unsafe_dotted_imp_name + imp_as - | dotted_imp_name, + | dotted_imp_name ) from_import_item = Group( unsafe_imp_name + imp_as - | imp_name, + | imp_name ) import_names = Group( maybeparens(lparen, tokenlist(import_item, comma), rparen) - | star, + | star ) from_import_names = Group( maybeparens(lparen, tokenlist(from_import_item, comma), rparen) - | star, + | star ) basic_import = keyword("import").suppress() - import_names import_from_name = condense( ZeroOrMore(unsafe_dot) + unsafe_dotted_name | OneOrMore(unsafe_dot) - | star, + | star ) from_import = ( keyword("from").suppress() @@ -1809,7 +1810,7 @@ class Grammar(object): | string_atom | complex_number | Optional(neg_minus) + number - | match_dotted_name_const, + | match_dotted_name_const ) empty_const = fixto( lparen + rparen @@ -1862,34 +1863,32 @@ class Grammar(object): | lparen.suppress() + matchlist_star + rparen.suppress() )("star") - base_match = trace( - Group( - (negable_atom_item + arrow.suppress() + match)("view") - | match_string - | match_const("const") - | (keyword_atom | keyword("is").suppress() + negable_atom_item)("is") - | (keyword("in").suppress() + negable_atom_item)("in") - | iter_match - | match_lazy("lazy") - | sequence_match - | star_match - | (lparen.suppress() + match + rparen.suppress())("paren") - | (lbrace.suppress() + matchlist_dict + Optional(dubstar.suppress() + (setname | condense(lbrace + rbrace)) + Optional(comma.suppress())) + rbrace.suppress())("dict") - | ( - Group(Optional(set_letter)) - + lbrace.suppress() - + ( - Group(tokenlist(match_const, comma, allow_trailing=False)) + 
Optional(comma.suppress() + set_star + Optional(comma.suppress())) - | Group(always_match) + set_star + Optional(comma.suppress()) - | Group(Optional(tokenlist(match_const, comma))) - ) + rbrace.suppress() - )("set") - | (keyword("data").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data") - | (keyword("class").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("class") - | (dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data_or_class") - | Optional(keyword("as").suppress()) + setname("var"), - ), - ) + base_match = trace(Group( + (negable_atom_item + arrow.suppress() + match)("view") + | match_string + | match_const("const") + | (keyword_atom | keyword("is").suppress() + negable_atom_item)("is") + | (keyword("in").suppress() + negable_atom_item)("in") + | iter_match + | match_lazy("lazy") + | sequence_match + | star_match + | (lparen.suppress() + match + rparen.suppress())("paren") + | (lbrace.suppress() + matchlist_dict + Optional(dubstar.suppress() + (setname | condense(lbrace + rbrace)) + Optional(comma.suppress())) + rbrace.suppress())("dict") + | ( + Group(Optional(set_letter)) + + lbrace.suppress() + + ( + Group(tokenlist(match_const, comma, allow_trailing=False)) + Optional(comma.suppress() + set_star + Optional(comma.suppress())) + | Group(always_match) + set_star + Optional(comma.suppress()) + | Group(Optional(tokenlist(match_const, comma))) + ) + rbrace.suppress() + )("set") + | (keyword("data").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data") + | (keyword("class").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("class") + | (dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data_or_class") + | Optional(keyword("as").suppress()) + setname("var") + )) matchlist_isinstance = base_match + OneOrMore(keyword("is").suppress() + negable_atom_item) isinstance_match = labeled_group(matchlist_isinstance, "isinstance_is") | base_match @@ -1937,29 +1936,25 @@ class Grammar(object): destructuring_stmt_ref, match_dotted_name_const_ref = disable_inside(base_destructuring_stmt, must_be_dotted_name + ~lparen) # both syntaxes here must be kept the same except for the keywords - case_match_co_syntax = trace( - Group( - (keyword("match") | keyword("case")).suppress() - + stores_loc_item - + many_match - + Optional(keyword("if").suppress() + namedexpr_test) - - full_suite, - ), - ) + case_match_co_syntax = trace(Group( + (keyword("match") | keyword("case")).suppress() + + stores_loc_item + + many_match + + Optional(keyword("if").suppress() + namedexpr_test) + - full_suite + )) cases_stmt_co_syntax = ( (keyword("cases") | keyword("case")) + testlist_star_namedexpr + colon.suppress() + newline.suppress() + indent.suppress() + Group(OneOrMore(case_match_co_syntax)) + dedent.suppress() + Optional(keyword("else").suppress() + suite) ) - case_match_py_syntax = trace( - Group( - keyword("case").suppress() - + stores_loc_item - + many_match - + Optional(keyword("if").suppress() + namedexpr_test) - - full_suite, - ), - ) + case_match_py_syntax = trace(Group( + keyword("case").suppress() + + stores_loc_item + + many_match + + Optional(keyword("if").suppress() + namedexpr_test) + - full_suite + )) cases_stmt_py_syntax = ( keyword("match") + testlist_star_namedexpr + colon.suppress() + newline.suppress() + indent.suppress() + Group(OneOrMore(case_match_py_syntax)) @@ -1973,26 +1968,34 @@ class 
Grammar(object): - ( lparen.suppress() + testlist + rparen.suppress() + end_simple_stmt_item | testlist - ), + ) ) if_stmt = condense( addspace(keyword("if") + condense(namedexpr_test + suite)) - ZeroOrMore(addspace(keyword("elif") - condense(namedexpr_test - suite))) - - Optional(else_stmt), + - Optional(else_stmt) ) while_stmt = addspace(keyword("while") - condense(namedexpr_test - suite - Optional(else_stmt))) for_stmt = addspace(keyword("for") + assignlist + keyword("in") - condense(new_testlist_star_expr - suite - Optional(else_stmt))) + suite_with_else_tokens = colon.suppress() + condense(nocolon_suite + Optional(else_stmt)) + base_match_for_stmt = Forward() - base_match_for_stmt_ref = keyword("for").suppress() + many_match + keyword("in").suppress() - new_testlist_star_expr - colon.suppress() - condense(nocolon_suite - Optional(else_stmt)) + base_match_for_stmt_ref = ( + keyword("for").suppress() + + many_match + + keyword("in").suppress() + - new_testlist_star_expr + - suite_with_else_tokens + ) match_for_stmt = Optional(keyword("match").suppress()) + base_match_for_stmt except_item = ( testlist_has_comma("list") | test("test") ) - Optional( - keyword("as").suppress() - setname, + keyword("as").suppress() - setname ) except_clause = attach(keyword("except") + except_item, except_handle) except_star_clause = Forward() @@ -2005,7 +2008,7 @@ class Grammar(object): | keyword("except") - suite | OneOrMore(except_star_clause - suite) ) - Optional(else_stmt) - Optional(keyword("finally") - suite) - ), + ) ) with_item = addspace(test + Optional(keyword("as") + base_assign_item)) @@ -2019,14 +2022,12 @@ class Grammar(object): op_tfpdef = unsafe_typedef_default | condense(setname + Optional(default)) op_funcdef_arg = setname | condense(lparen.suppress() + op_tfpdef + rparen.suppress()) op_funcdef_name = unsafe_backtick.suppress() + funcname_typeparams + unsafe_backtick.suppress() - op_funcdef = trace( - attach( - Group(Optional(op_funcdef_arg)) - + op_funcdef_name - + Group(Optional(op_funcdef_arg)), - op_funcdef_handle, - ), - ) + op_funcdef = trace(attach( + Group(Optional(op_funcdef_arg)) + + op_funcdef_name + + Group(Optional(op_funcdef_arg)), + op_funcdef_handle, + )) return_typedef = Forward() return_typedef_ref = arrow.suppress() + typedef_test @@ -2036,18 +2037,16 @@ class Grammar(object): name_match_funcdef = Forward() op_match_funcdef = Forward() - op_match_funcdef_arg = Group( - Optional( - Group( - ( - lparen.suppress() - + match - + Optional(equals.suppress() + test) - + rparen.suppress() - ) | interior_name_match, - ), - ), - ) + op_match_funcdef_arg = Group(Optional( + Group( + ( + lparen.suppress() + + match + + Optional(equals.suppress() + test) + + rparen.suppress() + ) | interior_name_match + ) + )) name_match_funcdef_ref = keyword("def").suppress() + funcname_typeparams + lparen.suppress() + match_args_list + match_guard + rparen.suppress() op_match_funcdef_ref = keyword("def").suppress() + op_match_funcdef_arg + op_funcdef_name + op_match_funcdef_arg + match_guard base_match_funcdef = trace(op_match_funcdef | name_match_funcdef) @@ -2061,21 +2060,17 @@ class Grammar(object): - dedent.suppress() ) ) - def_match_funcdef = trace( - attach( - base_match_funcdef - + end_func_colon - - func_suite, - join_match_funcdef, - ), - ) - match_def_modifiers = trace( - any_len_perm( - keyword("match").suppress(), - # addpattern is detected later - keyword("addpattern"), - ), - ) + def_match_funcdef = trace(attach( + base_match_funcdef + + end_func_colon + - func_suite, + 
join_match_funcdef, + )) + match_def_modifiers = trace(any_len_perm( + keyword("match").suppress(), + # addpattern is detected later + keyword("addpattern"), + )) match_funcdef = addspace(match_def_modifiers + def_match_funcdef) where_stmt = attach( @@ -2105,56 +2100,71 @@ class Grammar(object): | condense(newline - indent - math_funcdef_body - dedent) ) end_func_equals = return_typedef + equals.suppress() | fixto(equals, ":") - math_funcdef = trace( - attach( - condense(addspace(keyword("def") + base_funcdef) + end_func_equals) - math_funcdef_suite, - math_funcdef_handle, - ), - ) - math_match_funcdef = trace( - addspace( - match_def_modifiers - + attach( - base_match_funcdef - + end_func_equals - + ( - attach(implicit_return_stmt, make_suite_handle) - | ( - newline.suppress() - indent.suppress() - + Optional(docstring) - + attach(math_funcdef_body, make_suite_handle) - + dedent.suppress() - ) - ), - join_match_funcdef, + math_funcdef = trace(attach( + condense(addspace(keyword("def") + base_funcdef) + end_func_equals) - math_funcdef_suite, + math_funcdef_handle, + )) + math_match_funcdef = trace(addspace( + match_def_modifiers + + attach( + base_match_funcdef + + end_func_equals + + ( + attach(implicit_return_stmt, make_suite_handle) + | ( + newline.suppress() - indent.suppress() + + Optional(docstring) + + attach(math_funcdef_body, make_suite_handle) + + dedent.suppress() + ) ), - ), - ) + join_match_funcdef, + ) + )) async_stmt = Forward() + async_with_for_stmt = Forward() + async_with_for_stmt_ref = ( + labeled_group( + (keyword("async") + keyword("with") + keyword("for")).suppress() + + assignlist + keyword("in").suppress() + - test + - suite_with_else_tokens, + "normal", + ) + | labeled_group( + (any_len_perm( + keyword("match"), + required=(keyword("async"), keyword("with")), + ) + keyword("for")).suppress() + + many_match + keyword("in").suppress() + - test + - suite_with_else_tokens, + "match", + ) + ) async_stmt_ref = addspace( keyword("async") + (with_stmt | for_stmt | match_for_stmt) # handles async [match] for - | keyword("match").suppress() + keyword("async") + base_match_for_stmt, # handles match async for + | keyword("match").suppress() + keyword("async") + base_match_for_stmt # handles match async for + | async_with_for_stmt ) async_funcdef = keyword("async").suppress() + (funcdef | math_funcdef) - async_match_funcdef = trace( - addspace( - any_len_perm( - keyword("match").suppress(), - # addpattern is detected later - keyword("addpattern"), - required=(keyword("async").suppress(),), - ) + (def_match_funcdef | math_match_funcdef), - ), - ) + async_match_funcdef = trace(addspace( + any_len_perm( + keyword("match").suppress(), + # addpattern is detected later + keyword("addpattern"), + required=(keyword("async").suppress(),), + ) + (def_match_funcdef | math_match_funcdef), + )) async_keyword_normal_funcdef = Group( any_len_perm_at_least_one( keyword("yield"), keyword("copyclosure"), required=(keyword("async").suppress(),), - ), + ) ) + (funcdef | math_funcdef) async_keyword_match_funcdef = Group( any_len_perm_at_least_one( @@ -2164,7 +2174,7 @@ class Grammar(object): # addpattern is detected later keyword("addpattern"), required=(keyword("async").suppress(),), - ), + ) ) + (def_match_funcdef | math_match_funcdef) async_keyword_funcdef = Forward() async_keyword_funcdef_ref = async_keyword_normal_funcdef | async_keyword_match_funcdef @@ -2179,7 +2189,7 @@ class Grammar(object): any_len_perm_at_least_one( keyword("yield"), keyword("copyclosure"), - ), + ) ) + (funcdef | 
math_funcdef) keyword_match_funcdef = Group( any_len_perm_at_least_one( @@ -2188,7 +2198,7 @@ class Grammar(object): keyword("match").suppress(), # addpattern is detected later keyword("addpattern"), - ), + ) ) + (def_match_funcdef | math_match_funcdef) keyword_funcdef = Forward() keyword_funcdef_ref = keyword_normal_funcdef | keyword_match_funcdef @@ -2202,27 +2212,23 @@ class Grammar(object): ) datadef = Forward() - data_args = Group( - Optional( - lparen.suppress() + ZeroOrMore( - Group( - # everything here must end with arg_comma - (unsafe_name + arg_comma.suppress())("name") - | (unsafe_name + equals.suppress() + test + arg_comma.suppress())("default") - | (star.suppress() + unsafe_name + arg_comma.suppress())("star") - | (unsafe_name + colon.suppress() + typedef_test + equals.suppress() + test + arg_comma.suppress())("type default") - | (unsafe_name + colon.suppress() + typedef_test + arg_comma.suppress())("type"), - ), - ) + rparen.suppress(), - ), - ) + data_args = Group(Optional( + lparen.suppress() + ZeroOrMore(Group( + # everything here must end with arg_comma + (unsafe_name + arg_comma.suppress())("name") + | (unsafe_name + equals.suppress() + test + arg_comma.suppress())("default") + | (star.suppress() + unsafe_name + arg_comma.suppress())("star") + | (unsafe_name + colon.suppress() + typedef_test + equals.suppress() + test + arg_comma.suppress())("type default") + | (unsafe_name + colon.suppress() + typedef_test + arg_comma.suppress())("type") + )) + rparen.suppress() + )) data_inherit = Optional(keyword("from").suppress() + testlist) data_suite = Group( colon.suppress() - ( (newline.suppress() + indent.suppress() + Optional(docstring) + Group(OneOrMore(stmt)) - dedent.suppress())("complex") | (newline.suppress() + indent.suppress() + docstring - dedent.suppress() | docstring)("docstring") | simple_stmt("simple") - ) | newline("empty"), + ) | newline("empty") ) datadef_ref = ( Optional(decorators, default="") @@ -2236,7 +2242,7 @@ class Grammar(object): match_datadef = Forward() match_data_args = lparen.suppress() + Group( - match_args_list + match_guard, + match_args_list + match_guard ) + rparen.suppress() # we don't support type_params here since we don't support types match_datadef_ref = ( @@ -2255,8 +2261,8 @@ class Grammar(object): at.suppress() - Group( simple_decorator - | complex_decorator, - ), + | complex_decorator + ) ) decoratable_normal_funcdef_stmt = Forward() @@ -2276,7 +2282,7 @@ class Grammar(object): if_stmt | try_stmt | match_stmt - | passthrough_stmt, + | passthrough_stmt ) compound_stmt = trace( decoratable_class_stmt @@ -2287,7 +2293,7 @@ class Grammar(object): | async_stmt | match_for_stmt | simple_compound_stmt - | where_stmt, + | where_stmt ) endline_semicolon = Forward() endline_semicolon_ref = semicolon.suppress() + newline @@ -2298,7 +2304,7 @@ class Grammar(object): | pass_stmt | del_stmt | global_stmt - | nonlocal_stmt, + | nonlocal_stmt ) special_stmt = ( keyword_stmt @@ -2315,7 +2321,7 @@ class Grammar(object): simple_stmt <<= condense( simple_stmt_item + ZeroOrMore(fixto(semicolon, "\n") + simple_stmt_item) - + (newline | endline_semicolon), + + (newline | endline_semicolon) ) anything_stmt = Forward() stmt <<= final( @@ -2324,7 +2330,7 @@ class Grammar(object): # must be after destructuring due to ambiguity | cases_stmt # at the very end as a fallback case for the anything parser - | anything_stmt, + | anything_stmt ) base_suite <<= condense(newline + indent - OneOrMore(stmt) - dedent) simple_suite = attach(stmt, make_suite_handle) @@ 
-2348,7 +2354,8 @@ class Grammar(object): unsafe_anything_stmt = originalTextFor(regex_item("[^\n]+\n+")) unsafe_xonsh_command = originalTextFor( (Optional(at) + dollar | bang) - + (parens | brackets | braces | unsafe_name), + + ~(lparen + rparen | lbrack + rbrack | lbrace + rbrace) + + (parens | brackets | braces | unsafe_name) ) xonsh_parser, _anything_stmt, _xonsh_command = disable_outside( single_parser, @@ -2368,11 +2375,12 @@ class Grammar(object): whitespace_regex = compile_regex(r"\s") - def_regex = compile_regex(r"((async|addpattern|copyclosure)\s+)*def\b") + def_regex = compile_regex(r"\b((async|addpattern|copyclosure)\s+)*def\b") yield_regex = compile_regex(r"\byield(?!\s+_coconut\.asyncio\.From)\b") + yield_from_regex = compile_regex(r"\byield\s+from\b") - tco_disable_regex = compile_regex(r"try\b|(async\s+)?(with\b|for\b)|while\b") - return_regex = compile_regex(r"return\b") + tco_disable_regex = compile_regex(r"\b(try\b|(async\s+)?(with\b|for\b)|while\b)") + return_regex = compile_regex(r"\breturn\b") noqa_regex = compile_regex(r"\b[Nn][Oo][Qq][Aa]\b") @@ -2410,7 +2418,7 @@ def get_tre_return_grammar(self, func_name): dot + unsafe_name | brackets # don't match the last set of parentheses - | parens + ~end_marker + ~rparen, + | parens + ~end_marker + ~rparen ), ) + original_function_call_tokens, @@ -2429,7 +2437,7 @@ def get_tre_return_grammar(self, func_name): | brackets | braces | lambdas - | ~colon + any_char, + | ~colon + any_char ) rest_of_tfpdef = originalTextFor( ZeroOrMore( @@ -2438,27 +2446,25 @@ def get_tre_return_grammar(self, func_name): | brackets | braces | lambdas - | ~comma + ~rparen + ~equals + any_char, - ), + | ~comma + ~rparen + ~equals + any_char + ) ) tfpdef_tokens = unsafe_name - Optional(colon - rest_of_tfpdef).suppress() tfpdef_default_tokens = tfpdef_tokens - Optional(equals - rest_of_tfpdef) type_comment = Optional( comment_tokens - | passthrough_item, + | passthrough_item ).suppress() parameters_tokens = Group( - Optional( - tokenlist( - Group( - dubstar - tfpdef_tokens - | star - Optional(tfpdef_tokens) - | slash - | tfpdef_default_tokens, - ) + type_comment, - comma + type_comment, - ), - ), + Optional(tokenlist( + Group( + dubstar - tfpdef_tokens + | star - Optional(tfpdef_tokens) + | slash + | tfpdef_default_tokens + ) + type_comment, + comma + type_comment, + )) ) split_func = ( diff --git a/coconut/compiler/header.py b/coconut/compiler/header.py index 1ba8b4188..39ff27fca 100644 --- a/coconut/compiler/header.py +++ b/coconut/compiler/header.py @@ -42,6 +42,7 @@ from coconut.util import ( univ_open, get_target_info, + assert_remove_prefix, ) from coconut.compiler.util import ( split_comment, @@ -60,7 +61,7 @@ def gethash(compiled): if len(lines) < 3 or not lines[2].startswith(hash_prefix): return None else: - return lines[2][len(hash_prefix):] + return assert_remove_prefix(lines[2], hash_prefix) def minify_header(compiled): @@ -305,8 +306,8 @@ def pattern_prepender(func): return pattern_prepender''' if not strict else r'''def prepattern(*args, **kwargs): - """Deprecated built-in 'prepattern' disabled by --strict compilation; use 'addpattern' instead.""" - raise _coconut.NameError("deprecated built-in 'prepattern' disabled by --strict compilation; use 'addpattern' instead")''' + """Deprecated Coconut built-in 'prepattern' disabled by --strict compilation; use 'addpattern' instead.""" + raise _coconut.NameError("deprecated Coconut built-in 'prepattern' disabled by --strict compilation; use 'addpattern' instead")''' ), def_datamaker=( r'''def 
datamaker(data_type): @@ -314,14 +315,14 @@ def pattern_prepender(func): return _coconut.functools.partial(makedata, data_type)''' if not strict else r'''def datamaker(*args, **kwargs): - """Deprecated built-in 'datamaker' disabled by --strict compilation; use 'makedata' instead.""" - raise _coconut.NameError("deprecated built-in 'datamaker' disabled by --strict compilation; use 'makedata' instead")''' + """Deprecated Coconut built-in 'datamaker' disabled by --strict compilation; use 'makedata' instead.""" + raise _coconut.NameError("deprecated Coconut built-in 'datamaker' disabled by --strict compilation; use 'makedata' instead")''' ), of_is_call=( "of = call" if not strict else r'''def of(*args, **kwargs): - """Deprecated built-in 'of' disabled by --strict compilation; use 'call' instead.""" - raise _coconut.NameError("deprecated built-in 'of' disabled by --strict compilation; use 'call' instead")''' + """Deprecated Coconut built-in 'of' disabled by --strict compilation; use 'call' instead.""" + raise _coconut.NameError("deprecated Coconut built-in 'of' disabled by --strict compilation; use 'call' instead")''' ), return_method_of_self=pycondition( (3,), @@ -534,28 +535,36 @@ async def __anext__(self): underscore_imports="{tco_comma}{call_set_names_comma}{handle_cls_args_comma}_namedtuple_of, _coconut, _coconut_Expected, _coconut_MatchError, _coconut_SupportsAdd, _coconut_SupportsMinus, _coconut_SupportsMul, _coconut_SupportsPow, _coconut_SupportsTruediv, _coconut_SupportsFloordiv, _coconut_SupportsMod, _coconut_SupportsAnd, _coconut_SupportsXor, _coconut_SupportsOr, _coconut_SupportsLshift, _coconut_SupportsRshift, _coconut_SupportsMatmul, _coconut_SupportsInv, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_raise, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op, _coconut_multi_dim_arr, _coconut_mk_anon_namedtuple, _coconut_matmul, _coconut_py_str, _coconut_flatten, _coconut_multiset, _coconut_back_none_pipe, _coconut_back_none_star_pipe, _coconut_back_none_dubstar_pipe, _coconut_forward_none_compose, _coconut_back_none_compose, _coconut_forward_none_star_compose, _coconut_back_none_star_compose, _coconut_forward_none_dubstar_compose, _coconut_back_none_dubstar_compose, _coconut_call_or_coefficient, _coconut_in, _coconut_not_in".format(**format_dict), import_typing=pycondition( (3, 5), - if_ge="import typing", + if_ge=''' +import typing as _typing +for _name in dir(_typing): + if not hasattr(typing, _name): + setattr(typing, _name, getattr(_typing, _name)) + ''', if_lt=''' -class typing_mock{object}: - """The typing module is not available at runtime in Python 3.4 or earlier; - try hiding your typedefs behind an 'if TYPE_CHECKING:' block.""" - TYPE_CHECKING = False - Any = Ellipsis - def cast(self, t, x): +if not hasattr(typing, "TYPE_CHECKING"): + typing.TYPE_CHECKING = False +if not hasattr(typing, "Any"): + 
typing.Any = Ellipsis +if not hasattr(typing, "cast"): + def cast(t, x): """typing.cast[T](t: Type[T], x: Any) -> T = x""" return x - def __getattr__(self, name): - raise _coconut.ImportError("the typing module is not available at runtime in Python 3.4 or earlier; try hiding your typedefs behind an 'if TYPE_CHECKING:' block") + typing.cast = cast + cast = staticmethod(cast) +if not hasattr(typing, "TypeVar"): def TypeVar(name, *args, **kwargs): """Runtime mock of typing.TypeVar for Python 3.4 and earlier.""" return name + typing.TypeVar = TypeVar + TypeVar = staticmethod(TypeVar) +if not hasattr(typing, "Generic"): class Generic_mock{object}: """Runtime mock of typing.Generic for Python 3.4 and earlier.""" __slots__ = () def __getitem__(self, vars): return _coconut.object - Generic = Generic_mock() -typing = typing_mock() + typing.Generic = Generic_mock() '''.format(**format_dict), indent=1, ), @@ -563,10 +572,11 @@ def __getitem__(self, vars): import_typing_36=pycondition( (3, 6), if_lt=''' -def NamedTuple(name, fields): - return _coconut.collections.namedtuple(name, [x for x, t in fields]) -typing.NamedTuple = NamedTuple -NamedTuple = staticmethod(NamedTuple) +if not hasattr(typing, "NamedTuple"): + def NamedTuple(name, fields): + return _coconut.collections.namedtuple(name, [x for x, t in fields]) + typing.NamedTuple = NamedTuple + NamedTuple = staticmethod(NamedTuple) ''', indent=1, newline=True, @@ -574,15 +584,12 @@ def NamedTuple(name, fields): import_typing_38=pycondition( (3, 8), if_lt=''' -try: - from typing_extensions import Protocol -except ImportError: +if not hasattr(typing, "Protocol"): class YouNeedToInstallTypingExtensions{object}: __slots__ = () def __init__(self): raise _coconut.TypeError('Protocols cannot be instantiated') - Protocol = YouNeedToInstallTypingExtensions -typing.Protocol = Protocol + typing.Protocol = YouNeedToInstallTypingExtensions '''.format(**format_dict), indent=1, newline=True, @@ -590,18 +597,15 @@ def __init__(self): import_typing_310=pycondition( (3, 10), if_lt=''' -try: - from typing_extensions import ParamSpec, TypeAlias, Concatenate -except ImportError: +if not hasattr(typing, "ParamSpec"): def ParamSpec(name, *args, **kwargs): """Runtime mock of typing.ParamSpec for Python 3.9 and earlier.""" return _coconut.typing.TypeVar(name) + typing.ParamSpec = ParamSpec +if not hasattr(typing, "TypeAlias") or not hasattr(typing, "Concatenate"): class you_need_to_install_typing_extensions{object}: __slots__ = () - TypeAlias = Concatenate = you_need_to_install_typing_extensions() -typing.ParamSpec = ParamSpec -typing.TypeAlias = TypeAlias -typing.Concatenate = Concatenate + typing.TypeAlias = typing.Concatenate = you_need_to_install_typing_extensions() '''.format(**format_dict), indent=1, newline=True, @@ -609,17 +613,15 @@ class you_need_to_install_typing_extensions{object}: import_typing_311=pycondition( (3, 11), if_lt=''' -try: - from typing_extensions import TypeVarTuple, Unpack -except ImportError: +if not hasattr(typing, "TypeVarTuple"): def TypeVarTuple(name, *args, **kwargs): """Runtime mock of typing.TypeVarTuple for Python 3.10 and earlier.""" return _coconut.typing.TypeVar(name) + typing.TypeVarTuple = TypeVarTuple +if not hasattr(typing, "Unpack"): class you_need_to_install_typing_extensions{object}: __slots__ = () - Unpack = you_need_to_install_typing_extensions() -typing.TypeVarTuple = TypeVarTuple -typing.Unpack = Unpack + typing.Unpack = you_need_to_install_typing_extensions() '''.format(**format_dict), indent=1, newline=True, @@ -747,7 
+749,7 @@ def getheader(which, use_hash, target, no_tco, strict, no_wrap): header += "_coconut_header_info = " + header_info + "\n" if which.startswith("package"): - levels_up = int(which[len("package:"):]) + levels_up = int(assert_remove_prefix(which, "package:")) coconut_file_dir = "_coconut_os.path.dirname(_coconut_os.path.abspath(__file__))" for _ in range(levels_up): coconut_file_dir = "_coconut_os.path.dirname(" + coconut_file_dir + ")" @@ -791,7 +793,7 @@ def getheader(which, use_hash, target, no_tco, strict, no_wrap): if which == "sys": return header + '''from coconut.__coconut__ import * from coconut.__coconut__ import {underscore_imports} -'''.format(**format_dict) +'''.format(**format_dict) + section("Compiled Coconut") # __coconut__, code, file diff --git a/coconut/compiler/matching.py b/coconut/compiler/matching.py index 2bc2e5a8d..96765f91a 100644 --- a/coconut/compiler/matching.py +++ b/coconut/compiler/matching.py @@ -1064,7 +1064,7 @@ def match_class(self, tokens, item): match_args_var = other_cls_matcher.get_temp_var() other_cls_matcher.add_def( handle_indentation(""" -{match_args_var} = _coconut.getattr({cls_name}, '__match_args__', ()) +{match_args_var} = _coconut.getattr({cls_name}, '__match_args__', ()) {type_any} {type_ignore} if not _coconut.isinstance({match_args_var}, _coconut.tuple): raise _coconut.TypeError("{cls_name}.__match_args__ must be a tuple") if _coconut.len({match_args_var}) < {num_pos_matches}: @@ -1073,6 +1073,8 @@ def match_class(self, tokens, item): cls_name=cls_name, match_args_var=match_args_var, num_pos_matches=len(pos_matches), + type_any=self.comp.wrap_comment(" type: _coconut.typing.Any"), + type_ignore=self.comp.type_ignore_comment(), ), ) with other_cls_matcher.down_a_level(): @@ -1161,7 +1163,7 @@ def match_data_or_class(self, tokens, item): self.add_def( handle_indentation( """ -{is_data_result_var} = _coconut.getattr({cls_name}, "{is_data_var}", False) or _coconut.isinstance({cls_name}, _coconut.tuple) and _coconut.all(_coconut.getattr(_coconut_x, "{is_data_var}", False) for _coconut_x in {cls_name}){type_ignore} +{is_data_result_var} = _coconut.getattr({cls_name}, "{is_data_var}", False) or _coconut.isinstance({cls_name}, _coconut.tuple) and _coconut.all(_coconut.getattr(_coconut_x, "{is_data_var}", False) for _coconut_x in {cls_name}) {type_ignore} """, ).format( is_data_result_var=is_data_result_var, diff --git a/coconut/compiler/templates/header.py_template b/coconut/compiler/templates/header.py_template index 5fa1e9760..33f3b8503 100644 --- a/coconut/compiler/templates/header.py_template +++ b/coconut/compiler/templates/header.py_template @@ -17,11 +17,32 @@ class _coconut{object}:{COMMENT.EVERYTHING_HERE_MUST_BE_COPIED_TO_STUB_FILE} from multiprocessing import dummy as multiprocessing_dummy {maybe_bind_lru_cache}{import_copyreg} {import_asyncio} + try: + import async_generator + except ImportError: + class you_need_to_install_async_generator{object}: + __slots__ = () + async_generator = you_need_to_install_async_generator() {import_pickle} {import_OrderedDict} {import_collections_abc} + typing = types.ModuleType(_coconut_py_str("typing")) + try: + import typing_extensions + except ImportError: + typing_extensions = None + else: + for _name in dir(typing_extensions): + if not _name.startswith("__"): + setattr(typing, _name, getattr(typing_extensions, _name)) + typing.__doc__ = "Coconut version of typing that makes use of typing.typing_extensions when possible.\n\n" + (getattr(typing, "__doc__") or "The typing module is not available 
at runtime in Python 3.4 or earlier; try hiding your typedefs behind an 'if TYPE_CHECKING:' block.") {import_typing} -{import_typing_36}{import_typing_38}{import_typing_310}{import_typing_311}{set_zip_longest} +{import_typing_36}{import_typing_38}{import_typing_310}{import_typing_311} + def _typing_getattr(name): + raise _coconut.AttributeError("typing.%s is not available on the current Python version and couldn't be looked up in typing_extensions; try hiding your typedefs behind an 'if TYPE_CHECKING:' block" % (name,)) + typing.__getattr__ = _typing_getattr + _typing_getattr = staticmethod(_typing_getattr) +{set_zip_longest} try: import numpy except ImportError: @@ -1217,7 +1238,7 @@ class groupsof(_coconut_has_iter): def __copy__(self): return self.__class__(self.group_size, self.get_new_iter()) class recursive_iterator(_coconut_baseclass): - """Decorator that optimizes a recursive function that returns an iterator (e.g. a recursive generator).""" + """Decorator that memoizes a recursive function that returns an iterator (e.g. a recursive generator).""" __slots__ = ("func", "reit_store", "backup_reit_store") def __init__(self, func): self.func = func @@ -1485,7 +1506,14 @@ def makedata(data_type, *args, **kwargs): {class_amap} def fmap(func, obj, **kwargs): """fmap(func, obj) creates a copy of obj with func applied to its contents. - Supports asynchronous iterables, mappings (maps over .items()), and numpy arrays (uses np.vectorize). + + Supports: + * Coconut data types + * `str`, `dict`, `list`, `tuple`, `set`, `frozenset` + * `dict` (maps over .items()) + * asynchronous iterables + * numpy arrays (uses np.vectorize) + * pandas objects (uses .apply) Override by defining obj.__fmap__(func). """ diff --git a/coconut/compiler/util.py b/coconut/compiler/util.py index e6de4537f..bebec4a09 100644 --- a/coconut/compiler/util.py +++ b/coconut/compiler/util.py @@ -95,6 +95,7 @@ comment_chars, non_syntactic_newline, allow_explicit_keyword_vars, + reserved_prefix, ) from coconut.exceptions import ( CoconutException, @@ -702,24 +703,6 @@ def maybeparens(lparen, item, rparen, prefer_parens=False): return item | lparen.suppress() + item + rparen.suppress() -@memoize() -def tokenlist(item, sep, suppress=True, allow_trailing=True, at_least_two=False, require_sep=False): - """Create a list of tokens matching the item.""" - if suppress: - sep = sep.suppress() - if not require_sep: - out = item + (OneOrMore if at_least_two else ZeroOrMore)(sep + item) - if allow_trailing: - out += Optional(sep) - elif not allow_trailing: - out = item + OneOrMore(sep + item) - elif at_least_two: - out = item + OneOrMore(sep + item) + Optional(sep) - else: - out = OneOrMore(item + sep) + Optional(item) - return out - - def interleaved_tokenlist(required_item, other_item, sep, allow_trailing=False, at_least_two=False): """Create a grammar to match interleaved required_items and other_items, where required_item must show up at least once.""" @@ -750,6 +733,30 @@ def interleaved_tokenlist(required_item, other_item, sep, allow_trailing=False, return out +@memoize() +def tokenlist(item, sep, suppress=True, allow_trailing=True, at_least_two=False, require_sep=False, suppress_trailing=False): + """Create a list of tokens matching the item.""" + if suppress: + sep = sep.suppress() + if suppress_trailing: + trailing_sep = sep.suppress() + else: + trailing_sep = sep + if not require_sep: + out = item + (OneOrMore if at_least_two else ZeroOrMore)(sep + item) + if allow_trailing: + out += Optional(trailing_sep) + elif not 
allow_trailing: + out = item + OneOrMore(sep + item) + elif at_least_two: + out = item + OneOrMore(sep + item) + Optional(trailing_sep) + elif suppress_trailing: + out = item + OneOrMore(sep + item) + Optional(trailing_sep) | item + trailing_sep + else: + out = OneOrMore(item + sep) + Optional(item) + return out + + def add_list_spacing(tokens): """Parse action to add spacing after seps but not elsewhere.""" out = [] @@ -764,21 +771,19 @@ def add_list_spacing(tokens): add_list_spacing.ignore_one_token = True -def itemlist(item, sep, suppress_trailing=True): +def itemlist(item, sep, suppress_trailing=True, **kwargs): """Create a list of items separated by seps with comma-like spacing added. A trailing sep is allowed.""" return attach( - item - + ZeroOrMore(sep + item) - + Optional(sep.suppress() if suppress_trailing else sep), + tokenlist(item, sep, suppress=False, suppress_trailing=suppress_trailing, **kwargs), add_list_spacing, ) -def exprlist(expr, op): +def exprlist(expr, op, **kwargs): """Create a list of exprs separated by ops with plus-like spacing added. No trailing op is allowed.""" - return addspace(expr + ZeroOrMore(op + expr)) + return addspace(tokenlist(expr, op, suppress=False, allow_trailing=False, **kwargs)) def stores_loc_action(loc, tokens): @@ -1363,3 +1368,37 @@ def add_int_and_strs(int_part=0, str_parts=(), parens=False): if parens: out = "(" + out + ")" return out + + +# ----------------------------------------------------------------------------------------------------------------------- +# PYTEST: +# ----------------------------------------------------------------------------------------------------------------------- + + +class FixPytestNames(ast.NodeTransformer): + """Renames invalid names added by pytest assert rewriting.""" + + def fix_name(self, name): + """Make the given pytest name a valid but non-colliding identifier.""" + return name.replace("@", reserved_prefix + "_pytest_") + + def visit_Name(self, node): + """Special method to visit ast.Names.""" + node.id = self.fix_name(node.id) + return node + + def visit_alias(self, node): + """Special method to visit ast.aliases.""" + node.asname = self.fix_name(node.asname) + return node + + +def pytest_rewrite_asserts(code, module_name=reserved_prefix + "_pytest_module"): + """Uses pytest to rewrite the assert statements in the given code.""" + from _pytest.assertion.rewrite import rewrite_asserts # hidden since it's not always available + + module_name = module_name.encode("utf-8") + tree = ast.parse(code) + rewrite_asserts(tree, module_name) + fixed_tree = ast.fix_missing_locations(FixPytestNames().visit(tree)) + return ast.unparse(fixed_tree) diff --git a/coconut/constants.py b/coconut/constants.py index fc8815357..c6bc04fdd 100644 --- a/coconut/constants.py +++ b/coconut/constants.py @@ -90,6 +90,7 @@ def get_bool_env_var(env_var, default=False): XONSH = ( PY35 and not (PYPY and PY39) + and (PY38 or not PY36) ) py_version_str = sys.version.split()[0] @@ -423,12 +424,21 @@ def get_bool_env_var(env_var, default=False): "itertools.zip_longest": ("itertools./izip_longest", (3,)), "math.gcd": ("fractions./gcd", (3, 5)), "time.process_time": ("time./clock", (3, 3)), - # _dummy_thread was removed in Python 3.9, so this no longer works + # # _dummy_thread was removed in Python 3.9, so this no longer works # "_dummy_thread": ("dummy_thread", (3,)), # third-party backports "asyncio": ("trollius", (3, 4)), "enum": ("aenum", (3, 4)), + "contextlib.asynccontextmanager": ("async_generator./asynccontextmanager", (3, 7)), + 
"contextlib.aclosing": ("async_generator./aclosing", (3, 10)), + "inspect.isasyncgen": ("async_generator./isasyncgen", (3, 6)), + "inspect.isasyncgenfunction": ("async_generator./isasyncgenfunction", (3, 6)), + "sys.get_asyncgen_hooks": ("async_generator./get_asyncgen_hooks", (3, 6)), + "sys.set_asyncgen_hooks": ("async_generator./set_asyncgen_hooks", (3, 6)), + + # typing_extensions (even though we have special support for getting + # these from typing, we need to do this for the sake of type checkers) "typing.AsyncContextManager": ("typing_extensions./AsyncContextManager", (3, 6)), "typing.AsyncGenerator": ("typing_extensions./AsyncGenerator", (3, 6)), "typing.AsyncIterable": ("typing_extensions./AsyncIterable", (3, 6)), @@ -481,6 +491,10 @@ def get_bool_env_var(env_var, default=False): "typing.Unpack": ("typing_extensions./Unpack", (3, 11)), } +import_existing = { + "typing": "_coconut.typing", +} + self_match_types = ( "bool", "bytearray", @@ -527,7 +541,7 @@ def get_bool_env_var(env_var, default=False): '__file__', '__annotations__', '__debug__', - # don't include builtins that aren't always made available by Coconut: + # # don't include builtins that aren't always made available by Coconut: # 'BlockingIOError', 'ChildProcessError', 'ConnectionError', # 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError', # 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError', @@ -598,8 +612,8 @@ def get_bool_env_var(env_var, default=False): ) # always use atomic --xxx=yyy rather than --xxx yyy -coconut_run_args = ("--run", "--target=sys", "--line-numbers", "--quiet") -coconut_run_verbose_args = ("--run", "--target=sys", "--line-numbers") +coconut_run_verbose_args = ("--run", "--target=sys", "--line-numbers", "--keep-lines") +coconut_run_args = coconut_run_verbose_args + ("--quiet",) coconut_import_hook_args = ("--target=sys", "--line-numbers", "--keep-lines", "--quiet") default_mypy_args = ( @@ -767,11 +781,8 @@ def get_bool_env_var(env_var, default=False): "\u2260", # != "\u2264", # <= "\u2265", # >= - "\u2227", # & "\u2229", # & - "\u2228", # | "\u222a", # | - "\u22bb", # ^ "\xab", # << "\xbb", # >> "\u2026", # ... 
@@ -799,11 +810,20 @@ def get_bool_env_var(env_var, default=False): PURE_PYTHON = get_bool_env_var(pure_python_env_var) # the different categories here are defined in requirements.py, -# anything after a colon is ignored but allows different versions -# for different categories, and tuples denote the use of environment -# markers as specified in requirements.py +# tuples denote the use of environment markers all_reqs = { "main": ( + ("argparse", "py<27"), + ("psutil", "py>=27"), + ("futures", "py<3"), + ("backports.functools-lru-cache", "py<3"), + ("prompt_toolkit", "py<3"), + ("prompt_toolkit", "py>=3"), + ("pygments", "py<39"), + ("pygments", "py>=39"), + ("typing_extensions", "py==35"), + ("typing_extensions", "py==36"), + ("typing_extensions", "py37"), ), "cpython": ( "cPyparsing", @@ -811,32 +831,12 @@ def get_bool_env_var(env_var, default=False): "purepython": ( "pyparsing", ), - "non-py26": ( - "psutil", - ), - "py2": ( - "futures", - "backports.functools-lru-cache", - ("prompt_toolkit", "mark2"), - ), - "py3": ( - ("prompt_toolkit", "mark3"), - ), - "py26": ( - "argparse", - ), - "py<39": ( - ("pygments", "mark<39"), - ), - "py39": ( - ("pygments", "mark39"), - ), "kernel": ( - ("ipython", "py2"), + ("ipython", "py<3"), ("ipython", "py3;py<37"), ("ipython", "py==37"), ("ipython", "py38"), - ("ipykernel", "py2"), + ("ipykernel", "py<3"), ("ipykernel", "py3;py<38"), ("ipykernel", "py38"), ("jupyter-client", "py<35"), @@ -844,7 +844,7 @@ def get_bool_env_var(env_var, default=False): ("jupyter-client", "py36"), ("jedi", "py<39"), ("jedi", "py39"), - ("pywinpty", "py2;windows"), + ("pywinpty", "py<3;windows"), ), "jupyter": ( "jupyter", @@ -858,24 +858,22 @@ def get_bool_env_var(env_var, default=False): "mypy": ( "mypy[python2]", "types-backports", - ("typing_extensions", "py==35"), - ("typing_extensions", "py==36"), - ("typing_extensions", "py37"), + ("typing", "py<35"), ), "watch": ( "watchdog", ), "xonsh": ( - "xonsh", + ("xonsh", "py<36"), + ("xonsh", "py>=36;py<38"), + ("xonsh", "py38"), ), "backports": ( - ("trollius", "py2;cpy"), + ("trollius", "py<3;cpy"), ("aenum", "py<34"), ("dataclasses", "py==36"), ("typing", "py<35"), - ("typing_extensions", "py==35"), - ("typing_extensions", "py==36"), - ("typing_extensions", "py37"), + ("async_generator", "py3"), ), "dev": ( ("pre-commit", "py3"), @@ -884,16 +882,17 @@ def get_bool_env_var(env_var, default=False): ), "docs": ( "sphinx", - ("pygments", "mark<39"), - ("pygments", "mark39"), + ("pygments", "py<39"), + ("pygments", "py>=39"), "myst-parser", "pydata-sphinx-theme", ), "tests": ( - "pytest", + ("pytest", "py<36"), + ("pytest", "py36"), "pexpect", ("numpy", "py34"), - ("numpy", "py2;cpy"), + ("numpy", "py<3;cpy"), ("pandas", "py36"), ), } @@ -902,17 +901,17 @@ def get_bool_env_var(env_var, default=False): min_versions = { "cPyparsing": (2, 4, 7, 1, 2, 1), ("pre-commit", "py3"): (3,), - "psutil": (5,), + ("psutil", "py>=27"): (5,), "jupyter": (1, 0), "types-backports": (0, 1), - "futures": (3, 4), - "backports.functools-lru-cache": (1, 6), - "argparse": (1, 4), + ("futures", "py<3"): (3, 4), + ("backports.functools-lru-cache", "py<3"): (1, 6), + ("argparse", "py<27"): (1, 4), "pexpect": (4,), - ("trollius", "py2;cpy"): (2, 2), + ("trollius", "py<3;cpy"): (2, 2), "requests": (2, 31), ("numpy", "py34"): (1,), - ("numpy", "py2;cpy"): (1,), + ("numpy", "py<3;cpy"): (1,), ("dataclasses", "py==36"): (0, 8), ("aenum", "py<34"): (3,), "pydata-sphinx-theme": (0, 13), @@ -924,15 +923,19 @@ def get_bool_env_var(env_var, default=False): 
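The restructuring above replaces the old per-category requirement groups ("py2", "py3", "py<39", ...) with self-describing environment markers attached directly to each requirement, so the same package can appear several times with disjoint markers, each carrying its own pin. A toy rendering of what that buys (the real translation lives in coconut/requirements.py further down in this diff):

toy_reqs = (("pygments", "py<39"), ("pygments", "py>=39"))
toy_min_versions = {
    ("pygments", "py<39"): (2, 3),
    ("pygments", "py>=39"): (2, 15),
}
toy_markers = {
    "py<39": "python_version<'3.9'",
    "py>=39": "python_version>='3.9'",
}

for req in toy_reqs:
    name, mark = req
    min_ver = ".".join(str(v) for v in toy_min_versions[req])
    print("{0}>={1};{2}".format(name, min_ver, toy_markers[mark]))
# pygments>=2.3;python_version<'3.9'
# pygments>=2.15;python_version>='3.9'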
("ipython", "py38"): (8,), ("ipykernel", "py38"): (6,), ("jedi", "py39"): (0, 18), - ("pygments", "mark39"): (2, 15), + ("pygments", "py>=39"): (2, 15), + ("xonsh", "py38"): (0, 14), + ("pytest", "py36"): (7,), + ("async_generator", "py3"): (1, 10), # pinned reqs: (must be added to pinned_reqs below) # don't upgrade until myst-parser supports the new version "sphinx": (6,), - # don't upgrade this; it breaks on Python 3.7 + # don't upgrade these; they breaks on Python 3.7 ("ipython", "py==37"): (7, 34), - # don't upgrade these; it breaks on Python 3.6 + # don't upgrade these; they breaks on Python 3.6 + ("xonsh", "py>=36;py<38"): (0, 11), ("pandas", "py36"): (1,), ("jupyter-client", "py36"): (7, 1, 2), ("typing_extensions", "py==36"): (4, 1), @@ -943,23 +946,23 @@ def get_bool_env_var(env_var, default=False): ("jupyter-client", "py==35"): (6, 1, 12), ("jupytext", "py3"): (1, 8), ("jupyterlab", "py35"): (2, 2), - "xonsh": (0, 9), + ("xonsh", "py<36"): (0, 9), ("typing_extensions", "py==35"): (3, 10), # don't upgrade this to allow all versions - ("prompt_toolkit", "mark3"): (1,), + ("prompt_toolkit", "py>=3"): (1,), # don't upgrade this; it breaks on Python 2.6 - "pytest": (3,), + ("pytest", "py<36"): (3,), # don't upgrade this; it breaks on unix "vprof": (0, 36), # don't upgrade this; it breaks on Python 3.4 - ("pygments", "mark<39"): (2, 3), + ("pygments", "py<39"): (2, 3), # don't upgrade these; they break on Python 2 ("jupyter-client", "py<35"): (5, 3), - ("pywinpty", "py2;windows"): (0, 5), + ("pywinpty", "py<3;windows"): (0, 5), ("jupyter-console", "py<35"): (5, 2), - ("ipython", "py2"): (5, 4), - ("ipykernel", "py2"): (4, 10), - ("prompt_toolkit", "mark2"): (1,), + ("ipython", "py<3"): (5, 4), + ("ipykernel", "py<3"): (4, 10), + ("prompt_toolkit", "py<3"): (1,), "watchdog": (0, 10), "papermill": (1, 2), # don't upgrade this; it breaks with old IPython versions @@ -972,6 +975,7 @@ def get_bool_env_var(env_var, default=False): pinned_reqs = ( "sphinx", ("ipython", "py==37"), + ("xonsh", "py>=36;py<38"), ("pandas", "py36"), ("jupyter-client", "py36"), ("typing_extensions", "py==36"), @@ -982,17 +986,17 @@ def get_bool_env_var(env_var, default=False): ("jupyter-client", "py==35"), ("jupytext", "py3"), ("jupyterlab", "py35"), - "xonsh", + ("xonsh", "py<36"), ("typing_extensions", "py==35"), - ("prompt_toolkit", "mark3"), - "pytest", + ("prompt_toolkit", "py>=3"), + ("pytest", "py<36"), "vprof", - ("pygments", "mark<39"), - ("pywinpty", "py2;windows"), + ("pygments", "py<39"), + ("pywinpty", "py<3;windows"), ("jupyter-console", "py<35"), - ("ipython", "py2"), - ("ipykernel", "py2"), - ("prompt_toolkit", "mark2"), + ("ipython", "py<3"), + ("ipykernel", "py<3"), + ("prompt_toolkit", "py<3"), "watchdog", "papermill", ("jedi", "py<39"), @@ -1007,9 +1011,9 @@ def get_bool_env_var(env_var, default=False): ("jupyter-client", "py==35"): _, "pyparsing": _, "cPyparsing": (_, _, _), - ("prompt_toolkit", "mark2"): _, + ("prompt_toolkit", "py<3"): _, ("jedi", "py<39"): _, - ("pywinpty", "py2;windows"): _, + ("pywinpty", "py<3;windows"): _, ("ipython", "py3;py<37"): _, } @@ -1179,7 +1183,7 @@ def get_bool_env_var(env_var, default=False): conda_build_env_var = "CONDA_BUILD" -disabled_xonsh_modes = ("exec", "eval") +enabled_xonsh_modes = ("single",) # ----------------------------------------------------------------------------------------------------------------------- # DOCUMENTATION CONSTANTS: diff --git a/coconut/convenience.py b/coconut/convenience.py index 917734d60..14a6bed5a 100644 --- 
a/coconut/convenience.py +++ b/coconut/convenience.py @@ -8,7 +8,7 @@ """ Author: Evan Hubinger License: Apache 2.0 -Description: Convenience functions for using Coconut as a module. +Description: Deprecated alias for coconut.api. """ # ----------------------------------------------------------------------------------------------------------------------- @@ -17,257 +17,4 @@ from __future__ import print_function, absolute_import, unicode_literals, division -from coconut.root import * # NOQA - -import sys -import os.path -import codecs -try: - from encodings import utf_8 -except ImportError: - utf_8 = None - -from coconut.integrations import embed -from coconut.exceptions import CoconutException -from coconut.command import Command -from coconut.command.cli import cli_version -from coconut.compiler import Compiler -from coconut.constants import ( - version_tag, - code_exts, - coconut_import_hook_args, - coconut_kernel_kwargs, -) - -# ----------------------------------------------------------------------------------------------------------------------- -# COMMAND: -# ----------------------------------------------------------------------------------------------------------------------- - -GLOBAL_STATE = None - - -def get_state(state=None): - """Get a Coconut state object; None gets a new state, False gets the global state.""" - global GLOBAL_STATE - if state is None: - return Command() - elif state is False: - if GLOBAL_STATE is None: - GLOBAL_STATE = Command() - return GLOBAL_STATE - else: - return state - - -def cmd(cmd_args, interact=False, state=False, **kwargs): - """Process command-line arguments.""" - if isinstance(cmd_args, (str, bytes)): - cmd_args = cmd_args.split() - return get_state(state).cmd(cmd_args, interact=interact, **kwargs) - - -VERSIONS = { - "num": VERSION, - "name": VERSION_NAME, - "spec": VERSION_STR, - "tag": version_tag, - "-v": cli_version, -} - - -def version(which="num"): - """Get the Coconut version.""" - if which in VERSIONS: - return VERSIONS[which] - else: - raise CoconutException( - "invalid version type " + repr(which), - extra="valid versions are " + ", ".join(VERSIONS), - ) - - -# ----------------------------------------------------------------------------------------------------------------------- -# COMPILER: -# ----------------------------------------------------------------------------------------------------------------------- - -def setup(*args, **kwargs): - """Set up the given state object.""" - state = kwargs.pop("state", False) - return get_state(state).setup(*args, **kwargs) - - -PARSERS = { - "sys": lambda comp: comp.parse_sys, - "exec": lambda comp: comp.parse_exec, - "file": lambda comp: comp.parse_file, - "package": lambda comp: comp.parse_package, - "block": lambda comp: comp.parse_block, - "single": lambda comp: comp.parse_single, - "eval": lambda comp: comp.parse_eval, - "lenient": lambda comp: comp.parse_lenient, - "xonsh": lambda comp: comp.parse_xonsh, -} - -# deprecated aliases -PARSERS["any"] = PARSERS["debug"] = PARSERS["lenient"] - - -def parse(code="", mode="sys", state=False, keep_internal_state=None): - """Compile Coconut code.""" - if keep_internal_state is None: - keep_internal_state = bool(state) - command = get_state(state) - if command.comp is None: - command.setup() - if mode not in PARSERS: - raise CoconutException( - "invalid parse mode " + repr(mode), - extra="valid modes are " + ", ".join(PARSERS), - ) - return PARSERS[mode](command.comp)(code, keep_state=keep_internal_state) - - -def coconut_eval(expression, 
globals=None, locals=None, state=False, **kwargs): - """Compile and evaluate Coconut code.""" - command = get_state(state) - if command.comp is None: - setup() - command.check_runner(set_sys_vars=False) - if globals is None: - globals = {} - command.runner.update_vars(globals) - compiled_python = parse(expression, "eval", state, **kwargs) - return eval(compiled_python, globals, locals) - - -# ----------------------------------------------------------------------------------------------------------------------- -# BREAKPOINT: -# ----------------------------------------------------------------------------------------------------------------------- - - -def _coconut_breakpoint(): - """Determine coconut.embed depth based on whether we're being - called by Coconut's breakpoint() or Python's breakpoint().""" - if sys.version_info >= (3, 7): - return embed(depth=1) - else: - return embed(depth=2) - - -def use_coconut_breakpoint(on=True): - """Switches the breakpoint() built-in (universally accessible via - coconut.__coconut__.breakpoint) to use coconut.embed.""" - if on: - sys.breakpointhook = _coconut_breakpoint - else: - sys.breakpointhook = sys.__breakpointhook__ - - -use_coconut_breakpoint() - - -# ----------------------------------------------------------------------------------------------------------------------- -# AUTOMATIC COMPILATION: -# ----------------------------------------------------------------------------------------------------------------------- - - -class CoconutImporter(object): - """Finder and loader for compiling Coconut files at import time.""" - ext = code_exts[0] - - @staticmethod - def run_compiler(path): - """Run the Coconut compiler on the given path.""" - cmd([path] + list(coconut_import_hook_args)) - - def find_module(self, fullname, path=None): - """Searches for a Coconut file of the given name and compiles it.""" - basepaths = [""] + list(sys.path) - if fullname.startswith("."): - if path is None: - # we can't do a relative import if there's no package path - return - fullname = fullname[1:] - basepaths.insert(0, path) - fullpath = os.path.join(*fullname.split(".")) - for head in basepaths: - path = os.path.join(head, fullpath) - filepath = path + self.ext - dirpath = os.path.join(path, "__init__" + self.ext) - if os.path.exists(filepath): - self.run_compiler(filepath) - # Coconut file was found and compiled, now let Python import it - return - if os.path.exists(dirpath): - self.run_compiler(path) - # Coconut package was found and compiled, now let Python import it - return - - -coconut_importer = CoconutImporter() - - -def auto_compilation(on=True): - """Turn automatic compilation of Coconut files on or off.""" - if on: - if coconut_importer not in sys.meta_path: - sys.meta_path.insert(0, coconut_importer) - else: - try: - sys.meta_path.remove(coconut_importer) - except ValueError: - pass - - -auto_compilation() - - -# ----------------------------------------------------------------------------------------------------------------------- -# ENCODING: -# ----------------------------------------------------------------------------------------------------------------------- - - -if utf_8 is not None: - class CoconutStreamReader(utf_8.StreamReader, object): - """Compile Coconut code from a stream of UTF-8.""" - coconut_compiler = None - - @classmethod - def compile_coconut(cls, source): - """Compile the given Coconut source text.""" - if cls.coconut_compiler is None: - cls.coconut_compiler = Compiler(**coconut_kernel_kwargs) - return 
cls.coconut_compiler.parse_sys(source) - - @classmethod - def decode(cls, input_bytes, errors="strict"): - """Decode and compile the given Coconut source bytes.""" - input_str, len_consumed = super(CoconutStreamReader, cls).decode(input_bytes, errors) - return cls.compile_coconut(input_str), len_consumed - - class CoconutIncrementalDecoder(utf_8.IncrementalDecoder, object): - """Compile Coconut at the end of incrementally decoding UTF-8.""" - invertible = False - _buffer_decode = CoconutStreamReader.decode - - -def get_coconut_encoding(encoding="coconut"): - """Get a CodecInfo for the given Coconut encoding.""" - if not encoding.startswith("coconut"): - return None - if encoding != "coconut": - raise CoconutException("unknown Coconut encoding: " + repr(encoding)) - if utf_8 is None: - raise CoconutException("coconut encoding requires encodings.utf_8") - return codecs.CodecInfo( - name=encoding, - encode=utf_8.encode, - decode=CoconutStreamReader.decode, - incrementalencoder=utf_8.IncrementalEncoder, - incrementaldecoder=CoconutIncrementalDecoder, - streamreader=CoconutStreamReader, - streamwriter=utf_8.StreamWriter, - ) - - -codecs.register(get_coconut_encoding) +from coconut.api import * # NOQA diff --git a/coconut/convenience.pyi b/coconut/convenience.pyi index ef9b64194..bfc8f7043 100644 --- a/coconut/convenience.pyi +++ b/coconut/convenience.pyi @@ -12,97 +12,4 @@ Description: MyPy stub file for convenience.py. # IMPORTS: #----------------------------------------------------------------------------------------------------------------------- -from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Text, - Union, -) - -from coconut.command.command import Command - -class CoconutException(Exception): - ... - -#----------------------------------------------------------------------------------------------------------------------- -# COMMAND: -#----------------------------------------------------------------------------------------------------------------------- - -GLOBAL_STATE: Optional[Command] = None - - -def get_state(state: Optional[Command]=None) -> Command: ... - - -def cmd(args: Union[Text, bytes, Iterable], interact: bool=False) -> None: ... - - -VERSIONS: Dict[Text, Text] = ... - - -def version(which: Optional[Text]=None) -> Text: ... - - -#----------------------------------------------------------------------------------------------------------------------- -# COMPILER: -#----------------------------------------------------------------------------------------------------------------------- - - -def setup( - target: Optional[str]=None, - strict: bool=False, - minify: bool=False, - line_numbers: bool=False, - keep_lines: bool=False, - no_tco: bool=False, - no_wrap: bool=False, -) -> None: ... - - -PARSERS: Dict[Text, Callable] = ... - - -def parse( - code: Text, - mode: Text=..., - state: Optional[Command]=..., - keep_internal_state: Optional[bool]=None, -) -> Text: ... - - -def coconut_eval( - expression: Text, - globals: Optional[Dict[Text, Any]]=None, - locals: Optional[Dict[Text, Any]]=None, - state: Optional[Command]=..., - keep_internal_state: Optional[bool]=None, -) -> Any: ... - - -# ----------------------------------------------------------------------------------------------------------------------- -# ENABLERS: -# ----------------------------------------------------------------------------------------------------------------------- - - -def use_coconut_breakpoint(on: bool=True) -> None: ... 
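Since coconut.convenience is now only a star re-export of coconut.api, downstream code can migrate by switching the import while the old spelling keeps working. A small usage sketch, mirroring how the test suite exercises the new module (requires Coconut to be installed):

from coconut import api, convenience

# compile a Coconut snippet; the default "sys" mode produces directly executable Python
exec(api.parse("1 |> print"))

# the deprecated module should re-export the very same objects
assert convenience.parse is api.parse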
- - -class CoconutImporter: - ext: str - - @staticmethod - def run_compiler(path: str) -> None: ... - - def find_module(self, fullname: str, path: Optional[str]=None) -> None: ... - - -coconut_importer = CoconutImporter() - - -def auto_compilation(on: bool=True) -> None: ... - - -def get_coconut_encoding(encoding: str=...) -> Any: ... +from coconut.api import * diff --git a/coconut/integrations.py b/coconut/integrations.py index 7636e1e7e..f13375c65 100644 --- a/coconut/integrations.py +++ b/coconut/integrations.py @@ -23,7 +23,7 @@ from coconut.constants import ( coconut_kernel_kwargs, - disabled_xonsh_modes, + enabled_xonsh_modes, ) from coconut.util import memoize_with_exceptions @@ -57,12 +57,12 @@ def load_ipython_extension(ipython): ipython.push(newvars) # import here to avoid circular dependencies - from coconut import convenience + from coconut import api from coconut.exceptions import CoconutException from coconut.terminal import logger - magic_state = convenience.get_state() - convenience.setup(state=magic_state, **coconut_kernel_kwargs) + magic_state = api.get_state() + api.setup(state=magic_state, **coconut_kernel_kwargs) # add magic function def magic(line, cell=None): @@ -74,9 +74,9 @@ def magic(line, cell=None): # first line in block is cmd, rest is code line = line.strip() if line: - convenience.cmd(line, default_target="sys", state=magic_state) + api.cmd(line, default_target="sys", state=magic_state) code = cell - compiled = convenience.parse(code, state=magic_state) + compiled = api.parse(code, state=magic_state) except CoconutException: logger.print_exc() else: @@ -96,34 +96,33 @@ class CoconutXontribLoader(object): timing_info = [] @memoize_with_exceptions(128) - def _base_memoized_parse_xonsh(self, code): + def memoized_parse_xonsh(self, code): return self.compiler.parse_xonsh(code, keep_state=True) - def memoized_parse_xonsh(self, code): + def compile_code(self, code): """Memoized self.compiler.parse_xonsh.""" - # .strip() outside the memoization - return self._base_memoized_parse_xonsh(code.strip()) + # hide imports to avoid circular dependencies + from coconut.exceptions import CoconutException + from coconut.terminal import format_error + from coconut.util import get_clock_time + from coconut.terminal import logger - def new_parse(self, parser, code, mode="exec", *args, **kwargs): - """Coconut-aware version of xonsh's _parse.""" - if self.loaded and mode not in disabled_xonsh_modes: - # hide imports to avoid circular dependencies - from coconut.exceptions import CoconutException - from coconut.terminal import format_error - from coconut.util import get_clock_time - from coconut.terminal import logger + parse_start_time = get_clock_time() + quiet, logger.quiet = logger.quiet, True + success = False + try: + # .strip() outside the memoization + compiled = self.memoized_parse_xonsh(code.strip()) + except CoconutException as err: + err_str = format_error(err).splitlines()[0] + compiled = code + " #" + err_str + else: + success = True + finally: + logger.quiet = quiet + self.timing_info.append(("parse", get_clock_time() - parse_start_time)) - parse_start_time = get_clock_time() - quiet, logger.quiet = logger.quiet, True - try: - code = self.memoized_parse_xonsh(code) - except CoconutException as err: - err_str = format_error(err).splitlines()[0] - code += " #" + err_str - finally: - logger.quiet = quiet - self.timing_info.append(("parse", get_clock_time() - parse_start_time)) - return parser.__class__.parse(parser, code, mode=mode, *args, **kwargs) + return compiled, 
success def new_try_subproc_toks(self, ctxtransformer, node, *args, **kwargs): """Version of try_subproc_toks that handles the fact that Coconut @@ -136,37 +135,47 @@ def new_try_subproc_toks(self, ctxtransformer, node, *args, **kwargs): finally: ctxtransformer.mode = mode - def new_ctxvisit(self, ctxtransformer, node, inp, *args, **kwargs): + def new_parse(self, parser, code, mode="exec", *args, **kwargs): + """Coconut-aware version of xonsh's _parse.""" + if self.loaded and mode in enabled_xonsh_modes: + code, _ = self.compile_code(code) + return parser.__class__.parse(parser, code, mode=mode, *args, **kwargs) + + def new_ctxvisit(self, ctxtransformer, node, inp, ctx, mode="exec", *args, **kwargs): """Version of ctxvisit that ensures looking up original lines in inp using Coconut line numbers will work properly.""" - if self.loaded: + if self.loaded and mode in enabled_xonsh_modes: from xonsh.tools import get_logical_line # hide imports to avoid circular dependencies from coconut.terminal import logger from coconut.compiler.util import extract_line_num_from_comment - compiled = self.memoized_parse_xonsh(inp) - - original_lines = tuple(inp.splitlines()) - used_lines = set() - new_inp_lines = [] - last_ln = 1 - for compiled_line in compiled.splitlines(): - ln = extract_line_num_from_comment(compiled_line, default=last_ln + 1) - try: - line, _, _ = get_logical_line(original_lines, ln - 1) - except IndexError: - logger.log_exc() - line = original_lines[-1] - if line in used_lines: - line = "" - else: - used_lines.add(line) - new_inp_lines.append(line) - last_ln = ln - inp = "\n".join(new_inp_lines) + "\n" - return ctxtransformer.__class__.ctxvisit(ctxtransformer, node, inp, *args, **kwargs) + compiled, success = self.compile_code(inp) + + if success: + original_lines = tuple(inp.splitlines()) + used_lines = set() + new_inp_lines = [] + last_ln = 1 + for compiled_line in compiled.splitlines(): + ln = extract_line_num_from_comment(compiled_line, default=last_ln + 1) + try: + line, _, _ = get_logical_line(original_lines, ln - 1) + except IndexError: + logger.log_exc() + line = original_lines[-1] + if line in used_lines: + line = "" + else: + used_lines.add(line) + new_inp_lines.append(line) + last_ln = ln + inp = "\n".join(new_inp_lines) + + inp += "\n" + + return ctxtransformer.__class__.ctxvisit(ctxtransformer, node, inp, ctx, mode, *args, **kwargs) def __call__(self, xsh, **kwargs): # hide imports to avoid circular dependencies @@ -203,8 +212,8 @@ def __call__(self, xsh, **kwargs): def unload(self, xsh): if not self.loaded: # hide imports to avoid circular dependencies - from coconut.exceptions import CoconutException - raise CoconutException("attempting to unload Coconut xontrib but it was never loaded") + from coconut.terminal import logger + logger.warn("attempting to unload Coconut xontrib but it was never loaded") self.loaded = False diff --git a/coconut/requirements.py b/coconut/requirements.py index 04be698d7..6ead04b53 100644 --- a/coconut/requirements.py +++ b/coconut/requirements.py @@ -25,7 +25,6 @@ from coconut.constants import ( CPYTHON, PY34, - PY39, IPY, MYPY, XONSH, @@ -42,6 +41,7 @@ ver_str_to_tuple, ver_tuple_to_str, get_next_version, + assert_remove_prefix, ) # ----------------------------------------------------------------------------------------------------------------------- @@ -71,89 +71,122 @@ def get_base_req(req, include_extras=True): return req +def process_mark(mark): + """Get the check string and whether it currently applies for the given mark.""" + assert not 
mark.startswith("py2"), "confusing mark; should be changed: " + mark + if mark.startswith("py=="): + ver = assert_remove_prefix(mark, "py==") + if len(ver) == 1: + ver_tuple = (int(ver),) + else: + ver_tuple = (int(ver[0]), int(ver[1:])) + next_ver_tuple = get_next_version(ver_tuple) + check_str = ( + "python_version>='" + ver_tuple_to_str(ver_tuple) + "'" + + " and python_version<'" + ver_tuple_to_str(next_ver_tuple) + "'" + ) + holds_now = ( + sys.version_info >= ver_tuple + and sys.version_info < next_ver_tuple + ) + elif mark in ("py3", "py>=3"): + check_str = "python_version>='3'" + holds_now = not PY2 + elif mark == "py<3": + check_str = "python_version<'3'" + holds_now = PY2 + elif mark.startswith("py<"): + full_ver = assert_remove_prefix(mark, "py<") + main_ver, sub_ver = full_ver[0], full_ver[1:] + check_str = "python_version<'{main}.{sub}'".format(main=main_ver, sub=sub_ver) + holds_now = sys.version_info < (int(main_ver), int(sub_ver)) + elif mark.startswith("py") or mark.startswith("py>="): + full_ver = assert_remove_prefix(mark, "py") + if full_ver.startswith(">="): + full_ver = assert_remove_prefix(full_ver, ">=") + main_ver, sub_ver = full_ver[0], full_ver[1:] + check_str = "python_version>='{main}.{sub}'".format(main=main_ver, sub=sub_ver) + holds_now = sys.version_info >= (int(main_ver), int(sub_ver)) + elif mark == "cpy": + check_str = "platform_python_implementation=='CPython'" + holds_now = CPYTHON + elif mark == "windows": + check_str = "os_name=='nt'" + holds_now = WINDOWS + elif mark.startswith("mark"): + check_str = None + holds_now = True + else: + raise ValueError("unknown env marker " + repr(mark)) + return check_str, holds_now + + +def get_req_str(req): + """Get the str that properly versions the given req.""" + req_str = get_base_req(req) + ">=" + ver_tuple_to_str(min_versions[req]) + if req in max_versions: + max_ver = max_versions[req] + if max_ver is None: + max_ver = get_next_version(min_versions[req]) + if None in max_ver: + assert all(v is None for v in max_ver), "invalid max version " + repr(max_ver) + max_ver = get_next_version(min_versions[req], len(max_ver) - 1) + req_str += ",<" + ver_tuple_to_str(max_ver) + return req_str + + +def get_env_markers(req): + """Get the environment markers for the given req.""" + if isinstance(req, tuple): + return req[1].split(";") + else: + return () + + def get_reqs(which): """Gets requirements from all_reqs with versions.""" reqs = [] for req in all_reqs[which]: + req_str = get_req_str(req) use_req = True - req_str = get_base_req(req) + ">=" + ver_tuple_to_str(min_versions[req]) - if req in max_versions: - max_ver = max_versions[req] - if max_ver is None: - max_ver = get_next_version(min_versions[req]) - if None in max_ver: - assert all(v is None for v in max_ver), "invalid max version " + repr(max_ver) - max_ver = get_next_version(min_versions[req], len(max_ver) - 1) - req_str += ",<" + ver_tuple_to_str(max_ver) - env_marker = req[1] if isinstance(req, tuple) else None - if env_marker: - markers = [] - for mark in env_marker.split(";"): - if mark.startswith("py=="): - ver = mark[len("py=="):] - if len(ver) == 1: - ver_tuple = (int(ver),) - else: - ver_tuple = (int(ver[0]), int(ver[1:])) - next_ver_tuple = get_next_version(ver_tuple) - if supports_env_markers: - markers.append("python_version>='" + ver_tuple_to_str(ver_tuple) + "'") - markers.append("python_version<'" + ver_tuple_to_str(next_ver_tuple) + "'") - elif sys.version_info < ver_tuple or sys.version_info >= next_ver_tuple: - use_req = False - break - elif 
mark == "py2": - if supports_env_markers: - markers.append("python_version<'3'") - elif not PY2: - use_req = False - break - elif mark == "py3": - if supports_env_markers: - markers.append("python_version>='3'") - elif PY2: - use_req = False - break - elif mark.startswith("py3") or mark.startswith("py>=3"): - mark = mark[len("py"):] - if mark.startswith(">="): - mark = mark[len(">="):] - ver = mark[len("3"):] - if supports_env_markers: - markers.append("python_version>='3.{ver}'".format(ver=ver)) - elif sys.version_info < (3, ver): - use_req = False - break - elif mark.startswith("py<3"): - ver = mark[len("py<3"):] - if supports_env_markers: - markers.append("python_version<'3.{ver}'".format(ver=ver)) - elif sys.version_info >= (3, ver): - use_req = False - break - elif mark == "cpy": - if supports_env_markers: - markers.append("platform_python_implementation=='CPython'") - elif not CPYTHON: - use_req = False - break - elif mark == "windows": - if supports_env_markers: - markers.append("os_name=='nt'") - elif not WINDOWS: - use_req = False - break - elif mark.startswith("mark"): - pass # ignore - else: - raise ValueError("unknown env marker " + repr(mark)) - if markers: - req_str += ";" + " and ".join(markers) + markers = [] + for mark in get_env_markers(req): + check_str, holds_now = process_mark(mark) + if supports_env_markers: + if check_str is not None: + markers.append(check_str) + else: + if not holds_now: + use_req = False + break + if markers: + req_str += ";" + " and ".join(markers) if use_req: reqs.append(req_str) return reqs +def get_main_reqs(main_reqs_name): + """Get the main requirements and extras.""" + requirements = [] + extras = {} + if using_modern_setuptools: + for req in all_reqs[main_reqs_name]: + req_str = get_req_str(req) + markers = [] + for mark in get_env_markers(req): + check_str, _ = process_mark(mark) + if check_str is not None: + markers.append(check_str) + if markers: + extras.setdefault(":" + " and ".join(markers), []).append(req_str) + else: + requirements.append(req_str) + else: + requirements += get_reqs(main_reqs_name) + return requirements, extras + + def uniqueify(reqs): """Make a list of requirements unique.""" return list(set(reqs)) @@ -181,7 +214,7 @@ def everything_in(req_dict): # SETUP: # ----------------------------------------------------------------------------------------------------------------------- -requirements = get_reqs("main") +requirements, reqs_extras = get_main_reqs("main") extras = { "kernel": get_reqs("kernel"), @@ -218,6 +251,9 @@ def everything_in(req_dict): if not PY34: extras["dev"] = unique_wrt(extras["dev"], extras["mypy"]) +# has to come after dev so they don't get included in it +extras.update(reqs_extras) + if PURE_PYTHON: # override necessary for readthedocs requirements += get_reqs("purepython") @@ -232,29 +268,6 @@ def everything_in(req_dict): else: requirements += get_reqs("purepython") -if using_modern_setuptools: - # modern method - extras[":python_version<'2.7'"] = get_reqs("py26") - extras[":python_version>='2.7'"] = get_reqs("non-py26") - extras[":python_version<'3'"] = get_reqs("py2") - extras[":python_version>='3'"] = get_reqs("py3") - extras[":python_version<'3.9'"] = get_reqs("py<39") - extras[":python_version>='3.9'"] = get_reqs("py39") -else: - # old method - if PY26: - requirements += get_reqs("py26") - else: - requirements += get_reqs("non-py26") - if PY2: - requirements += get_reqs("py2") - else: - requirements += get_reqs("py3") - if PY39: - requirements += get_reqs("py39") - else: - requirements += 
get_reqs("py<39") - # ----------------------------------------------------------------------------------------------------------------------- # MAIN: # ----------------------------------------------------------------------------------------------------------------------- diff --git a/coconut/root.py b/coconut/root.py index 712f277b0..4b0454312 100644 --- a/coconut/root.py +++ b/coconut/root.py @@ -23,7 +23,7 @@ # VERSION: # ----------------------------------------------------------------------------------------------------------------------- -VERSION = "3.0.1" +VERSION = "3.0.2" VERSION_NAME = None # False for release, int >= 1 for develop DEVELOP = False diff --git a/coconut/tests/constants_test.py b/coconut/tests/constants_test.py index bb2d561c5..7c5186781 100644 --- a/coconut/tests/constants_test.py +++ b/coconut/tests/constants_test.py @@ -98,8 +98,8 @@ def test_imports(self): or PYPY and new_imp.startswith("tkinter") # don't test trollius on PyPy or PYPY and old_imp == "trollius" - # don't test typing_extensions on Python 2 - or PY2 and old_imp.startswith("typing_extensions") + # don't test typing_extensions, async_generator on Python 2 + or PY2 and old_imp.startswith(("typing_extensions", "async_generator")) ): pass elif sys.version_info >= ver_cutoff: diff --git a/coconut/tests/main_test.py b/coconut/tests/main_test.py index 444228b19..b5183d6fb 100644 --- a/coconut/tests/main_test.py +++ b/coconut/tests/main_test.py @@ -46,18 +46,22 @@ WINDOWS, PYPY, IPY, + XONSH, MYPY, PY35, PY36, PY38, + PY39, PY310, + supported_py2_vers, + supported_py3_vers, icoconut_default_kernel_names, icoconut_custom_kernel_name, mypy_err_infixes, get_bool_env_var, ) -from coconut.convenience import ( +from coconut.api import ( auto_compilation, setup, ) @@ -141,6 +145,12 @@ + "', '".join((icoconut_custom_kernel_name,) + icoconut_default_kernel_names) + "'" ) +always_sys_versions = ( + supported_py2_vers[-1], + supported_py3_vers[-2], + supported_py3_vers[-1], +) + # ----------------------------------------------------------------------------------------------------------------------- # UTILITIES: @@ -402,10 +412,10 @@ def using_dest(dest=dest): @contextmanager -def using_coconut(fresh_logger=True, fresh_convenience=False): - """Decorator for ensuring that coconut.terminal.logger and coconut.convenience.* are reset.""" +def using_coconut(fresh_logger=True, fresh_api=False): + """Decorator for ensuring that coconut.terminal.logger and coconut.api.* are reset.""" saved_logger = logger.copy() - if fresh_convenience: + if fresh_api: setup() auto_compilation(False) if fresh_logger: @@ -483,7 +493,7 @@ def comp_agnostic(args=[], **kwargs): comp(path="cocotest", folder="agnostic", args=args, **kwargs) -def comp_2(args=[], **kwargs): +def comp_2(args=[], always_sys=False, **kwargs): """Compiles target_2.""" # remove --mypy checking for target_2 to avoid numpy errors try: @@ -492,27 +502,27 @@ def comp_2(args=[], **kwargs): pass else: args = args[:mypy_ind] - comp(path="cocotest", folder="target_2", args=["--target", "2"] + args, **kwargs) + comp(path="cocotest", folder="target_2", args=["--target", "2" if not always_sys else "sys"] + args, **kwargs) -def comp_3(args=[], **kwargs): +def comp_3(args=[], always_sys=False, **kwargs): """Compiles target_3.""" - comp(path="cocotest", folder="target_3", args=["--target", "3"] + args, **kwargs) + comp(path="cocotest", folder="target_3", args=["--target", "3" if not always_sys else "sys"] + args, **kwargs) -def comp_35(args=[], **kwargs): +def comp_35(args=[], 
always_sys=False, **kwargs): """Compiles target_35.""" - comp(path="cocotest", folder="target_35", args=["--target", "35"] + args, **kwargs) + comp(path="cocotest", folder="target_35", args=["--target", "35" if not always_sys else "sys"] + args, **kwargs) -def comp_36(args=[], **kwargs): +def comp_36(args=[], always_sys=False, **kwargs): """Compiles target_36.""" - comp(path="cocotest", folder="target_36", args=["--target", "36"] + args, **kwargs) + comp(path="cocotest", folder="target_36", args=["--target", "36" if not always_sys else "sys"] + args, **kwargs) -def comp_38(args=[], **kwargs): +def comp_38(args=[], always_sys=False, **kwargs): """Compiles target_38.""" - comp(path="cocotest", folder="target_38", args=["--target", "38"] + args, **kwargs) + comp(path="cocotest", folder="target_38", args=["--target", "38" if not always_sys else "sys"] + args, **kwargs) def comp_sys(args=[], **kwargs): @@ -536,7 +546,7 @@ def run_extras(**kwargs): call_python([os.path.join(dest, "extras.py")], assert_output=True, check_errors=False, stderr_first=True, **kwargs) -def run(args=[], agnostic_target=None, use_run_arg=False, convert_to_import=False, **kwargs): +def run(args=[], agnostic_target=None, use_run_arg=False, convert_to_import=False, always_sys=False, **kwargs): """Compiles and runs tests.""" if agnostic_target is None: agnostic_args = args @@ -546,16 +556,19 @@ def run(args=[], agnostic_target=None, use_run_arg=False, convert_to_import=Fals with using_dest(): with (using_dest(additional_dest) if "--and" in args else noop_ctx()): + spec_kwargs = kwargs.copy() + spec_kwargs["always_sys"] = always_sys if PY2: - comp_2(args, **kwargs) + comp_2(args, **spec_kwargs) else: - comp_3(args, **kwargs) + comp_3(args, **spec_kwargs) if sys.version_info >= (3, 5): - comp_35(args, **kwargs) + comp_35(args, **spec_kwargs) if sys.version_info >= (3, 6): - comp_36(args, **kwargs) + comp_36(args, **spec_kwargs) if sys.version_info >= (3, 8): - comp_38(args, **kwargs) + comp_38(args, **spec_kwargs) + comp_agnostic(agnostic_args, **kwargs) comp_sys(args, **kwargs) comp_non_strict(args, **kwargs) @@ -675,11 +688,36 @@ def test_code(self): def test_target_3_snip(self): call(["coconut", "-t3", "-c", target_3_snip], assert_output=True) + if MYPY: + def test_universal_mypy_snip(self): + call( + ["coconut", "-c", mypy_snip, "--mypy"], + assert_output=mypy_snip_err_3, + check_errors=False, + check_mypy=False, + ) + + def test_sys_mypy_snip(self): + call( + ["coconut", "--target", "sys", "-c", mypy_snip, "--mypy"], + assert_output=mypy_snip_err_3, + check_errors=False, + check_mypy=False, + ) + + def test_no_wrap_mypy_snip(self): + call( + ["coconut", "--target", "sys", "--no-wrap", "-c", mypy_snip, "--mypy"], + assert_output=mypy_snip_err_3, + check_errors=False, + check_mypy=False, + ) + def test_pipe(self): call('echo ' + escape(coconut_snip) + "| coconut -s", shell=True, assert_output=True) - def test_convenience(self): - call_python(["-c", 'from coconut.convenience import parse; exec(parse("' + coconut_snip + '"))'], assert_output=True) + def test_api(self): + call_python(["-c", 'from coconut.api import parse; exec(parse("' + coconut_snip + '"))'], assert_output=True) def test_import_hook(self): with using_sys_path(src): @@ -708,9 +746,7 @@ def test_import_runnable(self): for _ in range(2): # make sure we can import it twice call_python([runnable_py, "--arg"], assert_output=True, convert_to_import=True) - # not py36 is only because newer Python versions require newer xonsh - # versions that aren't always installed 
by pip install coconut[tests] - if not WINDOWS and PY35 and not PY36: + if not WINDOWS and XONSH: def test_xontrib(self): p = spawn_cmd("xonsh") p.expect("$") @@ -718,8 +754,22 @@ def test_xontrib(self): p.expect("$") p.sendline("!(ls -la) |> bool") p.expect("True") + p.sendline('$ENV_VAR = "ABC"') + p.expect("$") + p.sendline('echo f"{$ENV_VAR}"; echo f"{$ENV_VAR}"') + p.expect("ABC") + p.expect("ABC") + if not PYPY or PY39: + if PY36: + p.sendline("echo 123;; 123") + p.expect("123;; 123") + p.sendline('execx("10 |> print")') + p.expect("subprocess mode") p.sendline("xontrib unload coconut") p.expect("$") + if (not PYPY or PY39) and PY36: + p.sendline("1 |> print") + p.expect("subprocess mode") p.sendeof() if p.isalive(): p.terminate() @@ -745,10 +795,12 @@ def test_kernel_installation(self): assert kernel in stdout if not WINDOWS and not PYPY: - def test_exit_jupyter(self): + def test_jupyter_console(self): p = spawn_cmd("coconut --jupyter console") p.expect("In", timeout=120) - p.sendline("exit()") + p.sendline("%load_ext coconut") + p.expect("In", timeout=120) + p.sendline("`exit`") p.expect("Shutting down kernel|shutting down") if p.isalive(): p.terminate() @@ -761,33 +813,13 @@ def test_normal(self): run() if MYPY: - def test_universal_mypy_snip(self): - call( - ["coconut", "-c", mypy_snip, "--mypy"], - assert_output=mypy_snip_err_3, - check_errors=False, - check_mypy=False, - ) - - def test_sys_mypy_snip(self): - call( - ["coconut", "--target", "sys", "-c", mypy_snip, "--mypy"], - assert_output=mypy_snip_err_3, - check_errors=False, - check_mypy=False, - ) - - def test_no_wrap_mypy_snip(self): - call( - ["coconut", "--target", "sys", "--no-wrap", "-c", mypy_snip, "--mypy"], - assert_output=mypy_snip_err_3, - check_errors=False, - check_mypy=False, - ) - def test_mypy_sys(self): run(["--mypy"] + mypy_args, agnostic_target="sys", expect_retcode=None, check_errors=False) # fails due to tutorial mypy errors + if sys.version_info[:2] in always_sys_versions: + def test_always_sys(self): + run(["--line-numbers"], agnostic_target="sys", always_sys=True) + # run fewer tests on Windows so appveyor doesn't time out if not WINDOWS: def test_line_numbers_keep_lines(self): @@ -845,24 +877,24 @@ def test_simple_minify(self): run_runnable(["-n", "--minify"]) -@add_test_func_names -class TestExternal(unittest.TestCase): - - if not PYPY or PY2: - def test_prelude(self): - with using_path(prelude): - comp_prelude() - if MYPY and PY38: - run_prelude() - - def test_bbopt(self): - with using_path(bbopt): - comp_bbopt() - if not PYPY and PY38 and not PY310: - install_bbopt() - - # more appveyor timeout prevention - if not WINDOWS: +# more appveyor timeout prevention +if not WINDOWS: + @add_test_func_names + class TestExternal(unittest.TestCase): + + if not PYPY or PY2: + def test_prelude(self): + with using_path(prelude): + comp_prelude() + if MYPY and PY38: + run_prelude() + + def test_bbopt(self): + with using_path(bbopt): + comp_bbopt() + if not PYPY and PY38 and not PY310: + install_bbopt() + def test_pyprover(self): with using_path(pyprover): comp_pyprover() diff --git a/coconut/tests/src/cocotest/agnostic/primary.coco b/coconut/tests/src/cocotest/agnostic/primary.coco index 453106920..7b7d3ef5b 100644 --- a/coconut/tests/src/cocotest/agnostic/primary.coco +++ b/coconut/tests/src/cocotest/agnostic/primary.coco @@ -1601,4 +1601,6 @@ def primary_test() -> bool: assert (...=really_long_var).really_long_var == 10 n = [0] assert n[0] == 0 + assert_raises(-> m{{1:2,2:3}}, TypeError) + assert_raises((def -> 
from typing import blah), ImportError) # NOQA return True diff --git a/coconut/tests/src/cocotest/agnostic/specific.coco b/coconut/tests/src/cocotest/agnostic/specific.coco index 9c936dddd..2cd9d3858 100644 --- a/coconut/tests/src/cocotest/agnostic/specific.coco +++ b/coconut/tests/src/cocotest/agnostic/specific.coco @@ -180,6 +180,7 @@ def py37_spec_test() -> bool: assert l == list(range(10)) class HasVarGen[*Ts] # type: ignore assert HasVarGen `issubclass` object + assert typing.Protocol.__module__ == "typing_extensions" return True diff --git a/coconut/tests/src/cocotest/agnostic/suite.coco b/coconut/tests/src/cocotest/agnostic/suite.coco index b542db14e..666fb773f 100644 --- a/coconut/tests/src/cocotest/agnostic/suite.coco +++ b/coconut/tests/src/cocotest/agnostic/suite.coco @@ -148,7 +148,7 @@ def suite_test() -> bool: assert one_to_five([1,2,3,4,5]) == [2,3,4] assert not one_to_five([0,1,2,3,4,5]) assert one_to_five([1,5]) == [] - assert -4 == neg_square_u(2) ≠ 4 ∧ 0 ≤ neg_square_u(0) ≤ 0 + assert -4 == neg_square_u(2) ≠ 4 ∩ 0 ≤ neg_square_u(0) ≤ 0 assert is_null(null1()) assert is_null(null2()) assert empty() |> depth_1 == 0 == empty() |> depth_2 @@ -219,6 +219,7 @@ def suite_test() -> bool: assert inh_a.inh_true4() is True assert inh_a.inh_true5() is True assert inh_A.inh_cls_true() is True + assert inh_inh_A().true() is False assert pt.__doc__ out0 = grid() |> grid_trim$(xmax=5, ymax=5) assert out0 == [ @@ -1050,6 +1051,8 @@ forward 2""") == 900 really_long_var = 10 assert ret_args_kwargs(...=really_long_var) == ((), {"really_long_var": 10}) == ret_args_kwargs$(...=really_long_var)() assert ret_args_kwargs(123, ...=really_long_var, abc="abc") == ((123,), {"really_long_var": 10, "abc": "abc"}) == ret_args_kwargs$(123, ...=really_long_var, abc="abc")() + assert "Coconut version of typing" in typing.__doc__ + numlist: NumList = [1, 2.3, 5] # must come at end assert fibs_calls[0] == 1 diff --git a/coconut/tests/src/cocotest/agnostic/util.coco b/coconut/tests/src/cocotest/agnostic/util.coco index 59b3ec93c..38cbadc26 100644 --- a/coconut/tests/src/cocotest/agnostic/util.coco +++ b/coconut/tests/src/cocotest/agnostic/util.coco @@ -22,7 +22,7 @@ class AccessCounter(): self.counts[attr] += 1 return super(AccessCounter, self).__getattribute__(attr) -def assert_raises(c, exc=Exception): +def assert_raises(c, exc): """Test whether callable c raises an exception of type exc.""" try: c() @@ -231,9 +231,8 @@ addpattern def x! if x = False # type: ignore addpattern def x! 
= True # type: ignore # Type aliases: +import typing if sys.version_info >= (3, 5) or TYPE_CHECKING: - import typing - type list_or_tuple = list | tuple type func_to_int = -> int @@ -244,6 +243,8 @@ if sys.version_info >= (3, 5) or TYPE_CHECKING: type TextMap[T <: typing.Text, U] = typing.Mapping[T, U] + type NumList[T : (int, float)] = typing.List[T] + class HasT: T = 1 @@ -298,7 +299,7 @@ def qsort4(l: int[]) -> int[]: return None # type: ignore def qsort5(l: int$[]) -> int$[]: """Iterator Match Quick Sort.""" - match (head,) :: tail in l: # type: ignore + match (head,) :: tail in l: tail, tail_ = tee(tail) return (qsort5((x for x in tail if x <= head)) :: (head,) # The pivot is a tuple @@ -307,8 +308,8 @@ def qsort5(l: int$[]) -> int$[]: else: return iter(()) def qsort6(l: int$[]) -> int$[]: - match [head] :: tail in l: # type: ignore - tail = reiterable(tail) # type: ignore + match [head] :: tail in l: + tail = reiterable(tail) yield from ( qsort6(x for x in tail if x <= head) :: (head,) @@ -620,11 +621,11 @@ def factorial5(value): return None raise TypeError() -match def fact(n) = fact(n, 1) # type: ignore +match def fact(n) = fact(n, 1) match addpattern def fact(0, acc) = acc # type: ignore addpattern match def fact(n, acc) = fact(n-1, acc*n) # type: ignore -addpattern def factorial(0, acc=1) = acc # type: ignore +addpattern def factorial(0, acc=1) = acc addpattern def factorial(int() as n, acc=1 if n > 0) = # type: ignore """this is a docstring""" factorial(n-1, acc*n) @@ -864,6 +865,10 @@ class clsC: class clsD: d = 4 +class inh_inh_A(inh_A): + @override + def true(self) = False + class MyExc(Exception): def __init__(self, m): super().__init__(m) diff --git a/coconut/tests/src/cocotest/target_35/py35_test.coco b/coconut/tests/src/cocotest/target_35/py35_test.coco index 892b98829..baca2a698 100644 --- a/coconut/tests/src/cocotest/target_35/py35_test.coco +++ b/coconut/tests/src/cocotest/target_35/py35_test.coco @@ -1,7 +1,99 @@ +import asyncio, typing +from contextlib import aclosing + + def py35_test() -> bool: """Performs Python-3.5-specific tests.""" assert .attr |> repr == "operator.attrgetter('attr')" assert .method(1) |> repr == "operator.methodcaller('method', 1)" assert pow$(1) |> repr == "functools.partial(, 1)" assert .[1] |> repr == "operator.itemgetter(1)" + + loop = asyncio.new_event_loop() + + async def ayield(x) = x + :async def arange(n): + for i in range(n): + yield :await ayield(i) + async def afor_test(): + # match syntax 1 + got = [] + async for int(i) in arange(5): + got.append(i) + assert got == range(5) |> list + + # match syntax 2 + got = [] + async match for int(i) in arange(5): + got.append(i) + assert got == range(5) |> list + + # match syntax 3 + got = [] + match async for int(i) in arange(5): + got.append(i) + assert got == range(5) |> list + + # with for non-match + got = [] + async with for i in aclosing(arange(5)): + got.append(i) + assert got == range(5) |> list + + # with for match syntax 1 + got = [] + async with for int(i) in aclosing(arange(5)): + got.append(i) + assert got == range(5) |> list + + # with for match syntax 2 + got = [] + async match with for int(i) in aclosing(arange(5)): + got.append(i) + assert got == range(5) |> list + + # with for match syntax 3 + got = [] + match async with for int(i) in aclosing(arange(5)): + got.append(i) + assert got == range(5) |> list + + # with for match syntax 4 + got = [] + async with match for int(i) in aclosing(arange(5)): + got.append(i) + assert got == range(5) |> list + + return True + 
loop.run_until_complete(afor_test()) + + async yield def toa(it): + for x in it: + yield x + match yield async def arange_(int(n)): + for x in range(n): + yield x + async def aconsume(ait): + async for _ in ait: + pass + l: typing.List[int] = [] + async def aiter_test(): + range(10) |> toa |> fmap$(l.append) |> aconsume |> await + arange_(10) |> fmap$(l.append) |> aconsume |> await + loop.run_until_complete(aiter_test()) + assert l == list(range(10)) + list(range(10)) + + async def arec(x) = await arec(x-1) if x else x + async def atest(): + assert ( + 10 + |> arec + |> await + |> (.+10) + |> arec + |> await + ) == 0 + loop.run_until_complete(atest()) + + loop.close() return True diff --git a/coconut/tests/src/cocotest/target_36/py36_test.coco b/coconut/tests/src/cocotest/target_36/py36_test.coco index 43e420fa0..c7645db71 100644 --- a/coconut/tests/src/cocotest/target_36/py36_test.coco +++ b/coconut/tests/src/cocotest/target_36/py36_test.coco @@ -1,4 +1,5 @@ -import asyncio, typing +import asyncio + def py36_test() -> bool: """Performs Python-3.6-specific tests.""" @@ -7,49 +8,6 @@ def py36_test() -> bool: loop = asyncio.new_event_loop() - async def ayield(x) = x - :async def arange(n): - for i in range(n): - yield :await ayield(i) - async def afor_test(): - # syntax 1 - got = [] - async for int(i) in arange(5): - got.append(i) - assert got == range(5) |> list - - # syntax 2 - got = [] - async match for int(i) in arange(5): - got.append(i) - assert got == range(5) |> list - - # syntax 3 - got = [] - match async for int(i) in arange(5): - got.append(i) - assert got == range(5) |> list - - return True - loop.run_until_complete(afor_test()) - - async yield def toa(it): - for x in it: - yield x - match yield async def arange_(int(n)): - for x in range(n): - yield x - async def aconsume(ait): - async for _ in ait: - pass - l: typing.List[int] = [] - async def aiter_test(): - range(10) |> toa |> fmap$(l.append) |> aconsume |> await - arange_(10) |> fmap$(l.append) |> aconsume |> await - loop.run_until_complete(aiter_test()) - assert l == list(range(10)) + list(range(10)) - - async def arec(x) = await arec(x-1) if x else x async def outer_func(): funcs = [] for x in range(5): @@ -57,14 +15,6 @@ def py36_test() -> bool: return funcs async def await_all(xs) = [await x for x in xs] async def atest(): - assert ( - 10 - |> arec - |> await - |> (.+10) - |> arec - |> await - ) == 0 assert ( outer_func() |> await @@ -75,5 +25,4 @@ def py36_test() -> bool: loop.run_until_complete(atest()) loop.close() - return True diff --git a/coconut/tests/src/extras.coco b/coconut/tests/src/extras.coco index 6411ff8a2..fb46c2e99 100644 --- a/coconut/tests/src/extras.coco +++ b/coconut/tests/src/extras.coco @@ -100,7 +100,7 @@ def test_setup_none() -> bool: assert version("tag") assert version("-v") assert_raises(-> version("other"), CoconutException) - assert_raises(def -> raise CoconutException("derp").syntax_err(), SyntaxError) + assert_raises((def -> raise CoconutException("derp").syntax_err()), SyntaxError) assert coconut_eval("x -> x + 1")(2) == 3 assert coconut_eval("addpattern") @@ -281,14 +281,14 @@ def test_convenience() -> bool: assert parse("abc", "lenient") == "abc #1: abc" setup() - assert "Deprecated built-in 'prepattern' disabled by --strict compilation" not in parse("\n", mode="file") - assert "Deprecated built-in 'datamaker' disabled by --strict compilation" not in parse("\n", mode="file") - assert "Deprecated built-in 'of' disabled by --strict compilation" not in parse("\n", mode="file") + assert 
"Deprecated Coconut built-in 'prepattern' disabled by --strict compilation" not in parse("\n", mode="file") + assert "Deprecated Coconut built-in 'datamaker' disabled by --strict compilation" not in parse("\n", mode="file") + assert "Deprecated Coconut built-in 'of' disabled by --strict compilation" not in parse("\n", mode="file") setup(strict=True) - assert "Deprecated built-in 'prepattern' disabled by --strict compilation" in parse("\n", mode="file") - assert "Deprecated built-in 'datamaker' disabled by --strict compilation" in parse("\n", mode="file") - assert "Deprecated built-in 'of' disabled by --strict compilation" in parse("\n", mode="file") + assert "Deprecated Coconut built-in 'prepattern' disabled by --strict compilation" in parse("\n", mode="file") + assert "Deprecated Coconut built-in 'datamaker' disabled by --strict compilation" in parse("\n", mode="file") + assert "Deprecated Coconut built-in 'of' disabled by --strict compilation" in parse("\n", mode="file") assert_raises(-> parse("def f(x):\n \t pass"), CoconutStyleError) assert_raises(-> parse("lambda x: x"), CoconutStyleError) @@ -316,6 +316,7 @@ else: match x: pass"""), CoconutStyleError, err_has="case x:") assert_raises(-> parse("obj."), CoconutStyleError, err_has="getattr") + assert_raises(-> parse("def x -> pass, 1"), CoconutStyleError, err_has="statement lambda") setup(strict=True, target="sys") assert_raises(-> parse("await f x"), CoconutParseError, err_has='invalid use of the keyword "await"') @@ -347,12 +348,17 @@ async def async_map_test() = setup(target="3.2") assert parse(gen_func_def, mode="lenient") not in gen_func_def_outs - setup(target="3.5") + setup(target="3.4") assert_raises(-> parse("async def f(): yield 1"), CoconutTargetError) + setup(target="3.5") + assert parse("async def f(): yield 1") + assert_raises(-> parse("""async def agen(): + yield from range(5)"""), CoconutSyntaxError, err_has="async generator") + setup(target="3.6") assert parse("def f(*, x=None) = x") - assert parse("async def f(): yield 1") + assert "@" not in parse("async def f(x): yield x") setup(target="3.8") assert parse("(a := b)") @@ -363,6 +369,12 @@ async def async_map_test() = setup(target="3.11") assert parse("a[x, *y]") + setup(target="3.12") + assert parse("type Num = int | float").strip().endswith(""" +# Compiled Coconut: ----------------------------------------------------------- + +type Num = int | float""".strip()) + setup(minify=True) assert parse("123 # derp", "lenient") == "123# derp" diff --git a/coconut/util.py b/coconut/util.py index 98489f5b4..1b1b21a62 100644 --- a/coconut/util.py +++ b/coconut/util.py @@ -25,7 +25,6 @@ import json import traceback import time -import ast from zlib import crc32 from warnings import warn from types import MethodType @@ -47,7 +46,6 @@ icoconut_custom_kernel_install_loc, icoconut_custom_kernel_file_loc, WINDOWS, - reserved_prefix, non_syntactic_newline, ) @@ -242,6 +240,12 @@ def __missing__(self, key): return self[key] +def assert_remove_prefix(inputstr, prefix): + """Remove prefix asserting that inputstr starts with it.""" + assert inputstr.startswith(prefix), inputstr + return inputstr[len(prefix):] + + # ----------------------------------------------------------------------------------------------------------------------- # VERSIONING: # ----------------------------------------------------------------------------------------------------------------------- @@ -360,37 +364,3 @@ def make_custom_kernel(executable=None): raw_json = json.dumps(kernel_dict, indent=1) 
kernel_file.write(raw_json.encode(encoding=default_encoding)) return icoconut_custom_kernel_dir - - -# ----------------------------------------------------------------------------------------------------------------------- -# PYTEST: -# ----------------------------------------------------------------------------------------------------------------------- - - -class FixPytestNames(ast.NodeTransformer): - """Renames invalid names added by pytest assert rewriting.""" - - def fix_name(self, name): - """Make the given pytest name a valid but non-colliding identifier.""" - return name.replace("@", reserved_prefix + "_pytest_") - - def visit_Name(self, node): - """Special method to visit ast.Names.""" - node.id = self.fix_name(node.id) - return node - - def visit_alias(self, node): - """Special method to visit ast.aliases.""" - node.asname = self.fix_name(node.asname) - return node - - -def pytest_rewrite_asserts(code, module_name=reserved_prefix + "_pytest_module"): - """Uses pytest to rewrite the assert statements in the given code.""" - from _pytest.assertion.rewrite import rewrite_asserts # hidden since it's not always available - - module_name = module_name.encode("utf-8") - tree = ast.parse(code) - rewrite_asserts(tree, module_name) - fixed_tree = ast.fix_missing_locations(FixPytestNames().visit(tree)) - return ast.unparse(fixed_tree)
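The pytest helpers removed from coconut/util.py here are the same FixPytestNames and pytest_rewrite_asserts added earlier in this diff, so the functionality moves rather than disappears. A standalone sketch of the renaming they perform: pytest's assert rewriting introduces temporary names containing "@" (for example "@py_assert1"), which are not valid identifiers, so they get mapped onto a reserved prefix. The "_coconut" prefix below is an assumed stand-in for Coconut's reserved_prefix, used purely for illustration.

import ast

RESERVED_PREFIX = "_coconut"  # assumption: stand-in for Coconut's reserved_prefix


class RenameAtNames(ast.NodeTransformer):
    """Rewrite "@"-containing names into valid, prefixed identifiers."""

    def visit_Name(self, node):
        node.id = node.id.replace("@", RESERVED_PREFIX + "_pytest_")
        return node


tree = ast.parse("x = 1")
tree.body[0].targets[0].id = "@py_assert1"  # simulate a pytest-style temporary name
fixed = ast.fix_missing_locations(RenameAtNames().visit(tree))
print(ast.unparse(fixed))  # _coconut_pytest_py_assert1 = 1  (ast.unparse needs Python 3.9+)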