Thomas Hodnemyr 2022-09-17 14:28:51 +02:00
parent 1651903879
commit 9cf4eeedad
1465 changed files with 214240 additions and 29493 deletions

13
cogs/setup.py Normal file
View File

@@ -0,0 +1,13 @@
from disnake.ext import commands
import disnake


class Setup(commands.Cog):
    typelist = ["liegelord", "vm_tracker", "diplomacy"]

    def __init__(self, bot) -> None:
        self.bot = bot

    @commands.slash_command(description='Define an advert channel',
                            default_member_permissions=disnake.Permissions(manage_guild=True))
    async def create_advert(self,
                            inter: disnake.ApplicationCommandInteraction,
                            advert_type: str = commands.Param(description='What type?', choices=typelist),
                            target: disnake.abc.GuildChannel = commands.Param(default=None, description='Channel override, optional')):
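The hunk cuts off before the command body and the extension hook. As a minimal, hypothetical sketch (standard disnake conventions, not taken from this commit's diff), the rest of cogs/setup.py might continue along these lines:

        # Hypothetical handler body (not in the diff above): acknowledge the
        # interaction so the slash command does not time out while the advert
        # channel is being recorded.
        channel = target or inter.channel
        await inter.response.send_message(
            f"Advert type '{advert_type}' will use {channel.mention}.",
            ephemeral=True)


def setup(bot: commands.Bot) -> None:
    # Standard disnake extension entry point: bot.load_extension("cogs.setup")
    # calls this and registers the cog on the running bot.
    bot.add_cog(Setup(bot))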

View File

@@ -0,0 +1,161 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#if PY_VERSION_HEX >= 0x30B00A6
# define GREENLET_PY311 1
/* _PyInterpreterFrame moved to the internal C API in Python 3.11 */
# include <internal/pycore_frame.h>
#else
# define GREENLET_PY311 0
# define _PyCFrame CFrame
#endif
typedef struct _greenlet {
    PyObject_HEAD
    char* stack_start;
    char* stack_stop;
    char* stack_copy;
    intptr_t stack_saved;
    struct _greenlet* stack_prev;
    struct _greenlet* parent;
    PyObject* run_info;
    struct _frame* top_frame;
    int recursion_depth;
#if GREENLET_PY311
    _PyInterpreterFrame *current_frame;
    _PyStackChunk *datastack_chunk;
    PyObject **datastack_top;
    PyObject **datastack_limit;
#endif
    PyObject* weakreflist;
#if PY_VERSION_HEX >= 0x030700A3
    _PyErr_StackItem* exc_info;
    _PyErr_StackItem exc_state;
#else
    PyObject* exc_type;
    PyObject* exc_value;
    PyObject* exc_traceback;
#endif
    PyObject* dict;
#if PY_VERSION_HEX >= 0x030700A3
    PyObject* context;
#endif
#if PY_VERSION_HEX >= 0x30A00B1
    _PyCFrame* cframe;
#endif
} PyGreenlet;
#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type)
#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1)
#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL)
#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL)
#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent)
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 8
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#ifndef GREENLET_MODULE
/* This section is used by modules that use the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
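For orientation, the comments above map each exported C symbol onto its Python-level counterpart (greenlet.greenlet, getcurrent, switch, throw). A short Python sketch of those same operations, included purely for illustration and not taken from this header:

# Python-level view of the C API documented above:
#   PyGreenlet_New        -> greenlet.greenlet(run, parent=None)
#   PyGreenlet_GetCurrent -> greenlet.getcurrent()
#   PyGreenlet_Switch     -> g.switch(*args, **kwargs)
#   PyGreenlet_Throw      -> g.throw(...)
import greenlet


def worker(message):
    print("worker got:", message)
    return "done"                      # returning switches back to the parent


g = greenlet.greenlet(worker)          # PyGreenlet_New
result = g.switch("hello")             # PyGreenlet_Switch: starts worker("hello")
print(result)                          # "done"
print(greenlet.getcurrent())           # PyGreenlet_GetCurrent: the main greenlet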

View File

@@ -0,0 +1,19 @@
Copyright 2005-2022 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,237 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 1.4.41
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: importlib-metadata ; python_version < "3.8"
Requires-Dist: greenlet (!=0.4.17) ; python_version >= "3" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
Provides-Extra: aiomysql
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiomysql'
Requires-Dist: aiomysql ; (python_version >= "3") and extra == 'aiomysql'
Provides-Extra: aiosqlite
Requires-Dist: typing-extensions (!=3.10.0.1) ; extra == 'aiosqlite'
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiosqlite'
Requires-Dist: aiosqlite ; (python_version >= "3") and extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncmy'
Requires-Dist: asyncmy (!=0.2.4,>=0.2.3) ; (python_version >= "3") and extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb (!=1.1.2,>=1.0.1) ; (python_version >= "3") and extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: sqlalchemy2-stubs ; extra == 'mypy'
Requires-Dist: mypy (>=0.910) ; (python_version >= "3") and extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient (<2,>=1.4.0) ; (python_version < "3") and extra == 'mysql'
Requires-Dist: mysqlclient (>=1.4.0) ; (python_version >= "3") and extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle (<8,>=7) ; (python_version < "3") and extra == 'oracle'
Requires-Dist: cx-oracle (>=7) ; (python_version >= "3") and extra == 'oracle'
Provides-Extra: postgresql
Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; (python_version >= "3") and extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 (!=1.29.0,>=1.16.6) ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: pymysql
Requires-Dist: pymysql (<1) ; (python_version < "3") and extra == 'pymysql'
Requires-Dist: pymysql ; (python_version >= "3") and extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; (python_version >= "3") and extra == 'sqlcipher'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Python Version
.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types. A minimal
Core sketch follows this feature list.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
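A minimal Core sketch of the features above, assuming SQLAlchemy 1.4 and an in-memory SQLite database (illustrative only, not part of the distributed files):

# Core-only example: schema metadata, generated DDL, a transaction, and a
# SELECT rendered with bound parameters.
from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        create_engine, select)

engine = create_engine("sqlite://")          # DBAPI interaction layer + pooling
metadata = MetaData()                        # schema metadata

users = Table(
    "users", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
metadata.create_all(engine)                  # emits CREATE TABLE from the metadata

with engine.begin() as conn:                 # a transaction delineates the work
    conn.execute(users.insert(), [{"name": "alice"}, {"name": "bob"}])
    rows = conn.execute(
        select(users.c.name).where(users.c.id == 1)   # bound parameter, not a literal
    ).all()
    print(rows)                              # [('alice',)]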
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-
initiated decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
Documentation
-------------
Latest documentation is at:
https://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.

View File

@@ -0,0 +1,486 @@
SQLAlchemy-1.4.41.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-1.4.41.dist-info/LICENSE,sha256=dP1v9f1RkXCFEYZD8C5ZwxbqNkRvK9xgAtOyqk0OdxU,1119
SQLAlchemy-1.4.41.dist-info/METADATA,sha256=gNJuv2R12HSAFjkjYhxqbhL9_7FmUvAU6ra5ijFGCKM,9972
SQLAlchemy-1.4.41.dist-info/RECORD,,
SQLAlchemy-1.4.41.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-1.4.41.dist-info/WHEEL,sha256=W26pYN7HLsBT1jrDSL9udgf_mdNKJmYmL23sIP-FcgM,102
SQLAlchemy-1.4.41.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=5BxGQv0nxzcyVGn21eDrHAB_mudXHL1gFbp-8Bx6NvE,4272
sqlalchemy/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/__pycache__/events.cpython-310.pyc,,
sqlalchemy/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/__pycache__/inspection.cpython-310.pyc,,
sqlalchemy/__pycache__/log.cpython-310.pyc,,
sqlalchemy/__pycache__/processors.cpython-310.pyc,,
sqlalchemy/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/__pycache__/types.cpython-310.pyc,,
sqlalchemy/cimmutabledict.cp310-win_amd64.pyd,sha256=7ONb9bFWiFZ7lKfQkHuDP_SaSxk4SyOBjlm3OOWs7Yw,14848
sqlalchemy/connectors/__init__.py,sha256=2h09OYe_UZoXRSQi_KmHawkWDOuzBZj2g5ABtmcBKTo,289
sqlalchemy/connectors/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/connectors/__pycache__/mxodbc.cpython-310.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/connectors/mxodbc.py,sha256=-zj3Jg0Py0Uxlh_HA7KrPrbrpbfW1E_17HcLoF4X3qk,5950
sqlalchemy/connectors/pyodbc.py,sha256=b_T2xsrPXfW77LHPYKz1zYc3pyFnYH8_5V4IZLLAw9Y,7048
sqlalchemy/cprocessors.cp310-win_amd64.pyd,sha256=0H1uba0D64PbIiS3GaMxmCRIy8feGL5eNpEgGXwvg7w,16896
sqlalchemy/cresultproxy.cp310-win_amd64.pyd,sha256=qfErQuv_BRyF6Utqk2ZZsumVbRr5JFzserPNdvoQHfY,20992
sqlalchemy/databases/__init__.py,sha256=Gb2NTUKDtuIKhiegB8byTdZRn4lUbbRqXjmKT2EgVZQ,1048
sqlalchemy/databases/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/__init__.py,sha256=SBefwy167apf6mMUy2OU9f6naeWdrAqBoyXSNaTGMB0,2157
sqlalchemy/dialects/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/firebird/__init__.py,sha256=WjFeOMCyNJpexU5oMcPS-Wez2mPIGtXsO2EUqKhaKAM,1194
sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/firebird/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-310.pyc,,
sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-310.pyc,,
sqlalchemy/dialects/firebird/base.py,sha256=Wxhys-aoUSIqJrmaA-f1QNk9XpkayJlhfgDBOolJ4jg,32160
sqlalchemy/dialects/firebird/fdb.py,sha256=DdO9GQE2mHDUrA1yQSvMBBbzN3ERPRBEj5ZmgBu7xmA,4228
sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=SkZwqKhxajeTXychIN7DUKtCGpso_kRnk7T0pEeLc8c,6681
sqlalchemy/dialects/mssql/__init__.py,sha256=vs9yfSni1Gz3Nz-k-8wfPSXBRo1LVLJMqEAtYd4CxAs,1873
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-310.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/dialects/mssql/base.py,sha256=LPxNgmfw4pCpouIKEDJ550u3E1I6GkcoFG49kRGK8-g,120024
sqlalchemy/dialects/mssql/information_schema.py,sha256=gT8l6R_uPdmflprSiGdiZWq00i6TKC5_qfSz9cEdEiA,7816
sqlalchemy/dialects/mssql/json.py,sha256=bAi9z-htrC6adJUiBRetVj3RwtJ2GNN5I_KGeHFL8_s,4683
sqlalchemy/dialects/mssql/mxodbc.py,sha256=Wga13M6o1tpQH9axMKN6AL0JlA6xokoF9pIaSlb2HxQ,4958
sqlalchemy/dialects/mssql/provision.py,sha256=oTGw0l5RNcXIWkA0s2erVgk2XKiNw26IRzZE84tN2uE,4371
sqlalchemy/dialects/mssql/pymssql.py,sha256=_4_BDAC0Stw_e7tCsTs5znUXgjoezTsYS_3_A-GCTnU,3983
sqlalchemy/dialects/mssql/pyodbc.py,sha256=b_Kl7xnr9Q38cUJ8g4-caQtpXvhodF1YxLstcGtxEZo,25105
sqlalchemy/dialects/mysql/__init__.py,sha256=TjqqUAYi-dPaWLSZppFvFAw9fZcDqjZ5SSyE0Za3fko,2293
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-310.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-310.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=qhZd-KR3xhGyLqiN4NjdDDb_VsBYnUWv0siYO7PrVZE,9926
sqlalchemy/dialects/mysql/asyncmy.py,sha256=szrjefzFuWXB8olZ1XO7x5mNfWK9apFaxcuwUJy51zQ,10213
sqlalchemy/dialects/mysql/base.py,sha256=WFocSULwwuyEJnYH5P15X0vrxwzsQe9VYfUv5MvgaIY,118534
sqlalchemy/dialects/mysql/cymysql.py,sha256=723TrwHA1fjSqAKSAuBnWyBY_K0Hr8POHq6xYbZey9I,2353
sqlalchemy/dialects/mysql/dml.py,sha256=d4dcIINNagmxaB-4APUKefPzq_rGFjav77G-NhUAP-8,6401
sqlalchemy/dialects/mysql/enumerated.py,sha256=EMZeA_8UEfGd0jlwSY21ajhzBqe9xMYofaEqFTyQOds,9627
sqlalchemy/dialects/mysql/expression.py,sha256=opg17QfNs805tLjcxdTLs_nGthpL-vSEiXl25iAPOO4,3871
sqlalchemy/dialects/mysql/json.py,sha256=OobtcQPNOcLwyQ-ydC3FOtlXr1Q2PhSwZrnmZP40opA,2397
sqlalchemy/dialects/mysql/mariadb.py,sha256=4NzNKjpGudJJLQQIYg0oYhlx4WKEPuncCPSTsfV6t6g,610
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=KcAu1GOwBa50X7pWh96dVUBkdFzUGauRC2X68KDjrko,7803
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=qB5mrfTA1GOEKPoLM2Bqi0FyfftQQ4gv0-CKB27IdCk,7930
sqlalchemy/dialects/mysql/mysqldb.py,sha256=wLhLazw4F7L_9OVoI6TLJcoYIZsyMzBbLafr1KIsaGg,10768
sqlalchemy/dialects/mysql/oursql.py,sha256=WY1sEFgn6NXnnxFCWtAaOrMWU-gVavjrM7YQS1y2vqA,8796
sqlalchemy/dialects/mysql/provision.py,sha256=-gCG7MwEMtztSmNU107Q563t-nN-oqTVkCiay-HRRQI,2727
sqlalchemy/dialects/mysql/pymysql.py,sha256=iXlxFuBQyCiV2EwbrwDImDw2al_fyFuYSgI9xXgo7ns,2868
sqlalchemy/dialects/mysql/pyodbc.py,sha256=cTkEvvMtTi8r2DblPDuaMRrolsJlFO5Wq6Mr7dk-94s,4426
sqlalchemy/dialects/mysql/reflection.py,sha256=oiWv6U-Us5DIbD0bH7zuuMOu9hi7fXPQLUbTKieOxsE,19111
sqlalchemy/dialects/mysql/reserved_words.py,sha256=0EkRPqXXQ8elC1aFeYSUHIbsk5pnBXynTiKlg428ixg,9668
sqlalchemy/dialects/mysql/types.py,sha256=hIOsOSHwxQ8nNbn8ZxWtEjPtoUb1WRTbN9x4v5apaJ8,25362
sqlalchemy/dialects/oracle/__init__.py,sha256=ZRvKUEwMK3VYoZKJGOEQWKuUVtVNCzfwyc4nB6iMzfA,1287
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-310.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=3L7YWkTKK6hU5KK2DBVNL9HPcR_q_dGIdHsC_PivAro,90085
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=mX9VtepXIGMcS0egCuQS7QYxYRxHS4VsEwBTLCbd3wM,54626
sqlalchemy/dialects/oracle/provision.py,sha256=SmVRWQL5WhTMhEyesNt_1CyROyQAKCoZz7-2IAKTb3I,5966
sqlalchemy/dialects/postgresql/__init__.py,sha256=ThqXwUhiylsdcV65hgldUNdHeyFQjWWq9XX_AtVsC8A,2626
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-310.pyc,,
sqlalchemy/dialects/postgresql/array.py,sha256=E1BWXSNbh2VNUuFmUP_zYhelrYhPGjP7joCMTZDF8GQ,14635
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=pvHAWjtjiWKG6AevBc5A5_sV8-jaCiiva8ahghgk3xk,36564
sqlalchemy/dialects/postgresql/base.py,sha256=9cuOJwn8rC5t_XfpgOtJi5n9L5ILAoIXUJ6R-0xSjPE,163752
sqlalchemy/dialects/postgresql/dml.py,sha256=7HGPHwfpTsAxTryQKjI8VLyQTRMTIfFsVduAlI7vpOk,9856
sqlalchemy/dialects/postgresql/ext.py,sha256=ZbiRJpnGua9_33cMRILHV_L2EwdeZepqYU0FiYRReDg,8716
sqlalchemy/dialects/postgresql/hstore.py,sha256=CPDPzxbr9T83kOVnqzAVC_1kmqcQpHvUK27jlUm_G7E,13332
sqlalchemy/dialects/postgresql/json.py,sha256=Z-V6h8Js_0rqLDNgmlTaEbimA126LyKTZ5ynprGLT78,10883
sqlalchemy/dialects/postgresql/pg8000.py,sha256=mxnnaOAjcwk4XLIQznXwopCJ2Mq4OhrsrhKzal9mWh8,17638
sqlalchemy/dialects/postgresql/provision.py,sha256=EI5cIapBexG9QGJSAwat98fGkaHuz0XC9JoQV9nN58w,4443
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=PqPP8zr3wATpOaa9g21hHTZkV9M9M96otqlzpoouxhs,41428
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=MFskOadJ1J9DFH9qFQfWI-3YB_jZTPmuDKRGW4CK8Zg,1751
sqlalchemy/dialects/postgresql/pygresql.py,sha256=g-cgyaIYtRZ81WVMX6bR7ZkGvWgSJFQSA5KXaYYtuvE,8863
sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=A78mYw5N2kJqc1LVidQPRZ6Xw1xqZ-WGDpCom3vTSKM,3819
sqlalchemy/dialects/postgresql/ranges.py,sha256=Mx6NXylBuaIc41OS0ukgXxsZIccQuG3SF6NgBCGqYbY,4901
sqlalchemy/dialects/sqlite/__init__.py,sha256=1gBHBjm3vx-m1Z2YaqtC8ylSmC1W8lhe9R8H8b5ZBo4,1256
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-310.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=hdIo9cm17A_CCW4Zwv49ooKDvTXH-28bYJQWImcrguU,10298
sqlalchemy/dialects/sqlite/base.py,sha256=TwkY4Gvps9499Vcz4nUzv6TTNIv5JVEPAGLQF_5S9pM,90991
sqlalchemy/dialects/sqlite/dml.py,sha256=u6DayAbj-FKv-xjD_XNu8-i2oXIOIfe3fBNakRz9tB4,7065
sqlalchemy/dialects/sqlite/json.py,sha256=bz_1axFG5YI9kLszE-oiCN3-z95zIPPcLgVwug_-AT4,2602
sqlalchemy/dialects/sqlite/provision.py,sha256=3F5ZX2dYGMFAGra99UVavCihth1_XazJXX9XAet8gbw,4818
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=9ZJh_h2uvKtIkCfhg7AywRMVeynMLC8I00kuPK0NUyE,5769
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=eAfyIYa_9KhHW39sffQcc0eNEXUMubx0lcSbYto1Uzo,24054
sqlalchemy/dialects/sybase/__init__.py,sha256=TlAeZ4gYkJk7s-t85JWcENL9o0jBR-RURkA1g1MGvDI,1431
sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/dialects/sybase/__pycache__/base.cpython-310.pyc,,
sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-310.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-310.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-310.pyc,,
sqlalchemy/dialects/sybase/base.py,sha256=nnIoOtknIQLc3qykSAMozwi5zqCbHDCW75fIEj1l3Gc,33521
sqlalchemy/dialects/sybase/mxodbc.py,sha256=rDteimLqg5DCVXG7HfOeUAiOgxf9yuj5MZ9ncOEK0dA,973
sqlalchemy/dialects/sybase/pyodbc.py,sha256=sYANs1jVJTXQbs3WciOuQuUOLO7Khgq1zuHa_09TLeU,2319
sqlalchemy/dialects/sybase/pysybase.py,sha256=sWyYgdHd6skj51mXAB3RMZC-moGOEUFv2dSk9a-tJDQ,3476
sqlalchemy/engine/__init__.py,sha256=3cq8BMoQfMgR8dZjnMPwI4bdnMFWbJ221ipPJSc1HCg,2170
sqlalchemy/engine/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-310.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-310.pyc,,
sqlalchemy/engine/base.py,sha256=voxLEfvXG7SOQBZVv0BzBiwaRO0l3EteTDdC3VLHU0U,128036
sqlalchemy/engine/characteristics.py,sha256=DrhLcmpnkMpmgo_kgQpcujoD4nW81vTt-GkuWqW9QaY,1873
sqlalchemy/engine/create.py,sha256=PW4MyzmJC7CU9qCmduN3AB-BnVRg0jIxdrYe-F0VQi8,31540
sqlalchemy/engine/cursor.py,sha256=8YrH8PHcO4aznkbwt2oY5ZUaWXWBUmQNGyF7EgUlmRw,70068
sqlalchemy/engine/default.py,sha256=LmgY7Db-4fbNr9fKISoOxotTnXK2gbO5tAz1C0LK7mk,68925
sqlalchemy/engine/events.py,sha256=yuG6AlOobpVZOANXK7T-z598VDiTIIUOb0lNCvVkZ08,34257
sqlalchemy/engine/interfaces.py,sha256=MysnRnMpO4JWHGK0XrbsknV8sabdhsfa5z6jU5126b0,60708
sqlalchemy/engine/mock.py,sha256=IO1Who4xVrfOc7apfataqhA8djkUrkIRIliRocCM81U,3744
sqlalchemy/engine/reflection.py,sha256=X-cPoERL95dyj4ukibKFSDas3ERHGnxReWlgUFTJP_I,40090
sqlalchemy/engine/result.py,sha256=mWQMuQuwvQospcI9qksl8ad0vHc-nTIYvPCAdQpPxek,60849
sqlalchemy/engine/row.py,sha256=PyQhXcqXpc7xrjAaitHtNBtkZop8UZLrDL0CbcxOY68,19311
sqlalchemy/engine/strategies.py,sha256=KV0fcHFsNFPs3DvB_PBSKz0AXSd5BeDyVXbIknMYKHA,431
sqlalchemy/engine/url.py,sha256=XB06xzpZqxyd0Lpjh9WURvrAa0erQfYdKgIpYn1pAIc,27279
sqlalchemy/engine/util.py,sha256=DYYyS8yGN7ivRIgqa2ikdKX6Gel4dklsDnCBcoEDz3Y,8695
sqlalchemy/event/__init__.py,sha256=W0EsKWlR7MwIz4PD4p2QtI_Boj3d8yYhHJjDau_qyrI,534
sqlalchemy/event/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/event/__pycache__/api.cpython-310.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-310.pyc,,
sqlalchemy/event/__pycache__/base.cpython-310.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-310.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-310.pyc,,
sqlalchemy/event/api.py,sha256=U1qNedjrauO62C0Nl2oUtQ1BOnL_FYh4u5KqMMrbWvk,8262
sqlalchemy/event/attr.py,sha256=gHOqhCoXMtzjLhJ_2yKRetFvsmSBUw_H1WXyecID8_I,14568
sqlalchemy/event/base.py,sha256=9LjfgYLbRq-3vTg-20Fn9_9NOrFR30-fUToPZXrGJ0g,11281
sqlalchemy/event/legacy.py,sha256=HcGeCJ1-LXPJfUAX_pPpB3dOZUMHtkkv6vfbLvEu4Kg,6455
sqlalchemy/event/registry.py,sha256=aH-pxTgMkbLjFqxRni9QgLCXn8jFP58ohrxSnqY0ahc,8783
sqlalchemy/events.py,sha256=D6pT2iZJzxyDShQsZmctR_3hsj0rFNdf5451vpx9Ff0,481
sqlalchemy/exc.py,sha256=BlvaHCAo2TFZgtV9fNnKlopeB5aTKr9iTZfTBuoBYUs,21849
sqlalchemy/ext/__init__.py,sha256=rW1kXSAfsGwr4nlz-rie0mkH_cIxx625r5pFgr-vYh4,333
sqlalchemy/ext/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-310.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-310.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=x3r896H0OC4OLW9VKL-EAMewIdXIoXG9BByxtzV29W8,52766
sqlalchemy/ext/asyncio/__init__.py,sha256=CbQR6BQDDcB2u-u1RRo9d_mVWxJUBVkvzmaZ_vphyqk,845
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/events.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-310.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-310.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=SgGrnW1A817GFDsAbA1-juPIMTCx-LX-agyYaE-99Ck,2369
sqlalchemy/ext/asyncio/engine.py,sha256=-_OBuXna46ZCAzKEWQaLsz3i-mb1CEWSZ7osjbsbWwg,27363
sqlalchemy/ext/asyncio/events.py,sha256=gOfyb6X2AF7_vbAy-UX690ieIR3A0-L1MEGfI_n-cPU,1467
sqlalchemy/ext/asyncio/exc.py,sha256=GORfjC6NfH8uZIt73JhxJKT0D59wOD8LoOxVOlXFw9g,660
sqlalchemy/ext/asyncio/result.py,sha256=uzMpZnDLmEcG6aNjOg2cCjCEnJhLuhgAvj5UOnN-zv8,21909
sqlalchemy/ext/asyncio/scoping.py,sha256=G8AGqbSyCViJc2fRKSNFx0hHToVnx43TA5mQt1E6MCo,3067
sqlalchemy/ext/asyncio/session.py,sha256=S9LY-ocexGHDDwTP-4z1zDH9ZwvAasuAMLC3FX6xyCM,24784
sqlalchemy/ext/automap.py,sha256=fIZXHSPb90Xj8DJO26EX6MnyrHt0cQOircVmuZzkpVg,47016
sqlalchemy/ext/baked.py,sha256=uKMnOFtXanvVnNgj3QOETY8iP3vDjA4G_ZKHmxSvx0k,20617
sqlalchemy/ext/compiler.py,sha256=IXdO7z8uTtKXsx45k1Mi0sQDmkLrZXUYW1ED62gExvc,23242
sqlalchemy/ext/declarative/__init__.py,sha256=GZ-jwVK_PLihu4qaLw-4NCn9dAT5j2FxbEQrOtnZnWE,1906
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-310.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=RTbMBLqsI3cfpPm5Em4xNX2Su6CZiVQC1QxYT38u8ws,17004
sqlalchemy/ext/horizontal_shard.py,sha256=KyF4Diqr3mCk4PYyAmbxdHqTYAqLpcKriBuGWLOiKyk,9178
sqlalchemy/ext/hybrid.py,sha256=QX8vGO4E4SBylaxw8HONOiHWK4xufjcbmZP4Dv_nU9E,43145
sqlalchemy/ext/indexable.py,sha256=8niAhjt-tymCUtAXvyVyziyDAKFu8zMx4i5Xb1MO5lg,11607
sqlalchemy/ext/instrumentation.py,sha256=kx2zn8mN30F-CbWqiBxb7HgnMSJChLcvQhJ8Tb8VIXo,14802
sqlalchemy/ext/mutable.py,sha256=JbRRdDXtYAOlHG5LeLK3hqEfZ6lI71CZNnN1T5J4KP8,33458
sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/apply.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/infer.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-310.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-310.pyc,,
sqlalchemy/ext/mypy/apply.py,sha256=91CSdH2xynaH021h-3N8OgROti9AKEtr3vS1il7NpXU,9909
sqlalchemy/ext/mypy/decl_class.py,sha256=Prj8raNTj62sthYQM9YkAN5i253ZtsjkOerI13Q0g-Y,17859
sqlalchemy/ext/mypy/infer.py,sha256=FGlMhNssaytzeV2mY5eMzXPhFmqovjfNkC9jH-PoJbw,18584
sqlalchemy/ext/mypy/names.py,sha256=x7eRwmrR-Doo3TeiCmOdWKIHw0bINnSRnILzUStdvxM,8183
sqlalchemy/ext/mypy/plugin.py,sha256=-NJjASfM5fLPA85Mq7LN_78x565FetYx-WExRVHVyNM,9529
sqlalchemy/ext/mypy/util.py,sha256=zDlbABxIbMOFzeSswlAI2RFnWUqBWGdfBn2otOiTLVI,8547
sqlalchemy/ext/orderinglist.py,sha256=Q-TCtQ1bpi2Pq75yJkUL9K4Z2ALtqxTbvEMdF9y_Lpk,14263
sqlalchemy/ext/serializer.py,sha256=Wlb2im2MtVg9JPigiCAQwwmyGVdMhPf8Y51AeQaJ89E,6133
sqlalchemy/future/__init__.py,sha256=u-aPKVzAdaRVuXMRAtFcOVF3DsnnRuyFa3Hg6xQ4D7w,543
sqlalchemy/future/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-310.pyc,,
sqlalchemy/future/engine.py,sha256=mSCb1fWX81P7iHKlulgZPnwLGlfcjB8Y5vhOB_fvOL4,16597
sqlalchemy/future/orm/__init__.py,sha256=EFbmp8wsBEdOsAMouDPBpP6xkONT0Eu73xayKQ5d2Uw,299
sqlalchemy/future/orm/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/inspection.py,sha256=kp-w5ERC-cGh91Itw0ATEtWiEz0FJuQG_LcU6qBIfsM,3144
sqlalchemy/log.py,sha256=hH5TstVQ32ivOB-s5gVQZ_LwqooDSAFSLjibB5EIi9U,7384
sqlalchemy/orm/__init__.py,sha256=DAWyuZ1U-4ZCAbWRO4iUfBCn3ESf-iLX11s_hPaVZsc,11308
sqlalchemy/orm/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-310.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-310.pyc,,
sqlalchemy/orm/attributes.py,sha256=RvhU6RrPWV9e-4pf1BvZdIbi0P1gIjuwVUK8yUm0ai0,79429
sqlalchemy/orm/base.py,sha256=wrB54uDLzbdvlgQzTA7AGxP0US45z0UAcXIb_eQlMd4,15640
sqlalchemy/orm/clsregistry.py,sha256=sHMwdpwuOS9aEUUfLG0btJWIjHbl2LVclw1NhTdXEHo,13727
sqlalchemy/orm/collections.py,sha256=iTS23m05siT0k1edk1B0q1FbF4CJUF2m6fRclr3es5Y,56429
sqlalchemy/orm/context.py,sha256=NZ-tyujmsYTKOyQO9AGDKaPMnMP9o5gacC5ZR3SyXl4,114246
sqlalchemy/orm/decl_api.py,sha256=MgWNxkqi0ohyHMMcIcefX1yh-lsKdwjrguI51EePg3U,36626
sqlalchemy/orm/decl_base.py,sha256=w7caFv9sIPxYhgXG_mAaYcqbDw5HPpvUJNhFSfNN0Ug,45956
sqlalchemy/orm/dependency.py,sha256=KtPFM25nNGZ06qOG1kFUi_VVRy_WSXFm0Dx9l9cS1-Q,48277
sqlalchemy/orm/descriptor_props.py,sha256=doXozzd36Lvedk39zb2bZYaZ4cvCVotaujijouRPd8g,26732
sqlalchemy/orm/dynamic.py,sha256=kheoDUs0Ff3kZ36_qf2CUtFLScKRWMYqNvt7JbjSITU,16448
sqlalchemy/orm/evaluator.py,sha256=MDmc5kKAhpPYYzOV_5cDvIs4GInZa47LUkE1tMXGQn8,7093
sqlalchemy/orm/events.py,sha256=kddD-YBxnfixLLbFLs_LJsT-Js81VYfMOq-RS6AAfp8,115173
sqlalchemy/orm/exc.py,sha256=IxhF_AZ2qbLjHPHrWNXen4KJNwACSnK9qLQVyaE2T3Y,6736
sqlalchemy/orm/identity.py,sha256=-eNr9jsNTYwKOmtTF3LGekm3nNL6OnvWF4L_uIgiSZA,7487
sqlalchemy/orm/instrumentation.py,sha256=_pLiv3F6YyfQMhqUi7jhdiRB3w-pb5jtMdZyV8XW9Ow,21030
sqlalchemy/orm/interfaces.py,sha256=NzvUB847ltQGj7jqC8F7nQhuUfLKR0ydHOtyS4_Zk-8,33362
sqlalchemy/orm/loading.py,sha256=iElrL1vyw2FHkKKURO67zbWZ5fVOIYC0vXa93ZkYwm8,50782
sqlalchemy/orm/mapper.py,sha256=nj00N8a11Bzp3BwQBuvRYeIt68wCe13hFoJLwNLEtaI,145947
sqlalchemy/orm/path_registry.py,sha256=eZWbu-pcNflx8n_AwiRithC1sOJsIaoy6DpNaC_1G2M,16930
sqlalchemy/orm/persistence.py,sha256=FP5knuE1xwDEbV2zc5khlQsdUYVPy_mgqTtt2g7ZL9s,86767
sqlalchemy/orm/properties.py,sha256=I3ZnJJ7ZPy8oWuSz2o3YV7I8WRuT_Bm6tkC7nsKhTnQ,15095
sqlalchemy/orm/query.py,sha256=yU6O2CD1o6FJs26XnCMyCkrH6IkkUNhG2b_awGpViVE,129061
sqlalchemy/orm/relationships.py,sha256=oF9rXIOnoxNr0Ot434txPQyalMU4-Eg9xK6YOqq1Xo8,146930
sqlalchemy/orm/scoping.py,sha256=30vfl1vr4yR0Vz-5OzG6VOwJ0ZGMN4SrypbOEybqU2I,7485
sqlalchemy/orm/session.py,sha256=TtVfLf9Zn1h_nBwVsJjf703mLQdjh-eeC2I-NHF_WOA,165430
sqlalchemy/orm/state.py,sha256=2p8xqSyLsK77hDoDN1WfoVI5v4y_nQd3FvvIq9eFz48,34549
sqlalchemy/orm/strategies.py,sha256=VTFbrof4JPIDxSJ1hHyn2OTNcrF__hJg7bQidLm-k6Q,110705
sqlalchemy/orm/strategy_options.py,sha256=UjHr5LBWFMSWvfPXxCJzml_4lD5Ct1ULca26r13cu0c,69465
sqlalchemy/orm/sync.py,sha256=EyKdz3NKVvVmAMwB5vdmGvLmAgppILpOWa7BxEIqsTw,5991
sqlalchemy/orm/unitofwork.py,sha256=xh-j1z66pmFDwxlWjZELHf6ExXV0-wlFn9eAuU1Il_E,27874
sqlalchemy/orm/util.py,sha256=sprQH4YW3-WjnivlbDGWFGyzpHWUyCClPRef4AZkGEI,77470
sqlalchemy/pool/__init__.py,sha256=9j4YgD_2W2OBbih80IY5_jHyKaDZBSIL8GhwsTupqkc,1659
sqlalchemy/pool/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-310.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-310.pyc,,
sqlalchemy/pool/base.py,sha256=XTs1_0DtAv_-C95RLV3kfCTO1u-7_LJs1AqaZSTHmuI,40244
sqlalchemy/pool/dbapi_proxy.py,sha256=jLDwXbN3irsWPrbrAHs0jWGvIOKRcg44JDGWo-NEF10,4376
sqlalchemy/pool/events.py,sha256=UkEBvlJnaovQLOY2FDVQugkWOLz6LDB5aGvZSDM3dgE,10583
sqlalchemy/pool/impl.py,sha256=GiYG-rPejaB9eo9TZK_Gh5_SlGoz-uOyEl_AG4W4Fjc,16297
sqlalchemy/processors.py,sha256=wRXMZbN7yCZK1LLFq8-P1aRumS0_OrKMwhSDvBs75IM,5921
sqlalchemy/schema.py,sha256=Mw5ZePzKRbj_yDPnassmTcg8P462RSDa3nK4P6zEqlU,2472
sqlalchemy/sql/__init__.py,sha256=xlG-KPS7czj8JULYxdkcW8hI1DzwSoE9U6nNfWLIH5c,4811
sqlalchemy/sql/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-310.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-310.pyc,,
sqlalchemy/sql/annotation.py,sha256=rEfFCBUM9VhmMlKE3ffeEk-oJu9k1TTLtcS9O_XUm7Y,11866
sqlalchemy/sql/base.py,sha256=LfadTCd4ZZIs8-8QUWz8MnyxTSFImaMZBAuoovdWBb8,57599
sqlalchemy/sql/coercions.py,sha256=lPJ6_p9ANElf0NgiEYefqEj7OeCRaKnnVv3HaKtRtlo,35626
sqlalchemy/sql/compiler.py,sha256=YbRdYKWtHFcMnKY28aZct4_H_OsvvxSnAPh6P7ffc-A,193341
sqlalchemy/sql/crud.py,sha256=PpisSEvrpSio2d2TJPCnxk7n0W70Gjwzo7yjFD2p-cU,37058
sqlalchemy/sql/ddl.py,sha256=Rj92-HBBZJJpZ3VmY6k-CuwJtu4NoZAWZanTzPau468,45549
sqlalchemy/sql/default_comparator.py,sha256=VL4Ykw5SlKK-95eIvLc4A-f5_FxF6ii1VtJ31r3rtzM,11538
sqlalchemy/sql/dml.py,sha256=ao8ReN6FV4XyCl07mL0JEJ2gTRdk60VH5dkoo9JwF_M,56177
sqlalchemy/sql/elements.py,sha256=sHt_1AujmnFdy0TnSP1uIfSbq9cEnTD-jSEe3SUGYJs,186936
sqlalchemy/sql/events.py,sha256=5F3mCd7wfRm5Q0hjKKkOPbrFf18VzraK7C3fDgGtCCg,13574
sqlalchemy/sql/expression.py,sha256=UEbH1-TdhgRZ-ZyC019l1YYJzah_rnzy1jr590KTsg0,9106
sqlalchemy/sql/functions.py,sha256=VSe_o7igqUyHvJsSpQe06Fc2BRl1ffqwCIvytUYyHwE,50057
sqlalchemy/sql/lambdas.py,sha256=n4f07Ojmzm2GU6VS9gFry9yKdhhAlv6knw8e6uHl_Ko,46227
sqlalchemy/sql/naming.py,sha256=fhlnu3UQ4o5v0yEhlbRZFbQQAW_I93ktHpOpwHK23P8,6996
sqlalchemy/sql/operators.py,sha256=_cwzYtOVUtD4ceRGrZM_0x9o6iQdPmkcREiQ-A3UuVs,49887
sqlalchemy/sql/roles.py,sha256=jumdmZ88-6eibELufo0fLZVLCIwrMZpFx2sUeGMP3NY,5877
sqlalchemy/sql/schema.py,sha256=JwiAxiu6rkZWP_hbjoijb7uPSqPgn9vDRW6GGwO7oj8,200615
sqlalchemy/sql/selectable.py,sha256=5hRHZOoPRiXYgJY4VMMK9rLQqhPcHXTnhT69QGHLpwo,244337
sqlalchemy/sql/sqltypes.py,sha256=XvfAyUmssJ4FZb8i9GL9uNUlEjRNNNJXj9f2fesHgIM,118122
sqlalchemy/sql/traversals.py,sha256=pKfe8xcaMCzAL8k7NGwpZsKA7_sibLxkuzIaaEC5Pio,54278
sqlalchemy/sql/type_api.py,sha256=XbE1qaNVj0kuTLGVrX4egnA-x09Qegz5OFDIdT1gP1o,73010
sqlalchemy/sql/util.py,sha256=-GNA0exQRn12W6gDQXGv7yqyT-a9n-XnJ6xBf--D5tM,36976
sqlalchemy/sql/visitors.py,sha256=yJGOQh_emoU24voh5mNITS8SR8BEyfMgGXh4J3EmNFM,28181
sqlalchemy/testing/__init__.py,sha256=vLicxa_6BjJyIV5nhHv7jE8zgOzrptVARZueBzPNQ1I,2936
sqlalchemy/testing/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/fixtures.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/mock.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-310.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-310.pyc,,
sqlalchemy/testing/assertions.py,sha256=A4inDz60Q75AeRFr8qSW2r7LoQKFlex0XWNtmis9jSw,27347
sqlalchemy/testing/assertsql.py,sha256=CEkFaGpBkR_R0YgtrPYGYN6ELQIGygLzcFD1qFtcrck,15421
sqlalchemy/testing/asyncio.py,sha256=71yaLEj5B7PEIr0UKmno7VTiqkmUGmvxSeKCJwBIDu0,3799
sqlalchemy/testing/config.py,sha256=ZIaA6IRBq7-lEPeQaNVX6J15mVlEUcsgzMw-yQPSgRU,6752
sqlalchemy/testing/engines.py,sha256=dFF1Y7BK4dxPtnhcfzIuLot_KgwOW1P_ayBNFYmD_pg,13857
sqlalchemy/testing/entities.py,sha256=clFhuzXngYlpqBLpWdj7tDUEXzw3sFC3ylMbzNmnBxI,3364
sqlalchemy/testing/exclusions.py,sha256=U6KFE34ssz0y8KbjpF4YdEReENLtIxUUZlgWJhVxCV0,13794
sqlalchemy/testing/fixtures.py,sha256=3GdCoxM2LhEqzTItZZndBW7O6pkZYNw7PmU9kARD5_c,27699
sqlalchemy/testing/mock.py,sha256=Ui5wdDUdSGdXWicJoKCiwABlzaca1FsTFby0zFinNPA,926
sqlalchemy/testing/pickleable.py,sha256=XPFVTWNsnLLrTjw5F01JpeUUe4E4fHvIpAHCg9TU5mk,3037
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-310.pyc,,
sqlalchemy/testing/plugin/__pycache__/reinvent_fixtures_py2k.cpython-310.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=Pykdrxtc5QVxY11Z0dyQjtRlpftk9pYeh35xunm-NdU,1755
sqlalchemy/testing/plugin/plugin_base.py,sha256=cxzzd8FWyCxY6o7tklRr8jR9TWtr9bDGiaUeeRBX6y0,22329
sqlalchemy/testing/plugin/pytestplugin.py,sha256=mG2b_NSpklZPSxKGRV7xrE4thcnPo6LED0f5RuVq2T8,26953
sqlalchemy/testing/plugin/reinvent_fixtures_py2k.py,sha256=b9fWp5RXdePykrNviZPXaGDIjOEOfovchez2Ovr4IRQ,3400
sqlalchemy/testing/profiling.py,sha256=oMzNhMpLbQr0lJUXtkl4MDIeZlUX0sN_W8cXhXoEHYI,10901
sqlalchemy/testing/provision.py,sha256=KgkqJazFsLEY9CjeniUApQiR90_ghoQHiDBY2nuCErU,12486
sqlalchemy/testing/requirements.py,sha256=gxvCI9f8P-B18hfMt-Q0H9cSpJJfbQD_TaBH1Entf0s,45017
sqlalchemy/testing/schema.py,sha256=u-twx4-MlLa_PKJ_sOPnoXUZnxyJgjDssrrOhW1O7_Y,6762
sqlalchemy/testing/suite/__init__.py,sha256=u3lEc0j47s7Dad_2SVWOZ6EU2aOMRWqE_WrQ17HmBsA,489
sqlalchemy/testing/suite/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-310.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-310.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=shi2WJZpzAzDCdkmzx0IDEu-URsLLBsdyFdzDqsfpyw,6387
sqlalchemy/testing/suite/test_ddl.py,sha256=BZuYkKaG_tNUkZcFLNd6o1qnfbzU0IYiedCw_NhOJBg,12143
sqlalchemy/testing/suite/test_deprecations.py,sha256=0LUmXIiK8hHUr6tY8cJJC7VErOj9YNGNN-c324k08Dw,5204
sqlalchemy/testing/suite/test_dialect.py,sha256=XpwZxIXlMVsZlP74khuPW2wZskinT0IOT9S6PM5PyE8,11256
sqlalchemy/testing/suite/test_insert.py,sha256=QlMovNeDU-GB5UvTEumce3LLK94PAduKlWe2TqCcLF4,11501
sqlalchemy/testing/suite/test_reflection.py,sha256=nJNt13Z62x2SSaJkxGZkPa78KApE5upWttW9mJHzpGA,59868
sqlalchemy/testing/suite/test_results.py,sha256=wA4SjK46-1TCqqXcOw_a7_9ajMkF3aU4tuLOK9iVWyI,14409
sqlalchemy/testing/suite/test_rowcount.py,sha256=0kswa4gunxRreCpRkFR7Yxff0k0o-2R8fugMmOGcANg,5042
sqlalchemy/testing/suite/test_select.py,sha256=fFQg5DuGyNSTeBfW5hmSqiuAvPx0FUyCYKbETaCfeVQ,57247
sqlalchemy/testing/suite/test_sequence.py,sha256=2KBcs0FtoL3gk37IN2PnRZSnQwt7RKkShAbYQHFTBcw,8713
sqlalchemy/testing/suite/test_types.py,sha256=ur57ApD3PU_AslebV8f5MECRy68XuGqJFmBcsIwcRz4,49552
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=HhAM4_38vpwJFX2d9xh0S_YAIQJQ-79GmfO3ich2Tu4,6943
sqlalchemy/testing/suite/test_update_delete.py,sha256=5moBXgVXSHmSIJI26CfxT6K51-4z8Ftg2uaVOjeDKBs,1685
sqlalchemy/testing/util.py,sha256=mAbW7RfBqC3iRdj0HXy8wU8L7Z49gLVwZ3-DAOHFxDM,12961
sqlalchemy/testing/warnings.py,sha256=5_UhjnFmjWnNDQnGm1YL3XgX6HzsPw_cgP0S0Kb7XCA,2352
sqlalchemy/types.py,sha256=852TfWyZz9HATV9qyb0UcvqYNb8h8vsR03J_avMvIgw,3114
sqlalchemy/util/__init__.py,sha256=-y60JqqdTX0SDsMTX6UUVBv3O75J72YfhonUzaMxleY,6548
sqlalchemy/util/__pycache__/__init__.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_compat_py3k.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-310.pyc,,
sqlalchemy/util/__pycache__/_preloaded.cpython-310.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-310.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-310.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-310.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-310.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-310.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-310.pyc,,
sqlalchemy/util/_collections.py,sha256=zO9bwIkL4UsFchxdXioG7_2vkpl9FoivCDNV3TVYqsk,30228
sqlalchemy/util/_compat_py3k.py,sha256=KYniz_uePhjNtnFU4uCzdSm8xV07AMHaup9p1GbypzM,2262
sqlalchemy/util/_concurrency_py3k.py,sha256=2ZjMjNTYx_KpOHTb51RoB3BfXfqHSIJHNGh15zCIVHo,6792
sqlalchemy/util/_preloaded.py,sha256=sbQl4HD8GDVcf2aTh8_EuQKcFMZrJAd7K-9Wn4fze6I,2464
sqlalchemy/util/compat.py,sha256=NHZdvRyb2regNCmYRAio_bFSD7sh4L3KFcy4cwKXe0k,18913
sqlalchemy/util/concurrency.py,sha256=abTqffi0MZcq7pbhws78qV1CAt3v1L2sgXwtr8i5yU0,2351
sqlalchemy/util/deprecations.py,sha256=N417GuIN1cBYOwUzRh1tndxEa0ew1tR7JryT31T2ngA,12191
sqlalchemy/util/langhelpers.py,sha256=UlBjFpF5lmz4BfwGN7e_xYgzbO735dTNrlF_A6hFux0,58233
sqlalchemy/util/queue.py,sha256=k4Dm1vMGCfRIE4aI7vQ_F0MeOYn5wUVfkn01awf6x70,9584
sqlalchemy/util/topological.py,sha256=PhgKMik0lWCTJwavGcv_CksRKE9UuP0vXRHCMh4NXfA,2959

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: false
Tag: cp310-cp310-win_amd64

View File

@@ -0,0 +1 @@
sqlalchemy

View File

@@ -0,0 +1,51 @@
Original Authors
----------------
* Armin Rigo
* Christian Tismer
Contributors
------------
* Al Stone
* Alexander Schmidt
* Alexey Borzenkov
* Andreas Schwab
* Armin Ronacher
* Bin Wang <feisuzhu@163.com>
* Bob Ippolito
* ChangBo Guo
* Christoph Gohlke
* Denis Bilenko
* Dirk Mueller
* Donovan Preston
* Fantix King
* Floris Bruynooghe
* Fredrik Fornwall
* Gerd Woetzel
* Giel van Schijndel
* Gökhan Karabulut
* Gustavo Niemeyer
* Guy Rozendorn
* Hye-Shik Chang
* Jared Kuolt
* Jason Madden
* Josh Snyder
* Kyle Ambroff
* Laszlo Boszormenyi
* Mao Han
* Marc Abramowitz
* Marc Schlaich
* Marcin Bachry
* Matt Madison
* Matt Turner
* Michael Ellerman
* Michael Matz
* Ralf Schmitt
* Robie Basak
* Ronny Pfannschmidt
* Samual M. Rushing
* Tony Bowles
* Tony Breeds
* Trevor Bowen
* Tulio Magno Quites Machado Filho
* Ulrich Weigand
* Victor Stinner

View File

@@ -0,0 +1,30 @@
The following files are derived from Stackless Python and are subject to the
same license as Stackless Python:
src/greenlet/slp_platformselect.h
files in src/greenlet/platform/ directory
See LICENSE.PSF and http://www.stackless.com/ for details.
Unless otherwise noted, the files in greenlet have been released under the
following MIT license:
Copyright (c) Armin Rigo, Christian Tismer and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,47 @@
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011 Python Software Foundation; All Rights Reserved" are retained in Python
alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.

View File

@@ -0,0 +1,103 @@
Metadata-Version: 2.1
Name: greenlet
Version: 1.1.3
Summary: Lightweight in-process concurrent programming
Home-page: https://greenlet.readthedocs.io/
Author: Alexey Borzenkov
Author-email: snaury@gmail.com
Maintainer: Jason Madden
Maintainer-email: jason@nextthought.com
License: MIT License
Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues
Project-URL: Source Code, https://github.com/python-greenlet/greenlet/
Project-URL: Documentation, https://greenlet.readthedocs.io/
Keywords: greenlet coroutine concurrency threads cooperative
Platform: any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Natural Language :: English
Classifier: Programming Language :: C
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Operating System :: OS Independent
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: LICENSE.PSF
License-File: AUTHORS
Provides-Extra: docs
Requires-Dist: Sphinx ; extra == 'docs'
Provides-Extra: test
.. This file is included into docs/history.rst
.. image:: https://github.com/python-greenlet/greenlet/workflows/tests/badge.svg
:target: https://github.com/python-greenlet/greenlet/actions
Greenlets are lightweight coroutines for in-process concurrent
programming.
The "greenlet" package is a spin-off of `Stackless`_, a version of
CPython that supports micro-threads called "tasklets". Tasklets run
pseudo-concurrently (typically in a single or a few OS-level threads)
and are synchronized with data exchanges on "channels".
A "greenlet", on the other hand, is a still more primitive notion of
micro-thread with no implicit scheduling; coroutines, in other words.
This is useful when you want to control exactly when your code runs.
You can build custom scheduled micro-threads on top of greenlet;
however, it seems that greenlets are useful on their own as a way to
make advanced control flow structures. For example, we can recreate
generators; the difference with Python's own generators is that our
generators can call nested functions and the nested functions can
yield values too. (Additionally, you don't need a "yield" keyword. See
the example in `test_generator.py
<https://github.com/python-greenlet/greenlet/blob/adca19bf1f287b3395896a8f41f3f4fd1797fdc7/src/greenlet/tests/test_generator.py#L1>`_).
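A rough sketch of that idea (an editorial illustration with made-up names,
not the code from ``test_generator.py``)::
    from greenlet import greenlet, getcurrent
    class Gen(object):
        # ``fn`` receives this object and calls ``emit()`` instead of using
        # "yield"; ``emit()`` may be called from nested helper functions.
        _DONE = object()
        def __init__(self, fn):
            self._caller = None
            self._child = greenlet(
                lambda: (fn(self), self._caller.switch(self._DONE)))
        def emit(self, value):
            # Hand a value back to whichever greenlet called next() on us.
            self._caller.switch(value)
        def __iter__(self):
            return self
        def __next__(self):
            self._caller = getcurrent()
            value = self._child.switch()
            if value is self._DONE:
                raise StopIteration
            return value
    def producer(gen):
        def nested(n):          # a nested function can "yield" too
            for i in range(n):
                gen.emit(i)
        nested(3)
    print(list(Gen(producer)))  # -> [0, 1, 2]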
Greenlets are provided as a C extension module for the regular unmodified
interpreter.
.. _`Stackless`: http://www.stackless.com
Who is using Greenlet?
======================
There are several libraries that use Greenlet as a more flexible
alternative to Python's built-in coroutine support:
- `Concurrence`_
- `Eventlet`_
- `Gevent`_
.. _Concurrence: http://opensource.hyves.org/concurrence/
.. _Eventlet: http://eventlet.net/
.. _Gevent: http://www.gevent.org/
Getting Greenlet
================
The easiest way to get Greenlet is to install it with pip::
pip install greenlet
Source code archives and binary distributions are available on the
Python Package Index at https://pypi.org/project/greenlet
The source code repository is hosted on GitHub:
https://github.com/python-greenlet/greenlet
Documentation is available on readthedocs.org:
https://greenlet.readthedocs.io

View File

@ -0,0 +1,71 @@
../../include/site/python3.10/greenlet/greenlet.h,sha256=muQGuDPNWzBVjWoObFXddpDP_DLeE2GtdnF41cyYgy0,4648
greenlet-1.1.3.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849
greenlet-1.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
greenlet-1.1.3.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434
greenlet-1.1.3.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424
greenlet-1.1.3.dist-info/METADATA,sha256=DQAWGnxur5YBtMAo1zxHoCE1xEBsLwFP-df-a0A7oWU,3930
greenlet-1.1.3.dist-info/RECORD,,
greenlet-1.1.3.dist-info/WHEEL,sha256=W26pYN7HLsBT1jrDSL9udgf_mdNKJmYmL23sIP-FcgM,102
greenlet-1.1.3.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9
greenlet/__init__.py,sha256=f2pBI8kauTC7tFFi8r-JUUPXuthYvspSRCNENiqAH8k,1323
greenlet/__pycache__/__init__.cpython-310.pyc,,
greenlet/_greenlet.cp310-win_amd64.pyd,sha256=KlMu-ohRHk28Xr-eUdM0rS9bzknuJmnK8HAgDkI3atc,29184
greenlet/greenlet.c,sha256=tTKIwaPu9MhiGwhtlSkWWbSTXPbddgY-Xoq7xUfvfpA,67295
greenlet/greenlet.h,sha256=muQGuDPNWzBVjWoObFXddpDP_DLeE2GtdnF41cyYgy0,4648
greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143
greenlet/platform/switch_aarch64_gcc.h,sha256=TRH22e9TNRA_mys8hhLbNwz3efZk7BtKZhyhK7ucgyM,2385
greenlet/platform/switch_alpha_unix.h,sha256=T6kOBiHy3hLmy1vrmFrxbnOnRu0EJkoG_yuWy7fykZ4,689
greenlet/platform/switch_amd64_unix.h,sha256=KWB4PB2wcAaWvWbMzcq8tYBe02vEGPBCRMnHnfeI7gE,2610
greenlet/platform/switch_arm32_gcc.h,sha256=wflI2cGZBfLzM_GGgYx3OrFeoOq7OTsJP53dKLsrxS0,2488
greenlet/platform/switch_arm32_ios.h,sha256=yQZXCa0AZbyAIS9tKceyTCrRYlihpFBKDbiPCn_3im0,1901
greenlet/platform/switch_csky_gcc.h,sha256=GHlaVXrzQuSkrDqgL7-Ji9YwZnprpFhjPznNyp0NnvU,1340
greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928
greenlet/platform/switch_mips_unix.h,sha256=9ptMGEBXafee15RxOm5NrxiC2bEnwM9AkxJ7ktVatU8,1444
greenlet/platform/switch_ppc64_aix.h,sha256=ADpifLPlr6pTdT76bt6ozcqPjHrfPsJ93lQfc1VNaug,3878
greenlet/platform/switch_ppc64_linux.h,sha256=jqPKpTg09FzmCn59Kt6OJi2-40aoazFVJcf1YETLlwA,3833
greenlet/platform/switch_ppc_aix.h,sha256=nClVVlsRlFAI-I3fmivSJyJK7Xzx3_8l3Wf8QNJ9FMU,2959
greenlet/platform/switch_ppc_linux.h,sha256=J4eKMA73WbPYSaq0yAedzHB6J6ZKE8tIIzkqYxlaA2c,2777
greenlet/platform/switch_ppc_macosx.h,sha256=bnL2MqIUm9--NHizb5NYijvSrqutvuJx4auYCdqXllM,2642
greenlet/platform/switch_ppc_unix.h,sha256=5UW9c71NGJh6xksEbAOButBFH168QRyZ5O53yXdXGxg,2670
greenlet/platform/switch_riscv_unix.h,sha256=c3v3GRDMooslDKQLM75IqokWivtelbAj3-XZK31vWlE,758
greenlet/platform/switch_s390_unix.h,sha256=9oJkYnyUovPvXOAsVLXoj-Unl_Rr_DidkXYMaRXLS0w,2781
greenlet/platform/switch_sparc_sun_gcc.h,sha256=0vHXNNCdz-1ioQsw-OtK0ridnBVIzErYWiK7bBu6OgM,2815
greenlet/platform/switch_x32_unix.h,sha256=ie7Nxo6Cf_x4UVOSA_a3bJYPlRKZ1BvLWsclyQle_SY,1527
greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841
greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078
greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805
greenlet/platform/switch_x86_msvc.h,sha256=hi0dgp-k14IhMCxwtJtcI_ciPnMGd37uMnMaHaeQVWg,2481
greenlet/platform/switch_x86_unix.h,sha256=WvY2sNMFIEfoFVNVakl-osygJui3pSnlVj5jBrdaU08,3068
greenlet/slp_platformselect.h,sha256=-J5Px9Yk7Ths4hQTecC3iadxfte1CYaFoeqfg1lUl-A,3095
greenlet/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
greenlet/tests/__pycache__/__init__.cpython-310.pyc,,
greenlet/tests/__pycache__/test_contextvars.cpython-310.pyc,,
greenlet/tests/__pycache__/test_cpp.cpython-310.pyc,,
greenlet/tests/__pycache__/test_extension_interface.cpython-310.pyc,,
greenlet/tests/__pycache__/test_gc.cpython-310.pyc,,
greenlet/tests/__pycache__/test_generator.cpython-310.pyc,,
greenlet/tests/__pycache__/test_generator_nested.cpython-310.pyc,,
greenlet/tests/__pycache__/test_greenlet.cpython-310.pyc,,
greenlet/tests/__pycache__/test_leaks.cpython-310.pyc,,
greenlet/tests/__pycache__/test_stack_saved.cpython-310.pyc,,
greenlet/tests/__pycache__/test_throw.cpython-310.pyc,,
greenlet/tests/__pycache__/test_tracing.cpython-310.pyc,,
greenlet/tests/__pycache__/test_version.cpython-310.pyc,,
greenlet/tests/__pycache__/test_weakref.cpython-310.pyc,,
greenlet/tests/_test_extension.c,sha256=Tceb6kMFPSvAPW2LJ_zUlj--Wz_DtLzIPmgZcqkqAEU,5402
greenlet/tests/_test_extension.cp310-win_amd64.pyd,sha256=Hgf87W9aVeV7knnsuNBbvNoQ5Rf87VfI9swFHXQj3cU,13312
greenlet/tests/_test_extension_cpp.cp310-win_amd64.pyd,sha256=Y87bPUTHpjwH5D3rUrvcofnGlW3bV7bO2hzEde1bBXM,12800
greenlet/tests/_test_extension_cpp.cpp,sha256=zKfz0FxBXicq-53rItZ_NP8M406OBtyQFdH5bv_pRmk,3212
greenlet/tests/test_contextvars.py,sha256=d69XSuRrdU80xAPmzdObLjrjXnbTQChG0MgsvBF_nGM,9205
greenlet/tests/test_cpp.py,sha256=SXMuqsHTYTxFPBrasdbx5Sgplc89wvYEuPZvwafD-3k,488
greenlet/tests/test_extension_interface.py,sha256=1FhUkxL-NrxmQV_sxUdlt8tvIWpDcGi27JcdQ6VyvFc,2521
greenlet/tests/test_gc.py,sha256=oATPCmEAagdf1dZBYfZ0aiDklovLo_pQt5HZNTygCzk,2892
greenlet/tests/test_generator.py,sha256=_MLDA1kBtZQR-9a74AOZZQECQCIFljMa7vbucE0cOxw,1280
greenlet/tests/test_generator_nested.py,sha256=pGYRpNn_WjdhY_5ZHHBuBw10wskG_7mjJjR8IqleY3M,3579
greenlet/tests/test_greenlet.py,sha256=SVDi0e1RrJtJhiOFggmoWTZL1sFdxRpdALFRCie-n60,23427
greenlet/tests/test_leaks.py,sha256=STvFoZsFsZ_E24kYFaIASGBx97TRgTIur6uJXnoevWc,6677
greenlet/tests/test_stack_saved.py,sha256=SyIHZycTBfm1TxFsq1VLCAgVm02t5GSke8tT28qwi7c,450
greenlet/tests/test_throw.py,sha256=OOWfgcEaymvGVJQ3d4xDGzC5IVH0rZAiazWuyZV9270,2755
greenlet/tests/test_tracing.py,sha256=hZ6Cl5NMq9IaeH7NGqWYl8aQ0_5nFUSYuo6TeSXvrKw,7455
greenlet/tests/test_version.py,sha256=lHDe3qcLvfsOHcFKFW8yrcl5wBvy6UIxaNkZZzNlpHE,1229
greenlet/tests/test_weakref.py,sha256=gqAQunjVzbwF6qEUZijhv6UqhH4apWNIRHeoWLUo9tM,884

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: false
Tag: cp310-cp310-win_amd64

View File

@ -0,0 +1 @@
greenlet

View File

@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
"""
The root of the greenlet package.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__all__ = [
'__version__',
'_C_API',
'GreenletExit',
'error',
'getcurrent',
'greenlet',
'gettrace',
'settrace',
]
# pylint:disable=no-name-in-module
###
# Metadata
###
__version__ = '1.1.3'
from ._greenlet import _C_API # pylint:disable=no-name-in-module
###
# Exceptions
###
from ._greenlet import GreenletExit
from ._greenlet import error
###
# greenlets
###
from ._greenlet import getcurrent
from ._greenlet import greenlet
###
# tracing
###
try:
from ._greenlet import gettrace
from ._greenlet import settrace
except ImportError:
# Tracing wasn't supported.
# XXX: The option to disable it was removed in 1.0,
# so this branch should be dead code.
pass
###
# Constants
# These constants aren't documented and aren't recommended.
# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS
# is the same as ``sys.version_info[:2] >= 3.7``
###
from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import
from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import
from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import
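# --- Editorial sketch, not part of upstream greenlet: a minimal illustration
# of the API re-exported above (assumes the compiled ``_greenlet`` extension
# imports cleanly). Kept in a helper that is never called on import. ---
def _example_switch():  # pragma: no cover - illustrative only
    def worker(message):
        print("worker received:", message)
        # Hand control (and a value) back to the greenlet that resumed us.
        getcurrent().parent.switch("done")
    g = greenlet(worker)
    result = g.switch("hello")  # runs ``worker`` until it switches back
    print(result)  # -> done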

File diff suppressed because it is too large

View File

@ -0,0 +1,161 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#if PY_VERSION_HEX >= 0x30B00A6
# define GREENLET_PY311 1
/* _PyInterpreterFrame moved to the internal C API in Python 3.11 */
# include <internal/pycore_frame.h>
#else
# define GREENLET_PY311 0
# define _PyCFrame CFrame
#endif
typedef struct _greenlet {
PyObject_HEAD
char* stack_start;
char* stack_stop;
char* stack_copy;
intptr_t stack_saved;
struct _greenlet* stack_prev;
struct _greenlet* parent;
PyObject* run_info;
struct _frame* top_frame;
int recursion_depth;
#if GREENLET_PY311
_PyInterpreterFrame *current_frame;
_PyStackChunk *datastack_chunk;
PyObject **datastack_top;
PyObject **datastack_limit;
#endif
PyObject* weakreflist;
#if PY_VERSION_HEX >= 0x030700A3
_PyErr_StackItem* exc_info;
_PyErr_StackItem exc_state;
#else
PyObject* exc_type;
PyObject* exc_value;
PyObject* exc_traceback;
#endif
PyObject* dict;
#if PY_VERSION_HEX >= 0x030700A3
PyObject* context;
#endif
#if PY_VERSION_HEX >= 0x30A00B1
_PyCFrame* cframe;
#endif
} PyGreenlet;
#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type)
#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1)
#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL)
#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL)
#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent)
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 8
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#ifndef GREENLET_MODULE
/* This section is used by modules that use the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */

View File

@ -0,0 +1,2 @@
call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64
ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm

View File

@ -0,0 +1,69 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall
* 13-Apr-13 Add support for strange GCC caller-save decisions
* 08-Apr-13 File creation. Michael Matz
*
* NOTES
*
* Simply save all callee saved registers
*
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \
"x27", "x28", "x30" /* aka lr */, \
"v8", "v9", "v10", "v11", \
"v12", "v13", "v14", "v15"
static int
slp_switch(void)
{
int err;
void *fp;
register long *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("str x29, %0" : "=m"(fp) : : );
__asm__ ("mov %0, sp" : "=r" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"add sp,sp,%0\n"
"add x29,x29,%0\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
/* SLP_SAVE_STATE macro contains some return statements
(of -1 and 1). It falls through only when
the return value of slp_save_state() is zero, which
is placed in x0.
In that case we (slp_switch) also want to return zero
(also in x0 of course).
Now, some GCC versions (seen with 4.8) think it's a
good idea to save/restore x0 around the call to
slp_restore_state(), instead of simply zeroing it
at the return below. But slp_restore_state
writes random values to the stack slot used for this
save/restore (from when it once was saved above in
SLP_SAVE_STATE, when it was still uninitialized), so
"restoring" that precious zero actually makes us
return random values. There are some ways to make
GCC not use that zero value in the normal return path
(e.g. making err volatile, but that costs a little
stack space), and the simplest is to call a function
that returns an unknown value (which happens to be zero),
so the saved/restored value is unused. */
__asm__ volatile ("mov %0, #0" : "=r" (err));
}
__asm__ volatile ("ldr x29, %0" : : "m" (fp) :);
__asm__ volatile ("" : : : REGS_TO_SAVE);
return err;
}
#endif

View File

@ -0,0 +1,30 @@
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \
"$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9"
static int
slp_switch(void)
{
register int ret;
register long *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("mov $30, %0" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"addq $30, %0, $30\n\t"
: /* no outputs */
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("mov $31, %0" : "=r" (ret) : );
return ret;
}
#endif

View File

@ -0,0 +1,84 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 3-May-13 Ralf Schmitt <ralf@systemexit.de>
* Add support for strange GCC caller-save decisions
* (ported from switch_aarch64_gcc.h)
* 18-Aug-11 Alexey Borzenkov <snaury@gmail.com>
* Correctly save rbp, csr and cw
* 01-Apr-04 Hye-Shik Chang <perky@FreeBSD.org>
* Ported from i386 to amd64.
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 31-Apr-02 Armin Rigo <arigo@ulb.ac.be>
* Added ebx, esi and edi register-saves.
* 01-Mar-02 Samual M. Rushing <rushing@ironport.com>
* Ported from i386.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0
#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
static int
slp_switch(void)
{
int err;
void* rbp;
void* rbx;
unsigned int csr;
unsigned short cw;
register long *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("fstcw %0" : "=m" (cw));
__asm__ volatile ("stmxcsr %0" : "=m" (csr));
__asm__ volatile ("movq %%rbp, %0" : "=m" (rbp));
__asm__ volatile ("movq %%rbx, %0" : "=m" (rbx));
__asm__ ("movq %%rsp, %0" : "=g" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"addq %0, %%rsp\n"
"addq %0, %%rbp\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
__asm__ volatile ("xorq %%rax, %%rax" : "=a" (err));
}
__asm__ volatile ("movq %0, %%rbx" : : "m" (rbx));
__asm__ volatile ("movq %0, %%rbp" : : "m" (rbp));
__asm__ volatile ("ldmxcsr %0" : : "m" (csr));
__asm__ volatile ("fldcw %0" : : "m" (cw));
__asm__ volatile ("" : : : REGS_TO_SAVE);
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,79 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro
* 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I
* read that these do not need to be saved. Also added notes and
* errors related to the frame pointer. Richard Tew.
*
* NOTES
*
* It is not possible to detect if fp is used or not, so the supplied
* switch function needs to support it, so that you can remove it if
* it does not apply to you.
*
* POSSIBLE ERRORS
*
* "fp cannot be used in asm here"
*
* - Try commenting out "fp" in REGS_TO_SAVE.
*
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REG_SP "sp"
#define REG_SPSP "sp,sp"
#ifdef __thumb__
#define REG_FP "r7"
#define REG_FPFP "r7,r7"
#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr"
#else
#define REG_FP "fp"
#define REG_FPFP "fp,fp"
#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr"
#endif
#if defined(__SOFTFP__)
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL
#elif defined(__VFP_FP__)
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \
"d12", "d13", "d14", "d15"
#elif defined(__MAVERICK__)
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \
"mvf8", "mvf9", "mvf10", "mvf11", \
"mvf12", "mvf13", "mvf14", "mvf15"
#else
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7"
#endif
static int
#ifdef __GNUC__
__attribute__((optimize("no-omit-frame-pointer")))
#endif
slp_switch(void)
{
void *fp;
register int *stackref, stsizediff;
int result;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0");
__asm__ ("mov %0," REG_SP : "=r" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"add " REG_SPSP ",%0\n"
"add " REG_FPFP ",%0\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0");
__asm__ volatile ("" : : : REGS_TO_SAVE);
return result;
}
#endif

View File

@ -0,0 +1,67 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 31-May-15 iOS support. Ported from arm32. Proton <feisuzhu@163.com>
*
* NOTES
*
* It is not possible to detect if fp is used or not, so the supplied
* switch function needs to support it, so that you can remove it if
* it does not apply to you.
*
* POSSIBLE ERRORS
*
* "fp cannot be used in asm here"
*
* - Try commenting out "fp" in REGS_TO_SAVE.
*
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REG_SP "sp"
#define REG_SPSP "sp,sp"
#define REG_FP "r7"
#define REG_FPFP "r7,r7"
#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr"
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \
"d12", "d13", "d14", "d15"
static int
#ifdef __GNUC__
__attribute__((optimize("no-omit-frame-pointer")))
#endif
slp_switch(void)
{
void *fp;
register int *stackref, stsizediff, result;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("str " REG_FP ",%0" : "=m" (fp));
__asm__ ("mov %0," REG_SP : "=r" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"add " REG_SPSP ",%0\n"
"add " REG_FPFP ",%0\n"
:
: "r" (stsizediff)
: REGS_TO_SAVE /* Clobber registers, force compiler to
* recalculate address of void *fp from REG_SP or REG_FP */
);
SLP_RESTORE_STATE();
}
__asm__ volatile (
"ldr " REG_FP ", %1\n\t"
"mov %0, #0"
: "=r" (result)
: "m" (fp)
: REGS_TO_SAVE /* Force compiler to restore saved registers after this */
);
return result;
}
#endif

View File

@ -0,0 +1,48 @@
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REG_FP "r8"
#ifdef __CSKYABIV2__
#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\
"r16", "r17", "r18", "r19", "r20", "r21", "r22",\
"r23", "r24", "r25"
#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__)
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\
"vr13", "vr14", "vr15"
#else
#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL
#endif
#else
#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15"
#endif
static int
#ifdef __GNUC__
__attribute__((optimize("no-omit-frame-pointer")))
#endif
slp_switch(void)
{
register int *stackref, stsizediff;
int result;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ ("mov %0, sp" : "=r" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"addu sp,%0\n"
"addu "REG_FP",%0\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("movi %0, 0" : "=r" (result));
__asm__ volatile ("" : : : REGS_TO_SAVE);
return result;
}
#endif

View File

@ -0,0 +1,38 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 2014-01-06 Andreas Schwab <schwab@linux-m68k.org>
* File created.
*/
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \
"%a2", "%a3", "%a4"
static int
slp_switch(void)
{
int err;
int *stackref, stsizediff;
void *fp, *a5;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("move.l %%fp, %0" : "=m"(fp));
__asm__ volatile ("move.l %%a5, %0" : "=m"(a5));
__asm__ ("move.l %%sp, %0" : "=r"(stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff));
SLP_RESTORE_STATE();
__asm__ volatile ("clr.l %0" : "=g" (err));
}
__asm__ volatile ("move.l %0, %%a5" : : "m"(a5));
__asm__ volatile ("move.l %0, %%fp" : : "m"(fp));
__asm__ volatile ("" : : : REGS_TO_SAVE);
return err;
}
#endif

View File

@ -0,0 +1,64 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 20-Sep-14 Matt Madison <madison@bliss-m.org>
* Re-code the saving of the gp register for MIPS64.
* 05-Jan-08 Thiemo Seufer <ths@debian.org>
* Ported from ppc.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \
"$23", "$30"
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
#ifdef __mips64
uint64_t gpsave;
#endif
__asm__ __volatile__ ("" : : : REGS_TO_SAVE);
#ifdef __mips64
__asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : );
#endif
__asm__ ("move %0, $29" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ __volatile__ (
#ifdef __mips64
"daddu $29, %0\n"
#else
"addu $29, %0\n"
#endif
: /* no outputs */
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
#ifdef __mips64
__asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : );
#endif
__asm__ __volatile__ ("" : : : REGS_TO_SAVE);
__asm__ __volatile__ ("move %0, $0" : "=r" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,103 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 16-Oct-20 Jesse Gorzinski <jgorzins@us.ibm.com>
* Copied from Linux PPC64 implementation
* 04-Sep-18 Alexey Borzenkov <snaury@gmail.com>
* Workaround a gcc bug using manual save/restore of r30
* 21-Mar-18 Tulio Magno Quites Machado Filho <tuliom@linux.vnet.ibm.com>
* Added r30 to the list of saved registers in order to fully comply with
* both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this
* register as a nonvolatile register used for local variables.
* 21-Mar-18 Laszlo Boszormenyi <gcs@debian.org>
* Save r2 (TOC pointer) manually.
* 10-Dec-13 Ulrich Weigand <uweigand@de.ibm.com>
* Support ELFv2 ABI. Save float/vector registers.
* 09-Mar-12 Michael Ellerman <michael@ellerman.id.au>
* 64-bit implementation, copied from 32-bit.
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported from MacOS version.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
* 31-Jul-12 Trevor Bowen <trevorbowen@gmail.com>
* Changed memory constraints to register only.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 6
#if defined(__ALTIVEC__)
#define ALTIVEC_REGS \
"v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \
"v28", "v29", "v30", "v31",
#else
#define ALTIVEC_REGS
#endif
#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"r31", \
"fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \
"fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \
"fr30", "fr31", \
ALTIVEC_REGS \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register long *stackref, stsizediff;
void * toc;
void * r30;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("std 2, %0" : "=m" (toc));
__asm__ volatile ("std 30, %0" : "=m" (r30));
__asm__ ("mr %0, 1" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"mr 11, %0\n"
"add 1, 1, 11\n"
: /* no outputs */
: "r" (stsizediff)
: "11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("ld 30, %0" : : "m" (r30));
__asm__ volatile ("ld 2, %0" : : "m" (toc));
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif

View File

@ -0,0 +1,105 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 04-Sep-18 Alexey Borzenkov <snaury@gmail.com>
* Workaround a gcc bug using manual save/restore of r30
* 21-Mar-18 Tulio Magno Quites Machado Filho <tuliom@linux.vnet.ibm.com>
* Added r30 to the list of saved registers in order to fully comply with
* both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this
* register as a nonvolatile register used for local variables.
* 21-Mar-18 Laszlo Boszormenyi <gcs@debian.org>
* Save r2 (TOC pointer) manually.
* 10-Dec-13 Ulrich Weigand <uweigand@de.ibm.com>
* Support ELFv2 ABI. Save float/vector registers.
* 09-Mar-12 Michael Ellerman <michael@ellerman.id.au>
* 64-bit implementation, copied from 32-bit.
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported from MacOS version.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
* 31-Jul-12 Trevor Bowen <trevorbowen@gmail.com>
* Changed memory constraints to register only.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#if _CALL_ELF == 2
#define STACK_MAGIC 4
#else
#define STACK_MAGIC 6
#endif
#if defined(__ALTIVEC__)
#define ALTIVEC_REGS \
"v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \
"v28", "v29", "v30", "v31",
#else
#define ALTIVEC_REGS
#endif
#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"r31", \
"fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \
"fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \
"fr30", "fr31", \
ALTIVEC_REGS \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register long *stackref, stsizediff;
void * toc;
void * r30;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("std 2, %0" : "=m" (toc));
__asm__ volatile ("std 30, %0" : "=m" (r30));
__asm__ ("mr %0, 1" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"mr 11, %0\n"
"add 1, 1, 11\n"
: /* no outputs */
: "r" (stsizediff)
: "11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("ld 30, %0" : : "m" (r30));
__asm__ volatile ("ld 2, %0" : : "m" (toc));
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif

View File

@ -0,0 +1,87 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 07-Mar-11 Floris Bruynooghe <flub@devork.be>
* Do not add stsizediff to general purpose
* register (GPR) 30 as this is a non-volatile and
* unused by the PowerOpen Environment, therefore
* this was modifying a user register instead of the
* frame pointer (which does not seem to exist).
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported from MacOS version.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 3
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
* is meant to be compiled non-dynamically!
*/
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ ("mr %0, 1" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"mr 11, %0\n"
"add 1, 1, 11\n"
: /* no outputs */
: "r" (stsizediff)
: "11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,84 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported from MacOS version.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
* 31-Jul-12 Trevor Bowen <trevorbowen@gmail.com>
* Changed memory constraints to register only.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 3
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
* is meant to be compiled non-dynamically!
*/
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ ("mr %0, 1" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"mr 11, %0\n"
"add 1, 1, 11\n"
"add 30, 30, 11\n"
: /* no outputs */
: "r" (stsizediff)
: "11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,82 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 3
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
* is meant to be compiled non-dynamically!
*/
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ ("; asm block 2\n\tmr %0, r1" : "=g" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"; asm block 3\n"
"\tmr r11, %0\n"
"\tadd r1, r1, r11\n"
"\tadd r30, r30, r11\n"
: /* no outputs */
: "g" (stsizediff)
: "r11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,82 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'r31' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
* added cr2-cr4 to the registers to be saved.
* Open questions: Should we save FP registers?
* What about vector registers?
* Differences between darwin and unix?
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported from MacOS version.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
* Added register 13-29, 31 saves. The same way as
* Armin Rigo did for the x86_unix version.
* This seems to be now fully functional!
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
* Ported from i386.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 3
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
* is meant to be compiled non-dynamically!
*/
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
"cr2", "cr3", "cr4"
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ ("mr %0, 1" : "=g" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"mr 11, %0\n"
"add 1, 1, 11\n"
"add 30, 30, 11\n"
: /* no outputs */
: "g" (stsizediff)
: "11"
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("li %0, 0" : "=r" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,32 @@
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \
"s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \
"fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \
"fs10", "fs11"
static int
slp_switch(void)
{
register int ret;
register long *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("mv %0, sp" : "=r" (stackref) : );
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"add sp, sp, %0\n\t"
: /* no outputs */
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("mv %0, zero" : "=r" (ret) : );
return ret;
}
#endif

View File

@ -0,0 +1,87 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 25-Jan-12 Alexey Borzenkov <snaury@gmail.com>
* Fixed Linux/S390 port to work correctly with
* different optimization options both on 31-bit
* and 64-bit. Thanks to Stefan Raabe for lots
* of testing.
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 06-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
* Ported to Linux/S390.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#ifdef __s390x__
#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */
#else
#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */
#endif
/* Technically, r11-r13 also need saving, but function prolog starts
with stm(g) and since there are so many saved registers already
it won't be optimized, resulting in all r6-r15 being saved */
#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \
"f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \
"f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15"
static int
slp_switch(void)
{
register int ret;
register long *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
#ifdef __s390x__
__asm__ volatile ("lgr %0, 15" : "=r" (stackref) : );
#else
__asm__ volatile ("lr %0, 15" : "=r" (stackref) : );
#endif
{
SLP_SAVE_STATE(stackref, stsizediff);
/* N.B.
r11 may be used as the frame pointer, and in that case it cannot be
clobbered and needs offsetting just like the stack pointer (but in cases
where frame pointer isn't used we might clobber it accidentally). What's
scary is that r11 is 2nd (and even 1st when GOT is used) callee saved
register that gcc would choose for surviving function calls. However,
since r6-r10 are clobbered above, their cost for reuse is reduced, so
gcc IRA will choose them over r11 (not seeing r11 is implicitly saved),
making it relatively safe to offset in all cases. :) */
__asm__ volatile (
#ifdef __s390x__
"agr 15, %0\n\t"
"agr 11, %0"
#else
"ar 15, %0\n\t"
"ar 11, %0"
#endif
: /* no outputs */
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("lhi %0, 0" : "=r" (ret) : );
return ret;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,92 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 16-May-15 Alexey Borzenkov <snaury@gmail.com>
* Move stack spilling code inside save/restore functions
* 30-Aug-13 Floris Bruynooghe <flub@devork.be>
* Clean the register windows again before returning.
* This does not clobber the PIC register as it leaves
* the current window intact and is required for multi-
* threaded code to work correctly.
* 08-Mar-11 Floris Bruynooghe <flub@devork.be>
* No need to set return value register explicitly
* before the stack and framepointer are adjusted
* as none of the other registers are influenced by
* this. Also don't needlessly clean the windows
* ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that
* clobbers the gcc PIC register (%l7).
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* added support for SunOS sparc with gcc
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#if defined(__sparcv9)
#define SLP_FLUSHW __asm__ volatile ("flushw")
#else
#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */
#endif
/* On sparc we need to spill register windows inside save/restore functions */
#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW
#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW
static int
slp_switch(void)
{
register int err;
register int *stackref, stsizediff;
/* Put current stack pointer into stackref.
* Register spilling is done in save/restore.
*/
__asm__ volatile ("mov %%sp, %0" : "=r" (stackref));
{
/* Thou shalt put SLP_SAVE_STATE into a local block */
/* Copy the current stack onto the heap */
SLP_SAVE_STATE(stackref, stsizediff);
/* Increment stack and frame pointer by stsizediff */
__asm__ volatile (
"add %0, %%sp, %%sp\n\t"
"add %0, %%fp, %%fp"
: : "r" (stsizediff));
/* Copy new stack from its save store on the heap */
SLP_RESTORE_STATE();
__asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0));
return err;
}
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,63 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 17-Aug-12 Fantix King <fantix.king@gmail.com>
* Ported from amd64.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
static int
slp_switch(void)
{
void* ebp;
void* ebx;
unsigned int csr;
unsigned short cw;
register int err;
register int *stackref, stsizediff;
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("fstcw %0" : "=m" (cw));
__asm__ volatile ("stmxcsr %0" : "=m" (csr));
__asm__ volatile ("movl %%ebp, %0" : "=m" (ebp));
__asm__ volatile ("movl %%ebx, %0" : "=m" (ebx));
__asm__ ("movl %%esp, %0" : "=g" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"addl %0, %%esp\n"
"addl %0, %%ebp\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
}
__asm__ volatile ("movl %0, %%ebx" : : "m" (ebx));
__asm__ volatile ("movl %0, %%ebp" : : "m" (ebp));
__asm__ volatile ("ldmxcsr %0" : : "m" (csr));
__asm__ volatile ("fldcw %0" : : "m" (cw));
__asm__ volatile ("" : : : REGS_TO_SAVE);
__asm__ volatile ("xorl %%eax, %%eax" : "=a" (err));
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,111 @@
;
; stack switching code for MASM on x64
; Kristjan Valur Jonsson, sept 2005
;
;prototypes for our calls
slp_save_state_asm PROTO
slp_restore_state_asm PROTO
pushxmm MACRO reg
sub rsp, 16
.allocstack 16
movaps [rsp], reg ; faster than movups, but we must be aligned
; .savexmm128 reg, offset (don't know what offset is, no documentation)
ENDM
popxmm MACRO reg
movaps reg, [rsp] ; faster than movups, but we must be aligned
add rsp, 16
ENDM
pushreg MACRO reg
push reg
.pushreg reg
ENDM
popreg MACRO reg
pop reg
ENDM
.code
slp_switch PROC FRAME
;realign stack to 16 bytes after return address push, makes the following faster
sub rsp,8
.allocstack 8
pushxmm xmm15
pushxmm xmm14
pushxmm xmm13
pushxmm xmm12
pushxmm xmm11
pushxmm xmm10
pushxmm xmm9
pushxmm xmm8
pushxmm xmm7
pushxmm xmm6
pushreg r15
pushreg r14
pushreg r13
pushreg r12
pushreg rbp
pushreg rbx
pushreg rdi
pushreg rsi
sub rsp, 10h ;allocate the single function argument (must be multiple of 16)
.allocstack 10h
.endprolog
lea rcx, [rsp+10h] ;load stack base that we are saving
call slp_save_state_asm ;pass stackpointer, return offset in eax
cmp rax, 1
je EXIT1
cmp rax, -1
je EXIT2
;actual stack switch:
add rsp, rax
call slp_restore_state_asm
xor rax, rax ;return 0
EXIT:
add rsp, 10h
popreg rsi
popreg rdi
popreg rbx
popreg rbp
popreg r12
popreg r13
popreg r14
popreg r15
popxmm xmm6
popxmm xmm7
popxmm xmm8
popxmm xmm9
popxmm xmm10
popxmm xmm11
popxmm xmm12
popxmm xmm13
popxmm xmm14
popxmm xmm15
add rsp, 8
ret
EXIT1:
mov rax, 1
jmp EXIT
EXIT2:
sar rax, 1
jmp EXIT
slp_switch ENDP
END

View File

@ -0,0 +1,60 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 26-Sep-02 Christian Tismer <tismer@tismer.com>
* again as a result of virtualized stack access,
* the compiler used less registers. Needed to
* explicit mention registers in order to get them saved.
* Thanks to Jeff Senn for pointing this out and help.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 01-Mar-02 Christian Tismer <tismer@tismer.com>
* Initial final version after lots of iterations for i386.
*/
/* Avoid alloca redefined warning on mingw64 */
#ifndef alloca
#define alloca _alloca
#endif
#define STACK_REFPLUS 1
#define STACK_MAGIC 0
/* Use the generic support for an external assembly language slp_switch function. */
#define EXTERNAL_ASM
#ifdef SLP_EVAL
/* This always uses the external masm assembly file. */
#endif
/*
* further self-processing support
*/
/* we have IsBadReadPtr available, so we can peek at objects */
/*
#define STACKLESS_SPY
#ifdef IMPLEMENT_STACKLESSMODULE
#include "Windows.h"
#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes)
static int IS_ON_STACK(void*p)
{
int stackref;
intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000;
return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000;
}
#endif
*/

View File

@ -0,0 +1,88 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to insure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 26-Sep-02 Christian Tismer <tismer@tismer.com>
* again as a result of virtualized stack access,
* the compiler used less registers. Needed to
* explicit mention registers in order to get them saved.
* Thanks to Jeff Senn for pointing this out and help.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 01-Mar-02 Christian Tismer <tismer@tismer.com>
* Initial final version after lots of iterations for i386.
*/
#define alloca _alloca
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
#define STACK_MAGIC 0
/* Some magic to quell warnings and keep slp_switch() from crashing when built
with VC90. Disable global optimizations, and the warning: frame pointer
register 'ebp' modified by inline assembly code */
#pragma optimize("g", off)
#pragma warning(disable:4731)
static int
slp_switch(void)
{
void* seh;
register int *stackref, stsizediff;
__asm mov eax, fs:[0]
__asm mov [seh], eax
__asm mov stackref, esp;
/* modify EBX, ESI and EDI in order to get them preserved */
__asm mov ebx, ebx;
__asm xchg esi, edi;
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm {
mov eax, stsizediff
add esp, eax
add ebp, eax
}
SLP_RESTORE_STATE();
}
__asm mov eax, [seh]
__asm mov fs:[0], eax
return 0;
}
/* re-enable ebp warning and global optimizations. */
#pragma optimize("g", on)
#pragma warning(default:4731)
#endif
/*
* further self-processing support
*/
/* we have IsBadReadPtr available, so we can peek at objects */
#define STACKLESS_SPY
#ifdef IMPLEMENT_STACKLESSMODULE
#include "Windows.h"
#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes)
static int IS_ON_STACK(void*p)
{
int stackref;
int stackbase = ((int)&stackref) & 0xfffff000;
return (int)p >= stackbase && (int)p < stackbase + 0x00100000;
}
#endif

View File

@ -0,0 +1,105 @@
/*
* this is the internal transfer function.
*
* HISTORY
* 3-May-13 Ralf Schmitt <ralf@systemexit.de>
* Add support for strange GCC caller-save decisions
* (ported from switch_aarch64_gcc.h)
* 19-Aug-11 Alexey Borzenkov <snaury@gmail.com>
* Correctly save ebp, ebx and cw
* 07-Sep-05 (py-dev mailing list discussion)
* removed 'ebx' from the register-saved. !!!! WARNING !!!!
* It means that this file can no longer be compiled statically!
* It is now only suitable as part of a dynamic library!
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
* after virtualizing stack save/restore, the
* stack size shrunk a bit. Needed to introduce
* an adjustment STACK_MAGIC per platform.
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
* slightly changed framework for sparc
* 31-Apr-02 Armin Rigo <arigo@ulb.ac.be>
* Added ebx, esi and edi register-saves.
* 01-Mar-02 Samuel M. Rushing <rushing@ironport.com>
* Ported from i386.
*/
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
# define ATTR_NOCLONE __attribute__((noclone))
#else
# define ATTR_NOCLONE
#endif
static int
slp_switch(void)
{
int err;
#ifdef _WIN32
void *seh;
#endif
void *ebp, *ebx;
unsigned short cw;
register int *stackref, stsizediff;
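/* The empty asm with "esi"/"edi" clobbers forces the compiler to preserve
   those registers itself; the x87 control word, ebp and ebx are saved by
   hand below and restored after the switch. */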
__asm__ volatile ("" : : : "esi", "edi");
__asm__ volatile ("fstcw %0" : "=m" (cw));
__asm__ volatile ("movl %%ebp, %0" : "=m" (ebp));
__asm__ volatile ("movl %%ebx, %0" : "=m" (ebx));
#ifdef _WIN32
__asm__ volatile (
"movl %%fs:0x0, %%eax\n"
"movl %%eax, %0\n"
: "=m" (seh)
:
: "eax");
#endif
__asm__ ("movl %%esp, %0" : "=g" (stackref));
{
SLP_SAVE_STATE(stackref, stsizediff);
__asm__ volatile (
"addl %0, %%esp\n"
"addl %0, %%ebp\n"
:
: "r" (stsizediff)
);
SLP_RESTORE_STATE();
__asm__ volatile ("xorl %%eax, %%eax" : "=a" (err));
}
#ifdef _WIN32
__asm__ volatile (
"movl %0, %%eax\n"
"movl %%eax, %%fs:0x0\n"
:
: "m" (seh)
: "eax");
#endif
__asm__ volatile ("movl %0, %%ebx" : : "m" (ebx));
__asm__ volatile ("movl %0, %%ebp" : : "m" (ebp));
__asm__ volatile ("fldcw %0" : : "m" (cw));
__asm__ volatile ("" : : : "esi", "edi");
return err;
}
#endif
/*
* further self-processing support
*/
/*
* if you want to add self-inspection tools, place them
* here. See the x86_msvc for the necessary defines.
* These features are highly experimental and not
* essential yet.
*/

View File

@ -0,0 +1,58 @@
/*
* Platform Selection for Stackless Python
*/
#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER)
#include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */
#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__)
#include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */
#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__)
#include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */
#elif defined(__GNUC__) && defined(__amd64__)
#include "platform/switch_amd64_unix.h" /* gcc on amd64 */
#elif defined(__GNUC__) && defined(__i386__)
#include "platform/switch_x86_unix.h" /* gcc on X86 */
#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__))
#include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */
#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__))
#include "platform/switch_ppc_linux.h" /* gcc on PowerPC */
#elif defined(__GNUC__) && defined(__ppc__) && defined(__APPLE__)
#include "platform/switch_ppc_macosx.h" /* Apple MacOS X on PowerPC */
#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX)
#include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */
#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX)
#include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */
#elif defined(__GNUC__) && defined(sparc)
#include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */
#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun)
#include "platform/switch_sparc_sun_gcc.h" /* SunStudio on amd64 */
#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun)
#include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */
#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun)
#include "platform/switch_x86_unix.h" /* SunStudio on x86 */
#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__)
#include "platform/switch_s390_unix.h" /* Linux/S390 */
#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__)
#include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */
#elif defined(__GNUC__) && defined(__arm__)
#ifdef __APPLE__
#include <TargetConditionals.h>
#endif
#if TARGET_OS_IPHONE
#include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */
#else
#include "platform/switch_arm32_gcc.h" /* gcc using arm32 */
#endif
#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__)
#include "platform/switch_mips_unix.h" /* Linux/MIPS */
#elif defined(__GNUC__) && defined(__aarch64__)
#include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */
#elif defined(__GNUC__) && defined(__mc68000__)
#include "platform/switch_m68k_gcc.h" /* gcc on m68k */
#elif defined(__GNUC__) && defined(__csky__)
#include "platform/switch_csky_gcc.h" /* gcc on csky */
#elif defined(__GNUC__) && defined(__riscv)
#include "platform/switch_riscv_unix.h" /* gcc on RISC-V */
#elif defined(__GNUC__) && defined(__alpha__)
#include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */
#endif
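The platform headers above only emit a slp_switch() definition when the including source file defines SLP_EVAL together with the SLP_SAVE_STATE / SLP_RESTORE_STATE hooks that the assembly bodies call. A minimal, hypothetical consumer sketch (placeholder macro bodies; greenlet's real definitions do the actual stack copying, and the selector header's usual name is assumed here) could look like:
/* Hypothetical sketch only: SLP_SAVE_STATE / SLP_RESTORE_STATE are no-op
 * placeholders; the real greenlet sources supply the stack-copying logic. */
#define SLP_SAVE_STATE(stackref, stsizediff) \
    do { (void)(stackref); (stsizediff) = 0; } while (0)  /* placeholder */
#define SLP_RESTORE_STATE() do { } while (0)               /* placeholder */
#define SLP_EVAL                /* ask the platform header to emit slp_switch() */
#include "slp_platformselect.h" /* assumed name of the selector header shown above */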

View File

@ -0,0 +1,216 @@
/* This is a set of functions used by test_extension_interface.py to test the
* Greenlet C API.
*/
#include "../greenlet.h"
#ifndef Py_RETURN_NONE
# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None
#endif
#define TEST_MODULE_NAME "_test_extension"
static PyObject*
test_switch(PyObject* self, PyObject* greenlet)
{
PyObject* result = NULL;
if (greenlet == NULL || !PyGreenlet_Check(greenlet)) {
PyErr_BadArgument();
return NULL;
}
result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL);
if (result == NULL) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_AssertionError,
"greenlet.switch() failed for some reason.");
}
return NULL;
}
Py_INCREF(result);
return result;
}
static PyObject*
test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs)
{
PyGreenlet* g = NULL;
PyObject* result = NULL;
PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g);
if (g == NULL || !PyGreenlet_Check(g)) {
PyErr_BadArgument();
return NULL;
}
result = PyGreenlet_Switch(g, NULL, kwargs);
if (result == NULL) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_AssertionError,
"greenlet.switch() failed for some reason.");
}
return NULL;
}
Py_XINCREF(result);
return result;
}
static PyObject*
test_getcurrent(PyObject* self)
{
PyGreenlet* g = PyGreenlet_GetCurrent();
if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) {
PyErr_SetString(PyExc_AssertionError,
"getcurrent() returned an invalid greenlet");
Py_XDECREF(g);
return NULL;
}
Py_DECREF(g);
Py_RETURN_NONE;
}
static PyObject*
test_setparent(PyObject* self, PyObject* arg)
{
PyGreenlet* current;
PyGreenlet* greenlet = NULL;
if (arg == NULL || !PyGreenlet_Check(arg)) {
PyErr_BadArgument();
return NULL;
}
if ((current = PyGreenlet_GetCurrent()) == NULL) {
return NULL;
}
greenlet = (PyGreenlet*)arg;
if (PyGreenlet_SetParent(greenlet, current)) {
Py_DECREF(current);
return NULL;
}
Py_DECREF(current);
if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) {
return NULL;
}
Py_RETURN_NONE;
}
static PyObject*
test_new_greenlet(PyObject* self, PyObject* callable)
{
PyObject* result = NULL;
PyGreenlet* greenlet = PyGreenlet_New(callable, NULL);
if (!greenlet) {
return NULL;
}
result = PyGreenlet_Switch(greenlet, NULL, NULL);
if (result == NULL) {
return NULL;
}
Py_INCREF(result);
return result;
}
static PyObject*
test_raise_dead_greenlet(PyObject* self)
{
PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception.");
return NULL;
}
static PyObject*
test_raise_greenlet_error(PyObject* self)
{
PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception");
return NULL;
}
static PyObject*
test_throw(PyObject* self, PyGreenlet* g)
{
const char msg[] = "take that sucka!";
PyObject* msg_obj = Py_BuildValue("s", msg);
PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL);
Py_DECREF(msg_obj);
Py_RETURN_NONE;
}
static PyMethodDef test_methods[] = {
{"test_switch",
(PyCFunction)test_switch,
METH_O,
"Switch to the provided greenlet sending provided arguments, and \n"
"return the results."},
{"test_switch_kwargs",
(PyCFunction)test_switch_kwargs,
METH_VARARGS | METH_KEYWORDS,
"Switch to the provided greenlet sending the provided keyword args."},
{"test_getcurrent",
(PyCFunction)test_getcurrent,
METH_NOARGS,
"Test PyGreenlet_GetCurrent()"},
{"test_setparent",
(PyCFunction)test_setparent,
METH_O,
"Se the parent of the provided greenlet and switch to it."},
{"test_new_greenlet",
(PyCFunction)test_new_greenlet,
METH_O,
"Test PyGreenlet_New()"},
{"test_raise_dead_greenlet",
(PyCFunction)test_raise_dead_greenlet,
METH_NOARGS,
"Just raise greenlet.GreenletExit"},
{"test_raise_greenlet_error",
(PyCFunction)test_raise_greenlet_error,
METH_NOARGS,
"Just raise greenlet.error"},
{"test_throw",
(PyCFunction)test_throw,
METH_O,
"Throw a ValueError at the provided greenlet"},
{NULL, NULL, 0, NULL}};
#if PY_MAJOR_VERSION >= 3
# define INITERROR return NULL
static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT,
TEST_MODULE_NAME,
NULL,
0,
test_methods,
NULL,
NULL,
NULL,
NULL};
PyMODINIT_FUNC
PyInit__test_extension(void)
#else
# define INITERROR return
PyMODINIT_FUNC
init_test_extension(void)
#endif
{
PyObject* module = NULL;
#if PY_MAJOR_VERSION >= 3
module = PyModule_Create(&moduledef);
#else
module = Py_InitModule(TEST_MODULE_NAME, test_methods);
#endif
if (module == NULL) {
INITERROR;
}
PyGreenlet_Import();
#if PY_MAJOR_VERSION >= 3
return module;
#endif
}

View File

@ -0,0 +1,121 @@
/* This is a set of functions used to test C++ exceptions are not
* broken during greenlet switches
*/
#include "../greenlet.h"
struct exception_t {
int depth;
exception_t(int depth) : depth(depth) {}
};
/* Functions are called via pointers to prevent inlining */
static void (*p_test_exception_throw)(int depth);
static PyObject* (*p_test_exception_switch_recurse)(int depth, int left);
static void
test_exception_throw(int depth)
{
throw exception_t(depth);
}
static PyObject*
test_exception_switch_recurse(int depth, int left)
{
if (left > 0) {
return p_test_exception_switch_recurse(depth, left - 1);
}
PyObject* result = NULL;
PyGreenlet* self = PyGreenlet_GetCurrent();
if (self == NULL)
return NULL;
try {
PyGreenlet_Switch(self->parent, NULL, NULL);
p_test_exception_throw(depth);
PyErr_SetString(PyExc_RuntimeError,
"throwing C++ exception didn't work");
}
catch (exception_t& e) {
if (e.depth != depth)
PyErr_SetString(PyExc_AssertionError, "depth mismatch");
else
result = PyLong_FromLong(depth);
}
catch (...) {
PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception");
}
Py_DECREF(self);
return result;
}
/* test_exception_switch(int depth)
* - recurses depth times
* - switches to parent inside try/catch block
* - throws an exception that is expected to be caught in the same function
* - verifies depth matches (exceptions shouldn't be caught in other greenlets)
*/
static PyObject*
test_exception_switch(PyObject* self, PyObject* args)
{
int depth;
if (!PyArg_ParseTuple(args, "i", &depth))
return NULL;
return p_test_exception_switch_recurse(depth, depth);
}
static PyMethodDef test_methods[] = {
{"test_exception_switch",
(PyCFunction)&test_exception_switch,
METH_VARARGS,
"Switches to parent twice, to test exception handling and greenlet "
"switching."},
{NULL, NULL, 0, NULL}};
#if PY_MAJOR_VERSION >= 3
# define INITERROR return NULL
static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT,
"greenlet.tests._test_extension_cpp",
NULL,
0,
test_methods,
NULL,
NULL,
NULL,
NULL};
PyMODINIT_FUNC
PyInit__test_extension_cpp(void)
#else
# define INITERROR return
PyMODINIT_FUNC
init_test_extension_cpp(void)
#endif
{
PyObject* module = NULL;
#if PY_MAJOR_VERSION >= 3
module = PyModule_Create(&moduledef);
#else
module = Py_InitModule("greenlet.tests._test_extension_cpp", test_methods);
#endif
if (module == NULL) {
INITERROR;
}
PyGreenlet_Import();
if (_PyGreenlet_API == NULL) {
INITERROR;
}
p_test_exception_throw = test_exception_throw;
p_test_exception_switch_recurse = test_exception_switch_recurse;
#if PY_MAJOR_VERSION >= 3
return module;
#endif
}

View File

@ -0,0 +1,266 @@
import unittest
import gc
import sys
from functools import partial
from greenlet import greenlet
from greenlet import getcurrent
try:
from contextvars import Context
from contextvars import ContextVar
from contextvars import copy_context
except ImportError:
Context = ContextVar = copy_context = None
# We don't support testing if greenlet's built-in context var support is disabled.
@unittest.skipUnless(Context is not None, "ContextVar not supported")
class ContextVarsTests(unittest.TestCase):
def _new_ctx_run(self, *args, **kwargs):
return copy_context().run(*args, **kwargs)
def _increment(self, greenlet_id, ctx_var, callback, counts, expect):
if expect is None:
self.assertIsNone(ctx_var.get())
else:
self.assertEqual(ctx_var.get(), expect)
ctx_var.set(greenlet_id)
for _ in range(2):
counts[ctx_var.get()] += 1
callback()
def _test_context(self, propagate_by):
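# propagate_by selects how each child greenlet gets its context:
#   "run"   - the target is wrapped in copy_context().run()
#   "set"   - a fresh copy_context() is assigned to let.gr_context
#   "share" - every child shares this greenlet's own context object
#   None    - children start with no context at all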
id_var = ContextVar("id", default=None)
id_var.set(0)
callback = getcurrent().switch
counts = dict((i, 0) for i in range(5))
lets = [
greenlet(partial(
partial(
copy_context().run,
self._increment
) if propagate_by == "run" else self._increment,
greenlet_id=i,
ctx_var=id_var,
callback=callback,
counts=counts,
expect=(
i - 1 if propagate_by == "share" else
0 if propagate_by in ("set", "run") else None
)
))
for i in range(1, 5)
]
for let in lets:
if propagate_by == "set":
let.gr_context = copy_context()
elif propagate_by == "share":
let.gr_context = getcurrent().gr_context
for i in range(2):
counts[id_var.get()] += 1
for let in lets:
let.switch()
if propagate_by == "run":
# Must leave each context.run() in reverse order of entry
for let in reversed(lets):
let.switch()
else:
# No context.run(), so fine to exit in any order.
for let in lets:
let.switch()
for let in lets:
self.assertTrue(let.dead)
# When using run(), we leave the run() as the greenlet dies,
# and there's no context "underneath". When not using run(),
# gr_context still reflects the context the greenlet was
# running in.
self.assertEqual(let.gr_context is None, propagate_by == "run")
if propagate_by == "share":
self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6})
else:
self.assertEqual(set(counts.values()), set([2]))
def test_context_propagated_by_context_run(self):
self._new_ctx_run(self._test_context, "run")
def test_context_propagated_by_setting_attribute(self):
self._new_ctx_run(self._test_context, "set")
def test_context_not_propagated(self):
self._new_ctx_run(self._test_context, None)
def test_context_shared(self):
self._new_ctx_run(self._test_context, "share")
def test_break_ctxvars(self):
let1 = greenlet(copy_context().run)
let2 = greenlet(copy_context().run)
let1.switch(getcurrent().switch)
let2.switch(getcurrent().switch)
# Since let2 entered the current context and let1 exits its own, the
# interpreter emits:
# RuntimeError: cannot exit context: thread state references a different context object
let1.switch()
def test_not_broken_if_using_attribute_instead_of_context_run(self):
let1 = greenlet(getcurrent().switch)
let2 = greenlet(getcurrent().switch)
let1.gr_context = copy_context()
let2.gr_context = copy_context()
let1.switch()
let2.switch()
let1.switch()
let2.switch()
def test_context_assignment_while_running(self):
id_var = ContextVar("id", default=None)
def target():
self.assertIsNone(id_var.get())
self.assertIsNone(gr.gr_context)
# Context is created on first use
id_var.set(1)
self.assertIsInstance(gr.gr_context, Context)
self.assertEqual(id_var.get(), 1)
self.assertEqual(gr.gr_context[id_var], 1)
# Clearing the context makes it get re-created as another
# empty context when next used
old_context = gr.gr_context
gr.gr_context = None # assign None while running
self.assertIsNone(id_var.get())
self.assertIsNone(gr.gr_context)
id_var.set(2)
self.assertIsInstance(gr.gr_context, Context)
self.assertEqual(id_var.get(), 2)
self.assertEqual(gr.gr_context[id_var], 2)
new_context = gr.gr_context
getcurrent().parent.switch((old_context, new_context))
# parent switches us back to old_context
self.assertEqual(id_var.get(), 1)
gr.gr_context = new_context # assign non-None while running
self.assertEqual(id_var.get(), 2)
getcurrent().parent.switch()
# parent switches us back to no context
self.assertIsNone(id_var.get())
self.assertIsNone(gr.gr_context)
gr.gr_context = old_context
self.assertEqual(id_var.get(), 1)
getcurrent().parent.switch()
# parent switches us back to no context
self.assertIsNone(id_var.get())
self.assertIsNone(gr.gr_context)
gr = greenlet(target)
with self.assertRaisesRegex(AttributeError, "can't delete attr"):
del gr.gr_context
self.assertIsNone(gr.gr_context)
old_context, new_context = gr.switch()
self.assertIs(new_context, gr.gr_context)
self.assertEqual(old_context[id_var], 1)
self.assertEqual(new_context[id_var], 2)
self.assertEqual(new_context.run(id_var.get), 2)
gr.gr_context = old_context # assign non-None while suspended
gr.switch()
self.assertIs(gr.gr_context, new_context)
gr.gr_context = None # assign None while suspended
gr.switch()
self.assertIs(gr.gr_context, old_context)
gr.gr_context = None
gr.switch()
self.assertIsNone(gr.gr_context)
# Make sure there are no reference leaks
gr = None
gc.collect()
self.assertEqual(sys.getrefcount(old_context), 2)
self.assertEqual(sys.getrefcount(new_context), 2)
def test_context_assignment_different_thread(self):
import threading
ctx = Context()
var = ContextVar("var", default=None)
is_running = threading.Event()
should_suspend = threading.Event()
did_suspend = threading.Event()
should_exit = threading.Event()
holder = []
def greenlet_in_thread_fn():
var.set(1)
is_running.set()
should_suspend.wait()
var.set(2)
getcurrent().parent.switch()
holder.append(var.get())
def thread_fn():
gr = greenlet(greenlet_in_thread_fn)
gr.gr_context = ctx
holder.append(gr)
gr.switch()
did_suspend.set()
should_exit.wait()
gr.switch()
thread = threading.Thread(target=thread_fn, daemon=True)
thread.start()
is_running.wait()
gr = holder[0]
# Can't access or modify context if the greenlet is running
# in a different thread
with self.assertRaisesRegex(ValueError, "running in a different"):
getattr(gr, 'gr_context')
with self.assertRaisesRegex(ValueError, "running in a different"):
gr.gr_context = None
should_suspend.set()
did_suspend.wait()
# OK to access and modify context if greenlet is suspended
self.assertIs(gr.gr_context, ctx)
self.assertEqual(gr.gr_context[var], 2)
gr.gr_context = None
should_exit.set()
thread.join()
self.assertEqual(holder, [gr, None])
# Context can still be accessed/modified when greenlet is dead:
self.assertIsNone(gr.gr_context)
gr.gr_context = ctx
self.assertIs(gr.gr_context, ctx)
@unittest.skipIf(Context is not None, "ContextVar supported")
class NoContextVarsTests(unittest.TestCase):
def test_contextvars_errors(self):
let1 = greenlet(getcurrent().switch)
self.assertFalse(hasattr(let1, 'gr_context'))
with self.assertRaises(AttributeError):
getattr(let1, 'gr_context')
with self.assertRaises(AttributeError):
let1.gr_context = None
let1.switch()
with self.assertRaises(AttributeError):
getattr(let1, 'gr_context')
with self.assertRaises(AttributeError):
let1.gr_context = None

View File

@ -0,0 +1,18 @@
from __future__ import print_function
from __future__ import absolute_import
import unittest
import greenlet
from . import _test_extension_cpp
class CPPTests(unittest.TestCase):
def test_exception_switch(self):
greenlets = []
for i in range(4):
g = greenlet.greenlet(_test_extension_cpp.test_exception_switch)
g.switch(i)
greenlets.append(g)
for i, g in enumerate(greenlets):
self.assertEqual(g.switch(), i)

View File

@ -0,0 +1,77 @@
from __future__ import print_function
from __future__ import absolute_import
import sys
import unittest
import greenlet
from . import _test_extension
class CAPITests(unittest.TestCase):
def test_switch(self):
self.assertEqual(
50, _test_extension.test_switch(greenlet.greenlet(lambda: 50)))
def test_switch_kwargs(self):
def foo(x, y):
return x * y
g = greenlet.greenlet(foo)
self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2))
def test_setparent(self):
def foo():
def bar():
greenlet.getcurrent().parent.switch()
# This final switch should go back to the main greenlet, since
# the test_setparent() function in the C extension should have
# reparented this greenlet.
greenlet.getcurrent().parent.switch()
raise AssertionError("Should never have reached this code")
child = greenlet.greenlet(bar)
child.switch()
greenlet.getcurrent().parent.switch(child)
greenlet.getcurrent().parent.throw(
AssertionError("Should never reach this code"))
foo_child = greenlet.greenlet(foo).switch()
self.assertEqual(None, _test_extension.test_setparent(foo_child))
def test_getcurrent(self):
_test_extension.test_getcurrent()
def test_new_greenlet(self):
self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15))
def test_raise_greenlet_dead(self):
self.assertRaises(
greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet)
def test_raise_greenlet_error(self):
self.assertRaises(
greenlet.error, _test_extension.test_raise_greenlet_error)
def test_throw(self):
seen = []
def foo():
try:
greenlet.getcurrent().parent.switch()
except ValueError:
seen.append(sys.exc_info()[1])
except greenlet.GreenletExit:
raise AssertionError
g = greenlet.greenlet(foo)
g.switch()
_test_extension.test_throw(g)
self.assertEqual(len(seen), 1)
self.assertTrue(
isinstance(seen[0], ValueError),
"ValueError was not raised in foo()")
self.assertEqual(
str(seen[0]),
'take that sucka!',
"message doesn't match")
if __name__ == '__main__':
unittest.main()

View File

@ -0,0 +1,77 @@
import gc
import sys
import unittest
import weakref
import greenlet
class GCTests(unittest.TestCase):
def test_dead_circular_ref(self):
o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch())
gc.collect()
self.assertTrue(o() is None)
self.assertFalse(gc.garbage, gc.garbage)
if greenlet.GREENLET_USE_GC:
# These only work with greenlet gc support
def test_circular_greenlet(self):
class circular_greenlet(greenlet.greenlet):
pass
o = circular_greenlet()
o.self = o
o = weakref.ref(o)
gc.collect()
self.assertTrue(o() is None)
self.assertFalse(gc.garbage, gc.garbage)
def test_inactive_ref(self):
class inactive_greenlet(greenlet.greenlet):
def __init__(self):
greenlet.greenlet.__init__(self, run=self.run)
def run(self):
pass
o = inactive_greenlet()
o = weakref.ref(o)
gc.collect()
self.assertTrue(o() is None)
self.assertFalse(gc.garbage, gc.garbage)
def test_finalizer_crash(self):
# This test is designed to crash when active greenlets
# are made garbage collectable, until the underlying
# problem is resolved. How does it work:
# - order of object creation is important
# - array is created first, so it is moved to unreachable first
# - we create a cycle between a greenlet and this array
# - we create an object that participates in gc, is only
# referenced by a greenlet, and would corrupt gc lists
# on destruction, the easiest is to use an object with
# a finalizer
# - because array is the first object in unreachable it is
# cleared first, which causes all references to greenlet
# to disappear and causes greenlet to be destroyed, but since
# it is still live it causes a switch during gc, which causes
# an object with finalizer to be destroyed, which causes stack
# corruption and then a crash
class object_with_finalizer(object):
def __del__(self):
pass
array = []
parent = greenlet.getcurrent()
def greenlet_body():
greenlet.getcurrent().object = object_with_finalizer()
try:
parent.switch()
finally:
del greenlet.getcurrent().object
g = greenlet.greenlet(greenlet_body)
g.array = array
array.append(g)
g.switch()
del array
del g
greenlet.getcurrent()
gc.collect()

View File

@ -0,0 +1,59 @@
import unittest
from greenlet import greenlet
class genlet(greenlet):
def __init__(self, *args, **kwds):
self.args = args
self.kwds = kwds
def run(self):
fn, = self.fn
fn(*self.args, **self.kwds)
def __iter__(self):
return self
def __next__(self):
self.parent = greenlet.getcurrent()
result = self.switch()
if self:
return result
else:
raise StopIteration
# Hack: Python < 2.6 compatibility
next = __next__
def Yield(value):
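# Find the innermost enclosing genlet by walking up the parent chain, then
# deliver the value to that genlet's parent (the consumer waiting in __next__).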
g = greenlet.getcurrent()
while not isinstance(g, genlet):
if g is None:
raise RuntimeError('yield outside a genlet')
g = g.parent
g.parent.switch(value)
def generator(func):
class generator(genlet):
fn = (func,)
return generator
# ____________________________________________________________
class GeneratorTests(unittest.TestCase):
def test_generator(self):
seen = []
def g(n):
for i in range(n):
seen.append(i)
Yield(i)
g = generator(g)
for k in range(3):
for j in g(5):
seen.append(j)
self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4])

View File

@ -0,0 +1,165 @@
import unittest
from greenlet import greenlet
class genlet(greenlet):
def __init__(self, *args, **kwds):
self.args = args
self.kwds = kwds
self.child = None
def run(self):
fn, = self.fn
fn(*self.args, **self.kwds)
def __iter__(self):
return self
def set_child(self, child):
self.child = child
def __next__(self):
if self.child:
child = self.child
while child.child:
tmp = child
child = child.child
tmp.child = None
result = child.switch()
else:
self.parent = greenlet.getcurrent()
result = self.switch()
if self:
return result
else:
raise StopIteration
# Hack: Python < 2.6 compatibility
next = __next__
def Yield(value, level=1):
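# Climb `level` genlet ancestors, recording each bypassed genlet as its
# parent's child so __next__ can resume it later, then switch to the
# greenlet `level` levels up with the value.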
g = greenlet.getcurrent()
while level != 0:
if not isinstance(g, genlet):
raise RuntimeError('yield outside a genlet')
if level > 1:
g.parent.set_child(g)
g = g.parent
level -= 1
g.switch(value)
def Genlet(func):
class Genlet(genlet):
fn = (func,)
return Genlet
# ____________________________________________________________
def g1(n, seen):
for i in range(n):
seen.append(i + 1)
yield i
def g2(n, seen):
for i in range(n):
seen.append(i + 1)
Yield(i)
g2 = Genlet(g2)
def nested(i):
Yield(i)
def g3(n, seen):
for i in range(n):
seen.append(i + 1)
nested(i)
g3 = Genlet(g3)
def a(n):
if n == 0:
return
for ii in ax(n - 1):
Yield(ii)
Yield(n)
ax = Genlet(a)
def perms(l):
if len(l) > 1:
for e in l:
# No syntactical sugar for generator expressions
[Yield([e] + p) for p in perms([x for x in l if x != e])]
else:
Yield(l)
perms = Genlet(perms)
def gr1(n):
for ii in range(1, n):
Yield(ii)
Yield(ii * ii, 2)
gr1 = Genlet(gr1)
def gr2(n, seen):
for ii in gr1(n):
seen.append(ii)
gr2 = Genlet(gr2)
class NestedGeneratorTests(unittest.TestCase):
def test_layered_genlets(self):
seen = []
for ii in gr2(5, seen):
seen.append(ii)
self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16])
def test_permutations(self):
gen_perms = perms(list(range(4)))
permutations = list(gen_perms)
self.assertEqual(len(permutations), 4 * 3 * 2 * 1)
self.assertTrue([0, 1, 2, 3] in permutations)
self.assertTrue([3, 2, 1, 0] in permutations)
res = []
for ii in zip(perms(list(range(4))), perms(list(range(3)))):
res.append(ii)
self.assertEqual(
res,
[([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]),
([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]),
([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])])
# XXX Test to make sure we are working as a generator expression
def test_genlet_simple(self):
for g in [g1, g2, g3]:
seen = []
for k in range(3):
for j in g(5, seen):
seen.append(j)
self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4])
def test_genlet_bad(self):
try:
Yield(10)
except RuntimeError:
pass
def test_nested_genlets(self):
seen = []
for ii in ax(5):
seen.append(ii)

View File

@ -0,0 +1,728 @@
import gc
import sys
import time
import threading
import unittest
from abc import ABCMeta, abstractmethod
from greenlet import greenlet
# We manually manage locks in many tests
# pylint:disable=consider-using-with
class SomeError(Exception):
pass
def fmain(seen):
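# Park until resumed; if an exception is thrown into us, record its type and
# re-raise it, otherwise finish by raising SomeError.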
try:
greenlet.getcurrent().parent.switch()
except:
seen.append(sys.exc_info()[0])
raise
raise SomeError
def send_exception(g, exc):
# note: send_exception(g, exc) can now be done with g.throw(exc).
# the purpose of this test is to explicitly check the propagation rules.
def crasher(exc):
raise exc
g1 = greenlet(crasher, parent=g)
g1.switch(exc)
class TestGreenlet(unittest.TestCase):
def test_simple(self):
lst = []
def f():
lst.append(1)
greenlet.getcurrent().parent.switch()
lst.append(3)
g = greenlet(f)
lst.append(0)
g.switch()
lst.append(2)
g.switch()
lst.append(4)
self.assertEqual(lst, list(range(5)))
def test_parent_equals_None(self):
g = greenlet(parent=None)
self.assertIsNotNone(g)
self.assertIs(g.parent, greenlet.getcurrent())
def test_run_equals_None(self):
g = greenlet(run=None)
self.assertIsNotNone(g)
self.assertIsNone(g.run)
def test_two_children(self):
lst = []
def f():
lst.append(1)
greenlet.getcurrent().parent.switch()
lst.extend([1, 1])
g = greenlet(f)
h = greenlet(f)
g.switch()
self.assertEqual(len(lst), 1)
h.switch()
self.assertEqual(len(lst), 2)
h.switch()
self.assertEqual(len(lst), 4)
self.assertEqual(h.dead, True)
g.switch()
self.assertEqual(len(lst), 6)
self.assertEqual(g.dead, True)
def test_two_recursive_children(self):
lst = []
def f():
lst.append(1)
greenlet.getcurrent().parent.switch()
def g():
lst.append(1)
g = greenlet(f)
g.switch()
lst.append(1)
g = greenlet(g)
g.switch()
self.assertEqual(len(lst), 3)
self.assertEqual(sys.getrefcount(g), 2)
def test_threads(self):
success = []
def f():
self.test_simple()
success.append(True)
ths = [threading.Thread(target=f) for i in range(10)]
for th in ths:
th.start()
for th in ths:
th.join()
self.assertEqual(len(success), len(ths))
def test_exception(self):
seen = []
g1 = greenlet(fmain)
g2 = greenlet(fmain)
g1.switch(seen)
g2.switch(seen)
g2.parent = g1
self.assertEqual(seen, [])
self.assertRaises(SomeError, g2.switch)
self.assertEqual(seen, [SomeError])
g2.switch()
self.assertEqual(seen, [SomeError])
def test_send_exception(self):
seen = []
g1 = greenlet(fmain)
g1.switch(seen)
self.assertRaises(KeyError, send_exception, g1, KeyError)
self.assertEqual(seen, [KeyError])
def test_dealloc(self):
seen = []
g1 = greenlet(fmain)
g2 = greenlet(fmain)
g1.switch(seen)
g2.switch(seen)
self.assertEqual(seen, [])
del g1
gc.collect()
self.assertEqual(seen, [greenlet.GreenletExit])
del g2
gc.collect()
self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit])
def test_dealloc_other_thread(self):
seen = []
someref = []
lock = threading.Lock()
lock.acquire()
lock2 = threading.Lock()
lock2.acquire()
def f():
g1 = greenlet(fmain)
g1.switch(seen)
someref.append(g1)
del g1
gc.collect()
lock.release()
lock2.acquire()
greenlet() # trigger release
lock.release()
lock2.acquire()
t = threading.Thread(target=f)
t.start()
lock.acquire()
self.assertEqual(seen, [])
self.assertEqual(len(someref), 1)
del someref[:]
gc.collect()
# g1 is not released immediately because it's from another thread
self.assertEqual(seen, [])
lock2.release()
lock.acquire()
self.assertEqual(seen, [greenlet.GreenletExit])
lock2.release()
t.join()
def test_frame(self):
def f1():
f = sys._getframe(0) # pylint:disable=protected-access
self.assertEqual(f.f_back, None)
greenlet.getcurrent().parent.switch(f)
return "meaning of life"
g = greenlet(f1)
frame = g.switch()
self.assertTrue(frame is g.gr_frame)
self.assertTrue(g)
from_g = g.switch()
self.assertFalse(g)
self.assertEqual(from_g, 'meaning of life')
self.assertEqual(g.gr_frame, None)
def test_thread_bug(self):
def runner(x):
g = greenlet(lambda: time.sleep(x))
g.switch()
t1 = threading.Thread(target=runner, args=(0.2,))
t2 = threading.Thread(target=runner, args=(0.3,))
t1.start()
t2.start()
t1.join()
t2.join()
def test_switch_kwargs(self):
def run(a, b):
self.assertEqual(a, 4)
self.assertEqual(b, 2)
return 42
x = greenlet(run).switch(a=4, b=2)
self.assertEqual(x, 42)
def test_switch_kwargs_to_parent(self):
def run(x):
greenlet.getcurrent().parent.switch(x=x)
greenlet.getcurrent().parent.switch(2, x=3)
return x, x ** 2
g = greenlet(run)
self.assertEqual({'x': 3}, g.switch(3))
self.assertEqual(((2,), {'x': 3}), g.switch())
self.assertEqual((3, 9), g.switch())
def test_switch_to_another_thread(self):
data = {}
error = None
created_event = threading.Event()
done_event = threading.Event()
def run():
data['g'] = greenlet(lambda: None)
created_event.set()
done_event.wait()
thread = threading.Thread(target=run)
thread.start()
created_event.wait()
try:
data['g'].switch()
except greenlet.error:
error = sys.exc_info()[1]
self.assertIsNotNone(error, "greenlet.error was not raised!")
done_event.set()
thread.join()
def test_exc_state(self):
def f():
try:
raise ValueError('fun')
except: # pylint:disable=bare-except
exc_info = sys.exc_info()
greenlet(h).switch()
self.assertEqual(exc_info, sys.exc_info())
def h():
self.assertEqual(sys.exc_info(), (None, None, None))
greenlet(f).switch()
def test_instance_dict(self):
def f():
greenlet.getcurrent().test = 42
def deldict(g):
del g.__dict__
def setdict(g, value):
g.__dict__ = value
g = greenlet(f)
self.assertEqual(g.__dict__, {})
g.switch()
self.assertEqual(g.test, 42)
self.assertEqual(g.__dict__, {'test': 42})
g.__dict__ = g.__dict__
self.assertEqual(g.__dict__, {'test': 42})
self.assertRaises(TypeError, deldict, g)
self.assertRaises(TypeError, setdict, g, 42)
def test_threaded_reparent(self):
data = {}
created_event = threading.Event()
done_event = threading.Event()
def run():
data['g'] = greenlet(lambda: None)
created_event.set()
done_event.wait()
def blank():
greenlet.getcurrent().parent.switch()
def setparent(g, value):
g.parent = value
thread = threading.Thread(target=run)
thread.start()
created_event.wait()
g = greenlet(blank)
g.switch()
self.assertRaises(ValueError, setparent, g, data['g'])
done_event.set()
thread.join()
def test_deepcopy(self):
import copy
self.assertRaises(TypeError, copy.copy, greenlet())
self.assertRaises(TypeError, copy.deepcopy, greenlet())
def test_parent_restored_on_kill(self):
hub = greenlet(lambda: None)
main = greenlet.getcurrent()
result = []
def worker():
try:
# Wait to be killed
main.switch()
except greenlet.GreenletExit:
# Resurrect and switch to parent
result.append(greenlet.getcurrent().parent)
result.append(greenlet.getcurrent())
hub.switch()
g = greenlet(worker, parent=hub)
g.switch()
del g
self.assertTrue(result)
self.assertEqual(result[0], main)
self.assertEqual(result[1].parent, hub)
def test_parent_return_failure(self):
# No run causes AttributeError on switch
g1 = greenlet()
# Greenlet that implicitly switches to parent
g2 = greenlet(lambda: None, parent=g1)
# AttributeError should propagate to us, no fatal errors
self.assertRaises(AttributeError, g2.switch)
def test_throw_exception_not_lost(self):
class mygreenlet(greenlet):
def __getattribute__(self, name):
try:
raise Exception()
except: # pylint:disable=bare-except
pass
return greenlet.__getattribute__(self, name)
g = mygreenlet(lambda: None)
self.assertRaises(SomeError, g.throw, SomeError())
def test_throw_doesnt_crash(self):
result = []
def worker():
greenlet.getcurrent().parent.switch()
def creator():
g = greenlet(worker)
g.switch()
result.append(g)
t = threading.Thread(target=creator)
t.start()
t.join()
self.assertRaises(greenlet.error, result[0].throw, SomeError())
def test_recursive_startup(self):
class convoluted(greenlet):
def __init__(self):
greenlet.__init__(self)
self.count = 0
def __getattribute__(self, name):
if name == 'run' and self.count == 0:
self.count = 1
self.switch(43)
return greenlet.__getattribute__(self, name)
def run(self, value):
while True:
self.parent.switch(value)
g = convoluted()
self.assertEqual(g.switch(42), 43)
def test_unexpected_reparenting(self):
another = []
def worker():
g = greenlet(lambda: None)
another.append(g)
g.switch()
t = threading.Thread(target=worker)
t.start()
t.join()
class convoluted(greenlet):
def __getattribute__(self, name):
if name == 'run':
self.parent = another[0] # pylint:disable=attribute-defined-outside-init
return greenlet.__getattribute__(self, name)
g = convoluted(lambda: None)
self.assertRaises(greenlet.error, g.switch)
def test_threaded_updatecurrent(self):
# released when main thread should execute
lock1 = threading.Lock()
lock1.acquire()
# released when another thread should execute
lock2 = threading.Lock()
lock2.acquire()
class finalized(object):
def __del__(self):
# happens while in green_updatecurrent() in main greenlet
# should be very careful not to accidentally call it again
# at the same time we must make sure another thread executes
lock2.release()
lock1.acquire()
# now ts_current belongs to another thread
def deallocator():
greenlet.getcurrent().parent.switch()
def fthread():
lock2.acquire()
greenlet.getcurrent()
del g[0]
lock1.release()
lock2.acquire()
greenlet.getcurrent()
lock1.release()
main = greenlet.getcurrent()
g = [greenlet(deallocator)]
g[0].bomb = finalized()
g[0].switch()
t = threading.Thread(target=fthread)
t.start()
# let another thread grab ts_current and deallocate g[0]
lock2.release()
lock1.acquire()
# this is the cornerstone
# getcurrent() will notice that ts_current belongs to another thread
# and start the update process, which would notice that g[0] should
# be deallocated, and that will execute an object's finalizer. Now,
# that object will let another thread run so it can grab ts_current
# again, which would likely crash the interpreter if there's no
# check for this case at the end of green_updatecurrent(). This test
# passes if getcurrent() returns correct result, but it's likely
# to randomly crash if it's not anyway.
self.assertEqual(greenlet.getcurrent(), main)
# wait for another thread to complete, just in case
t.join()
def test_dealloc_switch_args_not_lost(self):
seen = []
def worker():
# wait for the value
value = greenlet.getcurrent().parent.switch()
# delete all references to ourself
del worker[0]
initiator.parent = greenlet.getcurrent().parent
# switch to main with the value, but because
# ts_current is the last reference to us we
# return immediately
try:
greenlet.getcurrent().parent.switch(value)
finally:
seen.append(greenlet.getcurrent())
def initiator():
return 42 # implicitly falls thru to parent
worker = [greenlet(worker)]
worker[0].switch() # prime worker
initiator = greenlet(initiator, worker[0])
value = initiator.switch()
self.assertTrue(seen)
self.assertEqual(value, 42)
def test_tuple_subclass(self):
if sys.version_info[0] > 2:
# There's no apply in Python 3.x
def _apply(func, a, k):
func(*a, **k)
else:
_apply = apply # pylint:disable=undefined-variable
class mytuple(tuple):
def __len__(self):
greenlet.getcurrent().switch()
return tuple.__len__(self)
args = mytuple()
kwargs = dict(a=42)
def switchapply():
_apply(greenlet.getcurrent().parent.switch, args, kwargs)
g = greenlet(switchapply)
self.assertEqual(g.switch(), kwargs)
def test_abstract_subclasses(self):
AbstractSubclass = ABCMeta(
'AbstractSubclass',
(greenlet,),
{'run': abstractmethod(lambda self: None)})
class BadSubclass(AbstractSubclass):
pass
class GoodSubclass(AbstractSubclass):
def run(self):
pass
GoodSubclass() # should not raise
self.assertRaises(TypeError, BadSubclass)
def test_implicit_parent_with_threads(self):
if not gc.isenabled():
return # cannot test with disabled gc
N = gc.get_threshold()[0]
if N < 50:
return # cannot test with such a small N
def attempt():
lock1 = threading.Lock()
lock1.acquire()
lock2 = threading.Lock()
lock2.acquire()
recycled = [False]
def another_thread():
lock1.acquire() # wait for gc
greenlet.getcurrent() # update ts_current
lock2.release() # release gc
t = threading.Thread(target=another_thread)
t.start()
class gc_callback(object):
def __del__(self):
lock1.release()
lock2.acquire()
recycled[0] = True
class garbage(object):
def __init__(self):
self.cycle = self
self.callback = gc_callback()
l = []
x = range(N*2)
current = greenlet.getcurrent()
g = garbage()
for _ in x:
g = None # lose reference to garbage
if recycled[0]:
# gc callback called prematurely
t.join()
return False
last = greenlet()
if recycled[0]:
break # yes! gc called in green_new
l.append(last) # increase allocation counter
else:
# gc callback not called when expected
gc.collect()
if recycled[0]:
t.join()
return False
self.assertEqual(last.parent, current)
for g in l:
self.assertEqual(g.parent, current)
return True
for _ in range(5):
if attempt():
break
def test_issue_245_reference_counting_subclass_no_threads(self):
# https://github.com/python-greenlet/greenlet/issues/245
# Before the fix, this crashed pretty reliably on
# Python 3.10, at least on macOS; but much less reliably on other
# interpreters (memory layout must have changed).
# The threaded test crashed more reliably on more interpreters.
from greenlet import getcurrent
from greenlet import GreenletExit
class Greenlet(greenlet):
pass
initial_refs = sys.getrefcount(Greenlet)
# This has to be an instance variable because
# Python 2 raises a SyntaxError if we delete a local
# variable referenced in an inner scope.
self.glets = [] # pylint:disable=attribute-defined-outside-init
def greenlet_main():
try:
getcurrent().parent.switch()
except GreenletExit:
self.glets.append(getcurrent())
# Before the
for _ in range(10):
Greenlet(greenlet_main).switch()
del self.glets
self.assertEqual(sys.getrefcount(Greenlet), initial_refs)
def test_issue_245_reference_counting_subclass_threads(self):
# https://github.com/python-greenlet/greenlet/issues/245
from threading import Thread
from threading import Event
from greenlet import getcurrent
class MyGreenlet(greenlet):
pass
glets = []
ref_cleared = Event()
def greenlet_main():
getcurrent().parent.switch()
def thread_main(greenlet_running_event):
mine = MyGreenlet(greenlet_main)
glets.append(mine)
# The greenlets being deleted must be active
mine.switch()
# Don't keep any reference to it in this thread
del mine
# Let main know we published our greenlet.
greenlet_running_event.set()
# Wait for main to let us know the references are
# gone and the greenlet objects no longer reachable
ref_cleared.wait()
# The creating thread must call getcurrent() (or a few other
# greenlet APIs) because that's when the thread-local list of dead
# greenlets gets cleared.
getcurrent()
# We start with 3 references to the subclass:
# - This module
# - Its __mro__
# - The __subclasses__ attribute of greenlet
# - (If we call gc.get_referents(), we find four entries, including
# some other tuple ``(greenlet)`` that I'm not sure about but must be part
# of the machinery.)
#
# On Python 3.10 it's often enough to just run 3 threads; on Python 2.7,
# more threads are needed, and the results are still
# non-deterministic. Presumably the memory layouts are different
initial_refs = sys.getrefcount(MyGreenlet)
thread_ready_events = []
for _ in range(
initial_refs + 45
):
event = Event()
thread = Thread(target=thread_main, args=(event,))
thread_ready_events.append(event)
thread.start()
for done_event in thread_ready_events:
done_event.wait()
del glets[:]
ref_cleared.set()
# Let any other thread run; it will crash the interpreter
# if not fixed (or silently corrupt memory and we possibly crash
# later).
time.sleep(1)
self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs)
class TestRepr(unittest.TestCase):
def assertEndsWith(self, got, suffix):
self.assertTrue(got.endswith(suffix), (got, suffix))
def test_main_while_running(self):
r = repr(greenlet.getcurrent())
self.assertEndsWith(r, " current active started main>")
def test_main_in_background(self):
main = greenlet.getcurrent()
def run():
return repr(main)
g = greenlet(run)
r = g.switch()
self.assertEndsWith(r, ' suspended active started main>')
def test_initial(self):
r = repr(greenlet())
self.assertEndsWith(r, ' pending>')
def test_main_from_other_thread(self):
main = greenlet.getcurrent()
class T(threading.Thread):
original_main = thread_main = None
main_glet = None
def run(self):
self.original_main = repr(main)
self.main_glet = greenlet.getcurrent()
self.thread_main = repr(self.main_glet)
t = T()
t.start()
t.join(10)
self.assertEndsWith(t.original_main, ' suspended active started main>')
self.assertEndsWith(t.thread_main, ' current active started main>')
r = repr(t.main_glet)
# main greenlets, even from dead threads, never really appear dead
# TODO: Can we find a better way to differentiate that?
assert not t.main_glet.dead
self.assertEndsWith(r, ' suspended active started main>')
def test_dead(self):
g = greenlet(lambda: None)
g.switch()
self.assertEndsWith(repr(g), ' dead>')
self.assertNotIn('suspended', repr(g))
self.assertNotIn('started', repr(g))
self.assertNotIn('active', repr(g))
def test_formatting_produces_native_str(self):
# https://github.com/python-greenlet/greenlet/issues/218
# %s formatting on Python 2 was producing unicode, not str.
g_dead = greenlet(lambda: None)
g_not_started = greenlet(lambda: None)
g_cur = greenlet.getcurrent()
for g in g_dead, g_not_started, g_cur:
self.assertIsInstance(
'%s' % (g,),
str
)
self.assertIsInstance(
'%r' % (g,),
str,
)
if __name__ == '__main__':
unittest.main()

View File

@ -0,0 +1,178 @@
import unittest
import sys
import gc
import time
import weakref
import threading
import greenlet
class TestLeaks(unittest.TestCase):
def test_arg_refs(self):
args = ('a', 'b', 'c')
refcount_before = sys.getrefcount(args)
# pylint:disable=unnecessary-lambda
g = greenlet.greenlet(
lambda *args: greenlet.getcurrent().parent.switch(*args))
for _ in range(100):
g.switch(*args)
self.assertEqual(sys.getrefcount(args), refcount_before)
def test_kwarg_refs(self):
kwargs = {}
# pylint:disable=unnecessary-lambda
g = greenlet.greenlet(
lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs))
for _ in range(100):
g.switch(**kwargs)
self.assertEqual(sys.getrefcount(kwargs), 2)
assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0
def recycle_threads(self):
# By introducing a thread that does sleep we allow other threads,
# that have triggered their __block condition, but did not have a
# chance to deallocate their thread state yet, to finally do so.
# The way it works is by requiring a GIL switch (different thread),
# which does a GIL release (sleep), which might do a GIL switch
# to finished threads and allow them to clean up.
def worker():
time.sleep(0.001)
t = threading.Thread(target=worker)
t.start()
time.sleep(0.001)
t.join()
def test_threaded_leak(self):
gg = []
def worker():
# only main greenlet present
gg.append(weakref.ref(greenlet.getcurrent()))
for _ in range(2):
t = threading.Thread(target=worker)
t.start()
t.join()
del t
greenlet.getcurrent() # update ts_current
self.recycle_threads()
greenlet.getcurrent() # update ts_current
gc.collect()
greenlet.getcurrent() # update ts_current
for g in gg:
self.assertIsNone(g())
def test_threaded_adv_leak(self):
gg = []
def worker():
# main and additional *finished* greenlets
ll = greenlet.getcurrent().ll = []
def additional():
ll.append(greenlet.getcurrent())
for _ in range(2):
greenlet.greenlet(additional).switch()
gg.append(weakref.ref(greenlet.getcurrent()))
for _ in range(2):
t = threading.Thread(target=worker)
t.start()
t.join()
del t
greenlet.getcurrent() # update ts_current
self.recycle_threads()
greenlet.getcurrent() # update ts_current
gc.collect()
greenlet.getcurrent() # update ts_current
for g in gg:
self.assertIsNone(g())
def test_issue251_killing_cross_thread_leaks_list(self, manually_collect_background=True):
# See https://github.com/python-greenlet/greenlet/issues/251
# Killing a greenlet (probably not the main one)
# in one thread from another thread would
# result in leaking a list (the ts_delkey list).
# For the test to be valid, even empty lists have to be tracked by the
# GC
assert gc.is_tracked([])
def count_objects(kind=list):
# pylint:disable=unidiomatic-typecheck
# Collect the garbage.
for _ in range(3):
gc.collect()
gc.collect()
return sum(
1
for x in gc.get_objects()
if type(x) is kind
)
# XXX: The main greenlet of a dead thread is only released
# when one of the proper greenlet APIs is used from a different
# running thread. See #252 (https://github.com/python-greenlet/greenlet/issues/252)
greenlet.getcurrent()
greenlets_before = count_objects(greenlet.greenlet)
background_glet_running = threading.Event()
background_glet_killed = threading.Event()
background_greenlets = []
def background_greenlet():
# Throw control back to the main greenlet.
greenlet.getcurrent().parent.switch()
def background_thread():
glet = greenlet.greenlet(background_greenlet)
background_greenlets.append(glet)
glet.switch() # Be sure it's active.
# Control is ours again.
del glet # Delete one reference from the thread it runs in.
background_glet_running.set()
background_glet_killed.wait()
# To trigger the background collection of the dead
# greenlet, thus clearing out the contents of the list, we
# need to run some APIs. See issue 252.
if manually_collect_background:
greenlet.getcurrent()
t = threading.Thread(target=background_thread)
t.start()
background_glet_running.wait()
lists_before = count_objects()
assert len(background_greenlets) == 1
self.assertFalse(background_greenlets[0].dead)
# Delete the last reference to the background greenlet
# from a different thread. This puts it in the background thread's
# ts_delkey list.
del background_greenlets[:]
background_glet_killed.set()
# Now wait for the background thread to die.
t.join(10)
del t
# Free the background main greenlet by forcing greenlet to notice a difference.
greenlet.getcurrent()
greenlets_after = count_objects(greenlet.greenlet)
lists_after = count_objects()
# On 2.7, we observe that lists_after is smaller than
# lists_before. No idea what lists got cleaned up. All the
# Python 3 versions match exactly.
self.assertLessEqual(lists_after, lists_before)
self.assertEqual(greenlets_before, greenlets_after)
@unittest.expectedFailure
def test_issue251_issue252_need_to_collect_in_background(self):
# This still fails because the leak of the list
# still exists when we don't call a greenlet API before exiting the
# thread. The proximate cause is that neither of the two greenlets
# from the background thread are actually being destroyed, even though
# the GC is in fact visiting both objects.
# It's not clear where that leak is. For some reason the thread-local dict
# holding it isn't being cleaned up.
self.test_issue251_killing_cross_thread_leaks_list(manually_collect_background=False)

View File

@ -0,0 +1,19 @@
import greenlet
import unittest
class Test(unittest.TestCase):
def test_stack_saved(self):
main = greenlet.getcurrent()
self.assertEqual(main._stack_saved, 0)
def func():
main.switch(main._stack_saved)
g = greenlet.greenlet(func)
x = g.switch()
assert x > 0, x
assert g._stack_saved > 0, g._stack_saved
g.switch()
assert g._stack_saved == 0, g._stack_saved

View File

@ -0,0 +1,100 @@
import sys
import unittest
from greenlet import greenlet
def switch(*args):
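# Helper: give control (and any arguments) back to the parent greenlet and
# return whatever value is sent in when this greenlet is resumed.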
return greenlet.getcurrent().parent.switch(*args)
class ThrowTests(unittest.TestCase):
def test_class(self):
def f():
try:
switch("ok")
except RuntimeError:
switch("ok")
return
switch("fail")
g = greenlet(f)
res = g.switch()
self.assertEqual(res, "ok")
res = g.throw(RuntimeError)
self.assertEqual(res, "ok")
def test_val(self):
def f():
try:
switch("ok")
except RuntimeError:
val = sys.exc_info()[1]
if str(val) == "ciao":
switch("ok")
return
switch("fail")
g = greenlet(f)
res = g.switch()
self.assertEqual(res, "ok")
res = g.throw(RuntimeError("ciao"))
self.assertEqual(res, "ok")
g = greenlet(f)
res = g.switch()
self.assertEqual(res, "ok")
res = g.throw(RuntimeError, "ciao")
self.assertEqual(res, "ok")
def test_kill(self):
def f():
switch("ok")
switch("fail")
g = greenlet(f)
res = g.switch()
self.assertEqual(res, "ok")
res = g.throw()
self.assertTrue(isinstance(res, greenlet.GreenletExit))
self.assertTrue(g.dead)
res = g.throw() # immediately eaten by the already-dead greenlet
self.assertTrue(isinstance(res, greenlet.GreenletExit))
def test_throw_goes_to_original_parent(self):
main = greenlet.getcurrent()
def f1():
try:
main.switch("f1 ready to catch")
except IndexError:
return "caught"
else:
return "normal exit"
def f2():
main.switch("from f2")
g1 = greenlet(f1)
g2 = greenlet(f2, parent=g1)
self.assertRaises(IndexError, g2.throw, IndexError)
self.assertTrue(g2.dead)
self.assertTrue(g1.dead)
g1 = greenlet(f1)
g2 = greenlet(f2, parent=g1)
res = g1.switch()
self.assertEqual(res, "f1 ready to catch")
res = g2.throw(IndexError)
self.assertEqual(res, "caught")
self.assertTrue(g2.dead)
self.assertTrue(g1.dead)
g1 = greenlet(f1)
g2 = greenlet(f2, parent=g1)
res = g1.switch()
self.assertEqual(res, "f1 ready to catch")
res = g2.switch()
self.assertEqual(res, "from f2")
res = g2.throw(IndexError)
self.assertEqual(res, "caught")
self.assertTrue(g2.dead)
self.assertTrue(g1.dead)

View File

@ -0,0 +1,267 @@
import sys
import unittest
import greenlet
class SomeError(Exception):
pass
class GreenletTracer(object):
oldtrace = None
def __init__(self, error_on_trace=False):
self.actions = []
self.error_on_trace = error_on_trace
def __call__(self, *args):
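# greenlet calls the trace function with (event, (origin, target));
# record every call so the tests can assert on the exact sequence.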
self.actions.append(args)
if self.error_on_trace:
raise SomeError
def __enter__(self):
self.oldtrace = greenlet.settrace(self)
return self.actions
def __exit__(self, *args):
greenlet.settrace(self.oldtrace)
class TestGreenletTracing(unittest.TestCase):
"""
Tests of ``greenlet.settrace()``
"""
def test_greenlet_tracing(self):
main = greenlet.getcurrent()
def dummy():
pass
def dummyexc():
raise SomeError()
with GreenletTracer() as actions:
g1 = greenlet.greenlet(dummy)
g1.switch()
g2 = greenlet.greenlet(dummyexc)
self.assertRaises(SomeError, g2.switch)
self.assertEqual(actions, [
('switch', (main, g1)),
('switch', (g1, main)),
('switch', (main, g2)),
('throw', (g2, main)),
])
def test_exception_disables_tracing(self):
main = greenlet.getcurrent()
def dummy():
main.switch()
g = greenlet.greenlet(dummy)
g.switch()
with GreenletTracer(error_on_trace=True) as actions:
self.assertRaises(SomeError, g.switch)
self.assertEqual(greenlet.gettrace(), None)
self.assertEqual(actions, [
('switch', (main, g)),
])
class PythonTracer(object):
oldtrace = None
def __init__(self):
self.actions = []
def __call__(self, frame, event, arg):
# Record the co_name so we have an idea what function we're in.
self.actions.append((event, frame.f_code.co_name))
def __enter__(self):
self.oldtrace = sys.setprofile(self)
return self.actions
def __exit__(self, *args):
sys.setprofile(self.oldtrace)
def tpt_callback():
return 42
class TestPythonTracing(unittest.TestCase):
"""
Tests of the interaction of ``sys.settrace()``
with greenlet facilities.
NOTE: Most of this is probably CPython specific.
"""
maxDiff = None
def test_trace_events_trivial(self):
with PythonTracer() as actions:
tpt_callback()
# If we use the sys.settrace instead of setprofile, we get
# this:
# self.assertEqual(actions, [
# ('call', 'tpt_callback'),
# ('call', '__exit__'),
# ])
self.assertEqual(actions, [
('return', '__enter__'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('call', '__exit__'),
('c_call', '__exit__'),
])
def _trace_switch(self, glet):
with PythonTracer() as actions:
glet.switch()
return actions
def _check_trace_events_func_already_set(self, glet):
actions = self._trace_switch(glet)
self.assertEqual(actions, [
('return', '__enter__'),
('c_call', '_trace_switch'),
('call', 'run'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('return', 'run'),
('c_return', '_trace_switch'),
('call', '__exit__'),
('c_call', '__exit__'),
])
def test_trace_events_into_greenlet_func_already_set(self):
def run():
return tpt_callback()
self._check_trace_events_func_already_set(greenlet.greenlet(run))
def test_trace_events_into_greenlet_subclass_already_set(self):
class X(greenlet.greenlet):
def run(self):
return tpt_callback()
self._check_trace_events_func_already_set(X())
def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer):
g.switch()
tpt_callback()
tracer.__exit__()
self.assertEqual(tracer.actions, [
('return', '__enter__'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('return', 'run'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('call', '__exit__'),
('c_call', '__exit__'),
])
def test_trace_events_from_greenlet_func_sets_profiler(self):
tracer = PythonTracer()
def run():
tracer.__enter__()
return tpt_callback()
self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run),
tracer)
def test_trace_events_from_greenlet_subclass_sets_profiler(self):
tracer = PythonTracer()
class X(greenlet.greenlet):
def run(self):
tracer.__enter__()
return tpt_callback()
self._check_trace_events_from_greenlet_sets_profiler(X(), tracer)
def test_trace_events_multiple_greenlets_switching(self):
tracer = PythonTracer()
g1 = None
g2 = None
def g1_run():
tracer.__enter__()
tpt_callback()
g2.switch()
tpt_callback()
return 42
def g2_run():
tpt_callback()
tracer.__exit__()
tpt_callback()
g1.switch()
g1 = greenlet.greenlet(g1_run)
g2 = greenlet.greenlet(g2_run)
x = g1.switch()
self.assertEqual(x, 42)
tpt_callback() # ensure not in the trace
self.assertEqual(tracer.actions, [
('return', '__enter__'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('c_call', 'g1_run'),
('call', 'g2_run'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('call', '__exit__'),
('c_call', '__exit__'),
])
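# Note (added commentary, not in the original test): the profile function
# lives on the thread, not on a particular greenlet, so after the
# ('c_call', 'g1_run') entry produced by g2.switch() the very next events
# come from inside g2_run -- switching greenlets does not swap the profiler,
# which is exactly what the expected sequence above documents.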
def test_trace_events_multiple_greenlets_switching_siblings(self):
# Like the first version, but get both greenlets running first
# as "siblings" and then establish the tracing.
tracer = PythonTracer()
g1 = None
g2 = None
def g1_run():
greenlet.getcurrent().parent.switch()
tracer.__enter__()
tpt_callback()
g2.switch()
tpt_callback()
return 42
def g2_run():
greenlet.getcurrent().parent.switch()
tpt_callback()
tracer.__exit__()
tpt_callback()
g1.switch()
g1 = greenlet.greenlet(g1_run)
g2 = greenlet.greenlet(g2_run)
# Start g1
g1.switch()
# And it immediately returns control to us.
# Start g2
g2.switch()
# Which also returns. Now kick off the real part of the test.
x = g1.switch()
self.assertEqual(x, 42)
tpt_callback() # ensure not in the trace
self.assertEqual(tracer.actions, [
('return', '__enter__'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('c_call', 'g1_run'),
('call', 'tpt_callback'),
('return', 'tpt_callback'),
('call', '__exit__'),
('c_call', '__exit__'),
])

View File

@ -0,0 +1,39 @@
#! /usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
import unittest
import greenlet
class VersionTests(unittest.TestCase):
def test_version(self):
def find_dominating_file(name):
if os.path.exists(name):
return name
tried = []
here = os.path.abspath(os.path.dirname(__file__))
for i in range(10):
up = ['..'] * i
path = [here] + up + [name]
fname = os.path.join(*path)
fname = os.path.abspath(fname)
tried.append(fname)
if os.path.exists(fname):
return fname
raise AssertionError("Could not find file " + name + "; checked " + str(tried))
try:
setup_py = find_dominating_file('setup.py')
except AssertionError as e:
raise unittest.SkipTest("Unable to find setup.py; must be out of tree. " + str(e))
invoke_setup = "%s %s --version" % (sys.executable, setup_py)
with os.popen(invoke_setup) as f:
sversion = f.read().strip()
self.assertEqual(sversion, greenlet.__version__)
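# Hedged aside (added, not part of the original test): an equivalent of the
# os.popen() call above using the subprocess module, shown only to make
# explicit what the test shells out to.
def _version_via_subprocess(setup_py):
    import subprocess
    out = subprocess.check_output([sys.executable, setup_py, "--version"])
    return out.decode().strip()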

View File

@ -0,0 +1,34 @@
import gc
import greenlet
import weakref
import unittest
class WeakRefTests(unittest.TestCase):
def test_dead_weakref(self):
def _dead_greenlet():
g = greenlet.greenlet(lambda: None)
g.switch()
return g
o = weakref.ref(_dead_greenlet())
gc.collect()
self.assertEqual(o(), None)
def test_inactive_weakref(self):
o = weakref.ref(greenlet.greenlet())
gc.collect()
self.assertEqual(o(), None)
def test_dealloc_weakref(self):
seen = []
def worker():
try:
greenlet.getcurrent().parent.switch()
finally:
seen.append(g())
g = greenlet.greenlet(worker)
g.switch()
g2 = greenlet.greenlet(lambda: None, g)
g = weakref.ref(g2)
g2 = None
self.assertEqual(seen, [None])
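# Hedged aside (added, not part of the original tests): the basic weakref
# contract these tests rely on -- weakref.ref() keeps returning the referent
# while it is alive and returns None once it has been garbage collected.
def _weakref_contract_sketch():
    class Obj(object):
        pass
    o = Obj()
    r = weakref.ref(o)
    assert r() is o          # referent still reachable
    del o
    gc.collect()
    assert r() is None       # cleared after collection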

View File

@ -0,0 +1 @@
pip

View File

@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: pip
Version: 22.0.4
Version: 22.2.2
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
@ -9,7 +9,6 @@ License: MIT
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
@ -21,6 +20,7 @@ Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.7
@ -88,5 +88,3 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md

View File

@ -1,5 +1,4 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main
pip3.9 = pip._internal.cli.main:main
pip3.8 = pip._internal.cli.main:main

View File

@ -0,0 +1 @@
pip

View File

@ -1,6 +1,6 @@
from typing import List, Optional
__version__ = "22.0.4"
__version__ = "22.2.2"
def main(args: Optional[List[str]] = None) -> int:

View File

@ -0,0 +1,36 @@
"""Execute exactly this copy of pip, within a different environment.
This file is named as it is, to ensure that this module can't be imported via
an import statement.
"""
import runpy
import sys
import types
from importlib.machinery import ModuleSpec, PathFinder
from os.path import dirname
from typing import Optional, Sequence, Union
PIP_SOURCES_ROOT = dirname(dirname(__file__))
class PipImportRedirectingFinder:
@classmethod
def find_spec(
self,
fullname: str,
path: Optional[Sequence[Union[bytes, str]]] = None,
target: Optional[types.ModuleType] = None,
) -> Optional[ModuleSpec]:
if fullname != "pip":
return None
spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
assert spec, (PIP_SOURCES_ROOT, fullname)
return spec
sys.meta_path.insert(0, PipImportRedirectingFinder())
assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
runpy.run_module("pip", run_name="__main__", alter_sys=True)
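# Hedged aside (added commentary, not part of the vendored pip file): once the
# finder is on sys.meta_path it is consulted ahead of the default path-based
# finders, so the name "pip" resolves against PIP_SOURCES_ROOT; roughly:
#
#     import importlib.util
#     spec = importlib.util.find_spec("pip")
#     assert spec is not None and spec.origin.startswith(PIP_SOURCES_ROOT)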

Some files were not shown because too many files have changed in this diff